gcc/fortran/trans-openmp.c
1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "gimplify.h" /* For create_tmp_var_raw. */
27 #include "diagnostic-core.h" /* For internal_error. */
28 #include "gfortran.h"
29 #include "trans.h"
30 #include "trans-stmt.h"
31 #include "trans-types.h"
32 #include "trans-array.h"
33 #include "trans-const.h"
34 #include "arith.h"
35 #include "omp-low.h"
37 int ompws_flags;
39 /* True if OpenMP should privatize what this DECL points to rather
40 than the DECL itself. */
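/* For illustration (a rough sketch, not generated verbatim): a Fortran
   dummy argument is passed by reference, e.g. for

       subroutine sub (x)
         integer :: x
       !$omp parallel private(x)

   X is a PARM_DECL with REFERENCE_TYPE, so privatizing it has to privatize
   the pointed-to integer rather than the reference itself.  */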
42 bool
43 gfc_omp_privatize_by_reference (const_tree decl)
45 tree type = TREE_TYPE (decl);
47 if (TREE_CODE (type) == REFERENCE_TYPE
48 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
49 return true;
51 if (TREE_CODE (type) == POINTER_TYPE)
 53 /* Array POINTER/ALLOCATABLE variables have aggregate types; all user
 54 variables that have POINTER_TYPE and don't have GFC_POINTER_TYPE_P
 55 set are supposed to be privatized by reference. */
56 if (GFC_POINTER_TYPE_P (type))
57 return false;
59 if (!DECL_ARTIFICIAL (decl)
60 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
61 return true;
63 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
64 by the frontend. */
65 if (DECL_LANG_SPECIFIC (decl)
66 && GFC_DECL_SAVED_DESCRIPTOR (decl))
67 return true;
70 return false;
 73 /* Return the predetermined sharing of DECL, or OMP_CLAUSE_DEFAULT_UNSPECIFIED if it has none. */
75 enum omp_clause_default_kind
76 gfc_omp_predetermined_sharing (tree decl)
78 if (DECL_ARTIFICIAL (decl)
79 && ! GFC_DECL_RESULT (decl)
80 && ! (DECL_LANG_SPECIFIC (decl)
81 && GFC_DECL_SAVED_DESCRIPTOR (decl)))
82 return OMP_CLAUSE_DEFAULT_SHARED;
84 /* Cray pointees shouldn't be listed in any clauses and should be
85 gimplified to dereference of the corresponding Cray pointer.
86 Make them all private, so that they are emitted in the debug
87 information. */
88 if (GFC_DECL_CRAY_POINTEE (decl))
89 return OMP_CLAUSE_DEFAULT_PRIVATE;
91 /* Assumed-size arrays are predetermined shared. */
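  /* Illustrative example: a dummy declared as "real :: a(*)" has an
     unknown last upper bound, so A is predetermined shared.  */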
92 if (TREE_CODE (decl) == PARM_DECL
93 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
94 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
95 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
96 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
97 == NULL)
98 return OMP_CLAUSE_DEFAULT_SHARED;
100 /* Dummy procedures aren't considered variables by OpenMP, thus are
101 disallowed in OpenMP clauses. They are represented as PARM_DECLs
102 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
103 to avoid complaining about their uses with default(none). */
104 if (TREE_CODE (decl) == PARM_DECL
105 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
106 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
107 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
109 /* COMMON and EQUIVALENCE decls are shared. They
110 are only referenced through DECL_VALUE_EXPR of the variables
111 contained in them. If those are privatized, they will not be
112 gimplified to the COMMON or EQUIVALENCE decls. */
113 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
114 return OMP_CLAUSE_DEFAULT_SHARED;
116 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
117 return OMP_CLAUSE_DEFAULT_SHARED;
119 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
122 /* Return decl that should be used when reporting DEFAULT(NONE)
123 diagnostics. */
125 tree
126 gfc_omp_report_decl (tree decl)
128 if (DECL_ARTIFICIAL (decl)
129 && DECL_LANG_SPECIFIC (decl)
130 && GFC_DECL_SAVED_DESCRIPTOR (decl))
131 return GFC_DECL_SAVED_DESCRIPTOR (decl);
133 return decl;
 136 /* Return true if DECL in a PRIVATE clause needs
 137 OMP_CLAUSE_PRIVATE_OUTER_REF set on that clause. */
138 bool
139 gfc_omp_private_outer_ref (tree decl)
141 tree type = TREE_TYPE (decl);
143 if (GFC_DESCRIPTOR_TYPE_P (type)
144 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
145 return true;
147 return false;
150 /* Return code to initialize DECL with its default constructor, or
151 NULL if there's nothing to do. */
153 tree
154 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
156 tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
157 stmtblock_t block, cond_block;
159 if (! GFC_DESCRIPTOR_TYPE_P (type)
160 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
161 return NULL;
163 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION)
164 return NULL;
166 gcc_assert (outer != NULL);
167 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
168 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);
 170 /* Allocatable arrays in PRIVATE clauses need to be set to
 171 "not currently allocated" allocation status if the outer
 172 array is "not currently allocated"; otherwise they should be allocated. */
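  /* Roughly, the generated code is (illustrative pseudocode only):

         if (outer.data != NULL)
           {
             decl = outer;               // copy descriptor (bounds, strides)
             decl.data = malloc (size);  // size computed from the bounds
           }
         else
           decl.data = NULL;
   */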
173 gfc_start_block (&block);
175 gfc_init_block (&cond_block);
177 gfc_add_modify (&cond_block, decl, outer);
178 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
179 size = gfc_conv_descriptor_ubound_get (decl, rank);
180 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
181 size, gfc_conv_descriptor_lbound_get (decl, rank));
182 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
183 size, gfc_index_one_node);
184 if (GFC_TYPE_ARRAY_RANK (type) > 1)
185 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
186 size, gfc_conv_descriptor_stride_get (decl, rank));
187 esize = fold_convert (gfc_array_index_type,
188 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
189 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
190 size, esize);
191 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
193 ptr = gfc_create_var (pvoid_type_node, NULL);
194 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
195 gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
197 then_b = gfc_finish_block (&cond_block);
199 gfc_init_block (&cond_block);
200 gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
201 else_b = gfc_finish_block (&cond_block);
203 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
204 fold_convert (pvoid_type_node,
205 gfc_conv_descriptor_data_get (outer)),
206 null_pointer_node);
207 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
208 void_type_node, cond, then_b, else_b));
210 return gfc_finish_block (&block);
213 /* Build and return code for a copy constructor from SRC to DEST. */
215 tree
216 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
218 tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
219 tree cond, then_b, else_b;
220 stmtblock_t block, cond_block;
222 if (! GFC_DESCRIPTOR_TYPE_P (type)
223 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
224 return build2_v (MODIFY_EXPR, dest, src);
226 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
228 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
229 and copied from SRC. */
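  /* Roughly, the generated code is (illustrative pseudocode only):

         if (src.data != NULL)
           {
             dest = src;                 // copy descriptor
             dest.data = malloc (size);
             memcpy (dest.data, src.data, size);
           }
         else
           dest.data = NULL;
   */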
230 gfc_start_block (&block);
232 gfc_init_block (&cond_block);
234 gfc_add_modify (&cond_block, dest, src);
235 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
236 size = gfc_conv_descriptor_ubound_get (dest, rank);
237 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
238 size, gfc_conv_descriptor_lbound_get (dest, rank));
239 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
240 size, gfc_index_one_node);
241 if (GFC_TYPE_ARRAY_RANK (type) > 1)
242 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
243 size, gfc_conv_descriptor_stride_get (dest, rank));
244 esize = fold_convert (gfc_array_index_type,
245 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
246 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
247 size, esize);
248 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
250 ptr = gfc_create_var (pvoid_type_node, NULL);
251 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
252 gfc_conv_descriptor_data_set (&cond_block, dest, ptr);
254 call = build_call_expr_loc (input_location,
255 builtin_decl_explicit (BUILT_IN_MEMCPY),
256 3, ptr,
257 fold_convert (pvoid_type_node,
258 gfc_conv_descriptor_data_get (src)),
259 size);
260 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
261 then_b = gfc_finish_block (&cond_block);
263 gfc_init_block (&cond_block);
264 gfc_conv_descriptor_data_set (&cond_block, dest, null_pointer_node);
265 else_b = gfc_finish_block (&cond_block);
267 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
268 fold_convert (pvoid_type_node,
269 gfc_conv_descriptor_data_get (src)),
270 null_pointer_node);
271 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
272 void_type_node, cond, then_b, else_b));
274 return gfc_finish_block (&block);
277 /* Similarly, except use an assignment operator instead. */
279 tree
280 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
282 tree type = TREE_TYPE (dest), rank, size, esize, call;
283 stmtblock_t block;
285 if (! GFC_DESCRIPTOR_TYPE_P (type)
286 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
287 return build2_v (MODIFY_EXPR, dest, src);
289 /* Handle copying allocatable arrays. */
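  /* Both descriptors are assumed to describe allocated arrays of the same
     shape here, so roughly just (illustrative pseudocode):

         memcpy (dest.data, src.data, size);
   */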
290 gfc_start_block (&block);
292 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
293 size = gfc_conv_descriptor_ubound_get (dest, rank);
294 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
295 size, gfc_conv_descriptor_lbound_get (dest, rank));
296 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
297 size, gfc_index_one_node);
298 if (GFC_TYPE_ARRAY_RANK (type) > 1)
299 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
300 size, gfc_conv_descriptor_stride_get (dest, rank));
301 esize = fold_convert (gfc_array_index_type,
302 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
303 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
304 size, esize);
305 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
306 call = build_call_expr_loc (input_location,
307 builtin_decl_explicit (BUILT_IN_MEMCPY), 3,
308 fold_convert (pvoid_type_node,
309 gfc_conv_descriptor_data_get (dest)),
310 fold_convert (pvoid_type_node,
311 gfc_conv_descriptor_data_get (src)),
312 size);
313 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
315 return gfc_finish_block (&block);
 318 /* Build and return code for destructing DECL. Return NULL if there is
 319 nothing to be done. */
321 tree
 322 gfc_omp_clause_dtor (tree clause, tree decl)
324 tree type = TREE_TYPE (decl);
326 if (! GFC_DESCRIPTOR_TYPE_P (type)
327 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
328 return NULL;
330 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION)
331 return NULL;
333 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
334 to be deallocated if they were allocated. */
335 return gfc_trans_dealloc_allocated (decl, false, NULL);
 339 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
 340 disregarded in an OpenMP construct, because it is going to be
 341 remapped during OpenMP lowering. SHARED is true if DECL
 342 is going to be shared, false if it is going to be privatized. */
344 bool
345 gfc_omp_disregard_value_expr (tree decl, bool shared)
347 if (GFC_DECL_COMMON_OR_EQUIV (decl)
348 && DECL_HAS_VALUE_EXPR_P (decl))
350 tree value = DECL_VALUE_EXPR (decl);
352 if (TREE_CODE (value) == COMPONENT_REF
353 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
354 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
 356 /* If a variable in a COMMON or EQUIVALENCE is privatized, return
 357 true, as just that variable is supposed to be privatized,
 358 not the whole COMMON or the whole EQUIVALENCE.
 359 For shared variables in a COMMON or EQUIVALENCE, let them be
 360 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
 361 from the same COMMON or EQUIVALENCE a single sharing of the
 362 whole COMMON or EQUIVALENCE is enough. */
363 return ! shared;
367 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
368 return ! shared;
370 return false;
 373 /* Return true if DECL (which is shared iff SHARED is true) should
 374 be put into OMP_CLAUSE_PRIVATE with the OMP_CLAUSE_PRIVATE_DEBUG
 375 flag set. */
377 bool
378 gfc_omp_private_debug_clause (tree decl, bool shared)
380 if (GFC_DECL_CRAY_POINTEE (decl))
381 return true;
383 if (GFC_DECL_COMMON_OR_EQUIV (decl)
384 && DECL_HAS_VALUE_EXPR_P (decl))
386 tree value = DECL_VALUE_EXPR (decl);
388 if (TREE_CODE (value) == COMPONENT_REF
389 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
390 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
391 return shared;
394 return false;
397 /* Register language specific type size variables as potentially OpenMP
398 firstprivate variables. */
400 void
401 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
403 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
405 int r;
407 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
408 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
410 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
411 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
412 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
414 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
415 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
420 static inline tree
421 gfc_trans_add_clause (tree node, tree tail)
423 OMP_CLAUSE_CHAIN (node) = tail;
424 return node;
427 static tree
428 gfc_trans_omp_variable (gfc_symbol *sym)
430 tree t = gfc_get_symbol_decl (sym);
431 tree parent_decl;
432 int parent_flag;
433 bool return_value;
434 bool alternate_entry;
435 bool entry_master;
437 return_value = sym->attr.function && sym->result == sym;
438 alternate_entry = sym->attr.function && sym->attr.entry
439 && sym->result == sym;
440 entry_master = sym->attr.result
441 && sym->ns->proc_name->attr.entry_master
442 && !gfc_return_by_reference (sym->ns->proc_name);
443 parent_decl = DECL_CONTEXT (current_function_decl);
445 if ((t == parent_decl && return_value)
446 || (sym->ns && sym->ns->proc_name
447 && sym->ns->proc_name->backend_decl == parent_decl
448 && (alternate_entry || entry_master)))
449 parent_flag = 1;
450 else
451 parent_flag = 0;
 453 /* Special case for assigning the return value of a function.
 454 Self-recursive functions must have an explicit return value. */
455 if (return_value && (t == current_function_decl || parent_flag))
456 t = gfc_get_fake_result_decl (sym, parent_flag);
458 /* Similarly for alternate entry points. */
459 else if (alternate_entry
460 && (sym->ns->proc_name->backend_decl == current_function_decl
461 || parent_flag))
463 gfc_entry_list *el = NULL;
465 for (el = sym->ns->entries; el; el = el->next)
466 if (sym == el->sym)
468 t = gfc_get_fake_result_decl (sym, parent_flag);
469 break;
473 else if (entry_master
474 && (sym->ns->proc_name->backend_decl == current_function_decl
475 || parent_flag))
476 t = gfc_get_fake_result_decl (sym, parent_flag);
478 return t;
481 static tree
482 gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
483 tree list)
485 for (; namelist != NULL; namelist = namelist->next)
486 if (namelist->sym->attr.referenced)
488 tree t = gfc_trans_omp_variable (namelist->sym);
489 if (t != error_mark_node)
491 tree node = build_omp_clause (input_location, code);
492 OMP_CLAUSE_DECL (node) = t;
493 list = gfc_trans_add_clause (node, list);
496 return list;
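/* Helper for REDUCTION clauses on array symbols: build the
   OMP_CLAUSE_REDUCTION_INIT and OMP_CLAUSE_REDUCTION_MERGE statement lists
   as whole-array Fortran assignments.  Schematically (an illustrative
   sketch, not generated verbatim):

       init:   priv  = <identity of the operation>
       merge:  outer = outer <op> priv      (or outer = min (outer, priv), ...)

   If the array is allocatable, the private copy is additionally allocated
   with the outer variable's bounds before the init assignment and
   deallocated after the merge.  */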
499 static void
500 gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
502 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
503 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
504 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
505 gfc_expr *e1, *e2, *e3, *e4;
506 gfc_ref *ref;
507 tree decl, backend_decl, stmt, type, outer_decl;
508 locus old_loc = gfc_current_locus;
509 const char *iname;
510 bool t;
512 decl = OMP_CLAUSE_DECL (c);
513 gfc_current_locus = where;
514 type = TREE_TYPE (decl);
515 outer_decl = create_tmp_var_raw (type, NULL);
516 if (TREE_CODE (decl) == PARM_DECL
517 && TREE_CODE (type) == REFERENCE_TYPE
518 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
519 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
521 decl = build_fold_indirect_ref (decl);
522 type = TREE_TYPE (type);
525 /* Create a fake symbol for init value. */
526 memset (&init_val_sym, 0, sizeof (init_val_sym));
527 init_val_sym.ns = sym->ns;
528 init_val_sym.name = sym->name;
529 init_val_sym.ts = sym->ts;
530 init_val_sym.attr.referenced = 1;
531 init_val_sym.declared_at = where;
532 init_val_sym.attr.flavor = FL_VARIABLE;
533 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
534 init_val_sym.backend_decl = backend_decl;
536 /* Create a fake symbol for the outer array reference. */
537 outer_sym = *sym;
538 outer_sym.as = gfc_copy_array_spec (sym->as);
539 outer_sym.attr.dummy = 0;
540 outer_sym.attr.result = 0;
541 outer_sym.attr.flavor = FL_VARIABLE;
542 outer_sym.backend_decl = outer_decl;
543 if (decl != OMP_CLAUSE_DECL (c))
544 outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);
546 /* Create fake symtrees for it. */
547 symtree1 = gfc_new_symtree (&root1, sym->name);
548 symtree1->n.sym = sym;
549 gcc_assert (symtree1 == root1);
551 symtree2 = gfc_new_symtree (&root2, sym->name);
552 symtree2->n.sym = &init_val_sym;
553 gcc_assert (symtree2 == root2);
555 symtree3 = gfc_new_symtree (&root3, sym->name);
556 symtree3->n.sym = &outer_sym;
557 gcc_assert (symtree3 == root3);
559 /* Create expressions. */
560 e1 = gfc_get_expr ();
561 e1->expr_type = EXPR_VARIABLE;
562 e1->where = where;
563 e1->symtree = symtree1;
564 e1->ts = sym->ts;
565 e1->ref = ref = gfc_get_ref ();
566 ref->type = REF_ARRAY;
567 ref->u.ar.where = where;
568 ref->u.ar.as = sym->as;
569 ref->u.ar.type = AR_FULL;
570 ref->u.ar.dimen = 0;
571 t = gfc_resolve_expr (e1);
572 gcc_assert (t);
574 e2 = gfc_get_expr ();
575 e2->expr_type = EXPR_VARIABLE;
576 e2->where = where;
577 e2->symtree = symtree2;
578 e2->ts = sym->ts;
579 t = gfc_resolve_expr (e2);
580 gcc_assert (t);
582 e3 = gfc_copy_expr (e1);
583 e3->symtree = symtree3;
584 t = gfc_resolve_expr (e3);
585 gcc_assert (t);
587 iname = NULL;
588 switch (OMP_CLAUSE_REDUCTION_CODE (c))
590 case PLUS_EXPR:
591 case MINUS_EXPR:
592 e4 = gfc_add (e3, e1);
593 break;
594 case MULT_EXPR:
595 e4 = gfc_multiply (e3, e1);
596 break;
597 case TRUTH_ANDIF_EXPR:
598 e4 = gfc_and (e3, e1);
599 break;
600 case TRUTH_ORIF_EXPR:
601 e4 = gfc_or (e3, e1);
602 break;
603 case EQ_EXPR:
604 e4 = gfc_eqv (e3, e1);
605 break;
606 case NE_EXPR:
607 e4 = gfc_neqv (e3, e1);
608 break;
609 case MIN_EXPR:
610 iname = "min";
611 break;
612 case MAX_EXPR:
613 iname = "max";
614 break;
615 case BIT_AND_EXPR:
616 iname = "iand";
617 break;
618 case BIT_IOR_EXPR:
619 iname = "ior";
620 break;
621 case BIT_XOR_EXPR:
622 iname = "ieor";
623 break;
624 default:
625 gcc_unreachable ();
627 if (iname != NULL)
629 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
630 intrinsic_sym.ns = sym->ns;
631 intrinsic_sym.name = iname;
632 intrinsic_sym.ts = sym->ts;
633 intrinsic_sym.attr.referenced = 1;
634 intrinsic_sym.attr.intrinsic = 1;
635 intrinsic_sym.attr.function = 1;
636 intrinsic_sym.result = &intrinsic_sym;
637 intrinsic_sym.declared_at = where;
639 symtree4 = gfc_new_symtree (&root4, iname);
640 symtree4->n.sym = &intrinsic_sym;
641 gcc_assert (symtree4 == root4);
643 e4 = gfc_get_expr ();
644 e4->expr_type = EXPR_FUNCTION;
645 e4->where = where;
646 e4->symtree = symtree4;
647 e4->value.function.isym = gfc_find_function (iname);
648 e4->value.function.actual = gfc_get_actual_arglist ();
649 e4->value.function.actual->expr = e3;
650 e4->value.function.actual->next = gfc_get_actual_arglist ();
651 e4->value.function.actual->next->expr = e1;
 653 /* e1 and e3 have been stored as arguments of e4; copy them to avoid sharing. */
654 e1 = gfc_copy_expr (e1);
655 e3 = gfc_copy_expr (e3);
656 t = gfc_resolve_expr (e4);
657 gcc_assert (t);
659 /* Create the init statement list. */
660 pushlevel ();
661 if (GFC_DESCRIPTOR_TYPE_P (type)
662 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
664 /* If decl is an allocatable array, it needs to be allocated
665 with the same bounds as the outer var. */
666 tree rank, size, esize, ptr;
667 stmtblock_t block;
669 gfc_start_block (&block);
671 gfc_add_modify (&block, decl, outer_sym.backend_decl);
672 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
673 size = gfc_conv_descriptor_ubound_get (decl, rank);
674 size = fold_build2_loc (input_location, MINUS_EXPR,
675 gfc_array_index_type, size,
676 gfc_conv_descriptor_lbound_get (decl, rank));
677 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
678 size, gfc_index_one_node);
679 if (GFC_TYPE_ARRAY_RANK (type) > 1)
680 size = fold_build2_loc (input_location, MULT_EXPR,
681 gfc_array_index_type, size,
682 gfc_conv_descriptor_stride_get (decl, rank));
683 esize = fold_convert (gfc_array_index_type,
684 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
685 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
686 size, esize);
687 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
689 ptr = gfc_create_var (pvoid_type_node, NULL);
690 gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
691 gfc_conv_descriptor_data_set (&block, decl, ptr);
693 gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false,
694 false));
695 stmt = gfc_finish_block (&block);
697 else
698 stmt = gfc_trans_assignment (e1, e2, false, false);
699 if (TREE_CODE (stmt) != BIND_EXPR)
700 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
701 else
702 poplevel (0, 0);
703 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
705 /* Create the merge statement list. */
706 pushlevel ();
707 if (GFC_DESCRIPTOR_TYPE_P (type)
708 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
710 /* If decl is an allocatable array, it needs to be deallocated
711 afterwards. */
712 stmtblock_t block;
714 gfc_start_block (&block);
715 gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false,
716 true));
717 gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl, false,
718 NULL));
719 stmt = gfc_finish_block (&block);
721 else
722 stmt = gfc_trans_assignment (e3, e4, false, true);
723 if (TREE_CODE (stmt) != BIND_EXPR)
724 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
725 else
726 poplevel (0, 0);
727 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
729 /* And stick the placeholder VAR_DECL into the clause as well. */
730 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;
732 gfc_current_locus = old_loc;
734 gfc_free_expr (e1);
735 gfc_free_expr (e2);
736 gfc_free_expr (e3);
737 gfc_free_expr (e4);
738 free (symtree1);
739 free (symtree2);
740 free (symtree3);
741 free (symtree4);
742 gfc_free_array_spec (outer_sym.as);
745 static tree
746 gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
747 enum tree_code reduction_code, locus where)
749 for (; namelist != NULL; namelist = namelist->next)
750 if (namelist->sym->attr.referenced)
752 tree t = gfc_trans_omp_variable (namelist->sym);
753 if (t != error_mark_node)
755 tree node = build_omp_clause (where.lb->location,
756 OMP_CLAUSE_REDUCTION);
757 OMP_CLAUSE_DECL (node) = t;
758 OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
759 if (namelist->sym->attr.dimension)
760 gfc_trans_omp_array_reduction (node, namelist->sym, where);
761 list = gfc_trans_add_clause (node, list);
764 return list;
767 static tree
768 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
769 locus where)
771 tree omp_clauses = NULL_TREE, chunk_size, c;
772 int list;
773 enum omp_clause_code clause_code;
774 gfc_se se;
776 if (clauses == NULL)
777 return NULL_TREE;
779 for (list = 0; list < OMP_LIST_NUM; list++)
781 gfc_namelist *n = clauses->lists[list];
783 if (n == NULL)
784 continue;
785 if (list >= OMP_LIST_REDUCTION_FIRST
786 && list <= OMP_LIST_REDUCTION_LAST)
788 enum tree_code reduction_code;
789 switch (list)
791 case OMP_LIST_PLUS:
792 reduction_code = PLUS_EXPR;
793 break;
794 case OMP_LIST_MULT:
795 reduction_code = MULT_EXPR;
796 break;
797 case OMP_LIST_SUB:
798 reduction_code = MINUS_EXPR;
799 break;
800 case OMP_LIST_AND:
801 reduction_code = TRUTH_ANDIF_EXPR;
802 break;
803 case OMP_LIST_OR:
804 reduction_code = TRUTH_ORIF_EXPR;
805 break;
806 case OMP_LIST_EQV:
807 reduction_code = EQ_EXPR;
808 break;
809 case OMP_LIST_NEQV:
810 reduction_code = NE_EXPR;
811 break;
812 case OMP_LIST_MAX:
813 reduction_code = MAX_EXPR;
814 break;
815 case OMP_LIST_MIN:
816 reduction_code = MIN_EXPR;
817 break;
818 case OMP_LIST_IAND:
819 reduction_code = BIT_AND_EXPR;
820 break;
821 case OMP_LIST_IOR:
822 reduction_code = BIT_IOR_EXPR;
823 break;
824 case OMP_LIST_IEOR:
825 reduction_code = BIT_XOR_EXPR;
826 break;
827 default:
828 gcc_unreachable ();
830 omp_clauses
831 = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
832 where);
833 continue;
835 switch (list)
837 case OMP_LIST_PRIVATE:
838 clause_code = OMP_CLAUSE_PRIVATE;
839 goto add_clause;
840 case OMP_LIST_SHARED:
841 clause_code = OMP_CLAUSE_SHARED;
842 goto add_clause;
843 case OMP_LIST_FIRSTPRIVATE:
844 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
845 goto add_clause;
846 case OMP_LIST_LASTPRIVATE:
847 clause_code = OMP_CLAUSE_LASTPRIVATE;
848 goto add_clause;
849 case OMP_LIST_COPYIN:
850 clause_code = OMP_CLAUSE_COPYIN;
851 goto add_clause;
852 case OMP_LIST_COPYPRIVATE:
853 clause_code = OMP_CLAUSE_COPYPRIVATE;
854 /* FALLTHROUGH */
855 add_clause:
856 omp_clauses
857 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
858 break;
859 default:
860 break;
864 if (clauses->if_expr)
866 tree if_var;
868 gfc_init_se (&se, NULL);
869 gfc_conv_expr (&se, clauses->if_expr);
870 gfc_add_block_to_block (block, &se.pre);
871 if_var = gfc_evaluate_now (se.expr, block);
872 gfc_add_block_to_block (block, &se.post);
874 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
875 OMP_CLAUSE_IF_EXPR (c) = if_var;
876 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
879 if (clauses->final_expr)
881 tree final_var;
883 gfc_init_se (&se, NULL);
884 gfc_conv_expr (&se, clauses->final_expr);
885 gfc_add_block_to_block (block, &se.pre);
886 final_var = gfc_evaluate_now (se.expr, block);
887 gfc_add_block_to_block (block, &se.post);
889 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
890 OMP_CLAUSE_FINAL_EXPR (c) = final_var;
891 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
894 if (clauses->num_threads)
896 tree num_threads;
898 gfc_init_se (&se, NULL);
899 gfc_conv_expr (&se, clauses->num_threads);
900 gfc_add_block_to_block (block, &se.pre);
901 num_threads = gfc_evaluate_now (se.expr, block);
902 gfc_add_block_to_block (block, &se.post);
904 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
905 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
906 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
909 chunk_size = NULL_TREE;
910 if (clauses->chunk_size)
912 gfc_init_se (&se, NULL);
913 gfc_conv_expr (&se, clauses->chunk_size);
914 gfc_add_block_to_block (block, &se.pre);
915 chunk_size = gfc_evaluate_now (se.expr, block);
916 gfc_add_block_to_block (block, &se.post);
919 if (clauses->sched_kind != OMP_SCHED_NONE)
921 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
922 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
923 switch (clauses->sched_kind)
925 case OMP_SCHED_STATIC:
926 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
927 break;
928 case OMP_SCHED_DYNAMIC:
929 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
930 break;
931 case OMP_SCHED_GUIDED:
932 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
933 break;
934 case OMP_SCHED_RUNTIME:
935 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
936 break;
937 case OMP_SCHED_AUTO:
938 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
939 break;
940 default:
941 gcc_unreachable ();
943 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
946 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
948 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
949 switch (clauses->default_sharing)
951 case OMP_DEFAULT_NONE:
952 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
953 break;
954 case OMP_DEFAULT_SHARED:
955 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
956 break;
957 case OMP_DEFAULT_PRIVATE:
958 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
959 break;
960 case OMP_DEFAULT_FIRSTPRIVATE:
961 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
962 break;
963 default:
964 gcc_unreachable ();
966 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
969 if (clauses->nowait)
971 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
972 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
975 if (clauses->ordered)
977 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
978 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
981 if (clauses->untied)
983 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
984 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
987 if (clauses->mergeable)
989 c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
990 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
993 if (clauses->collapse)
995 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
996 OMP_CLAUSE_COLLAPSE_EXPR (c)
997 = build_int_cst (integer_type_node, clauses->collapse);
998 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1001 return omp_clauses;
1004 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
1006 static tree
1007 gfc_trans_omp_code (gfc_code *code, bool force_empty)
1009 tree stmt;
1011 pushlevel ();
1012 stmt = gfc_trans_code (code);
1013 if (TREE_CODE (stmt) != BIND_EXPR)
1015 if (!IS_EMPTY_STMT (stmt) || force_empty)
1017 tree block = poplevel (1, 0);
1018 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
1020 else
1021 poplevel (0, 0);
1023 else
1024 poplevel (0, 0);
1025 return stmt;
1029 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
1030 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
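/* Translate !$omp atomic, including its READ, WRITE, UPDATE and CAPTURE
   forms.  For example (illustrative), for

       !$omp atomic
       x = x + expr

   an OMP_ATOMIC node is built on &x with the combined right-hand side; the
   CAPTURE form uses OMP_ATOMIC_CAPTURE_OLD/NEW so that the captured value
   is stored into the capture variable as well.  */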
1032 static tree
1033 gfc_trans_omp_atomic (gfc_code *code)
1035 gfc_code *atomic_code = code;
1036 gfc_se lse;
1037 gfc_se rse;
1038 gfc_se vse;
1039 gfc_expr *expr2, *e;
1040 gfc_symbol *var;
1041 stmtblock_t block;
1042 tree lhsaddr, type, rhs, x;
1043 enum tree_code op = ERROR_MARK;
1044 enum tree_code aop = OMP_ATOMIC;
1045 bool var_on_left = false;
1047 code = code->block->next;
1048 gcc_assert (code->op == EXEC_ASSIGN);
1049 var = code->expr1->symtree->n.sym;
1051 gfc_init_se (&lse, NULL);
1052 gfc_init_se (&rse, NULL);
1053 gfc_init_se (&vse, NULL);
1054 gfc_start_block (&block);
1056 expr2 = code->expr2;
1057 if (expr2->expr_type == EXPR_FUNCTION
1058 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1059 expr2 = expr2->value.function.actual->expr;
1061 switch (atomic_code->ext.omp_atomic)
1063 case GFC_OMP_ATOMIC_READ:
1064 gfc_conv_expr (&vse, code->expr1);
1065 gfc_add_block_to_block (&block, &vse.pre);
1067 gfc_conv_expr (&lse, expr2);
1068 gfc_add_block_to_block (&block, &lse.pre);
1069 type = TREE_TYPE (lse.expr);
1070 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
1072 x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
1073 x = convert (TREE_TYPE (vse.expr), x);
1074 gfc_add_modify (&block, vse.expr, x);
1076 gfc_add_block_to_block (&block, &lse.pre);
1077 gfc_add_block_to_block (&block, &rse.pre);
1079 return gfc_finish_block (&block);
1080 case GFC_OMP_ATOMIC_CAPTURE:
1081 aop = OMP_ATOMIC_CAPTURE_NEW;
1082 if (expr2->expr_type == EXPR_VARIABLE)
1084 aop = OMP_ATOMIC_CAPTURE_OLD;
1085 gfc_conv_expr (&vse, code->expr1);
1086 gfc_add_block_to_block (&block, &vse.pre);
1088 gfc_conv_expr (&lse, expr2);
1089 gfc_add_block_to_block (&block, &lse.pre);
1090 gfc_init_se (&lse, NULL);
1091 code = code->next;
1092 var = code->expr1->symtree->n.sym;
1093 expr2 = code->expr2;
1094 if (expr2->expr_type == EXPR_FUNCTION
1095 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1096 expr2 = expr2->value.function.actual->expr;
1098 break;
1099 default:
1100 break;
1103 gfc_conv_expr (&lse, code->expr1);
1104 gfc_add_block_to_block (&block, &lse.pre);
1105 type = TREE_TYPE (lse.expr);
1106 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
1108 if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
1110 gfc_conv_expr (&rse, expr2);
1111 gfc_add_block_to_block (&block, &rse.pre);
1113 else if (expr2->expr_type == EXPR_OP)
1115 gfc_expr *e;
1116 switch (expr2->value.op.op)
1118 case INTRINSIC_PLUS:
1119 op = PLUS_EXPR;
1120 break;
1121 case INTRINSIC_TIMES:
1122 op = MULT_EXPR;
1123 break;
1124 case INTRINSIC_MINUS:
1125 op = MINUS_EXPR;
1126 break;
1127 case INTRINSIC_DIVIDE:
1128 if (expr2->ts.type == BT_INTEGER)
1129 op = TRUNC_DIV_EXPR;
1130 else
1131 op = RDIV_EXPR;
1132 break;
1133 case INTRINSIC_AND:
1134 op = TRUTH_ANDIF_EXPR;
1135 break;
1136 case INTRINSIC_OR:
1137 op = TRUTH_ORIF_EXPR;
1138 break;
1139 case INTRINSIC_EQV:
1140 op = EQ_EXPR;
1141 break;
1142 case INTRINSIC_NEQV:
1143 op = NE_EXPR;
1144 break;
1145 default:
1146 gcc_unreachable ();
1148 e = expr2->value.op.op1;
1149 if (e->expr_type == EXPR_FUNCTION
1150 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1151 e = e->value.function.actual->expr;
1152 if (e->expr_type == EXPR_VARIABLE
1153 && e->symtree != NULL
1154 && e->symtree->n.sym == var)
1156 expr2 = expr2->value.op.op2;
1157 var_on_left = true;
1159 else
1161 e = expr2->value.op.op2;
1162 if (e->expr_type == EXPR_FUNCTION
1163 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1164 e = e->value.function.actual->expr;
1165 gcc_assert (e->expr_type == EXPR_VARIABLE
1166 && e->symtree != NULL
1167 && e->symtree->n.sym == var);
1168 expr2 = expr2->value.op.op1;
1169 var_on_left = false;
1171 gfc_conv_expr (&rse, expr2);
1172 gfc_add_block_to_block (&block, &rse.pre);
1174 else
1176 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
1177 switch (expr2->value.function.isym->id)
1179 case GFC_ISYM_MIN:
1180 op = MIN_EXPR;
1181 break;
1182 case GFC_ISYM_MAX:
1183 op = MAX_EXPR;
1184 break;
1185 case GFC_ISYM_IAND:
1186 op = BIT_AND_EXPR;
1187 break;
1188 case GFC_ISYM_IOR:
1189 op = BIT_IOR_EXPR;
1190 break;
1191 case GFC_ISYM_IEOR:
1192 op = BIT_XOR_EXPR;
1193 break;
1194 default:
1195 gcc_unreachable ();
1197 e = expr2->value.function.actual->expr;
1198 gcc_assert (e->expr_type == EXPR_VARIABLE
1199 && e->symtree != NULL
1200 && e->symtree->n.sym == var);
1202 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
1203 gfc_add_block_to_block (&block, &rse.pre);
1204 if (expr2->value.function.actual->next->next != NULL)
1206 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
1207 gfc_actual_arglist *arg;
1209 gfc_add_modify (&block, accum, rse.expr);
1210 for (arg = expr2->value.function.actual->next->next; arg;
1211 arg = arg->next)
1213 gfc_init_block (&rse.pre);
1214 gfc_conv_expr (&rse, arg->expr);
1215 gfc_add_block_to_block (&block, &rse.pre);
1216 x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
1217 accum, rse.expr);
1218 gfc_add_modify (&block, accum, x);
1221 rse.expr = accum;
1224 expr2 = expr2->value.function.actual->next->expr;
1227 lhsaddr = save_expr (lhsaddr);
1228 rhs = gfc_evaluate_now (rse.expr, &block);
1230 if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
1231 x = rhs;
1232 else
1234 x = convert (TREE_TYPE (rhs),
1235 build_fold_indirect_ref_loc (input_location, lhsaddr));
1236 if (var_on_left)
1237 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
1238 else
1239 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
1242 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
1243 && TREE_CODE (type) != COMPLEX_TYPE)
1244 x = fold_build1_loc (input_location, REALPART_EXPR,
1245 TREE_TYPE (TREE_TYPE (rhs)), x);
1247 gfc_add_block_to_block (&block, &lse.pre);
1248 gfc_add_block_to_block (&block, &rse.pre);
1250 if (aop == OMP_ATOMIC)
1252 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
1253 gfc_add_expr_to_block (&block, x);
1255 else
1257 if (aop == OMP_ATOMIC_CAPTURE_NEW)
1259 code = code->next;
1260 expr2 = code->expr2;
1261 if (expr2->expr_type == EXPR_FUNCTION
1262 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1263 expr2 = expr2->value.function.actual->expr;
1265 gcc_assert (expr2->expr_type == EXPR_VARIABLE);
1266 gfc_conv_expr (&vse, code->expr1);
1267 gfc_add_block_to_block (&block, &vse.pre);
1269 gfc_init_se (&lse, NULL);
1270 gfc_conv_expr (&lse, expr2);
1271 gfc_add_block_to_block (&block, &lse.pre);
1273 x = build2 (aop, type, lhsaddr, convert (type, x));
1274 x = convert (TREE_TYPE (vse.expr), x);
1275 gfc_add_modify (&block, vse.expr, x);
1278 return gfc_finish_block (&block);
1281 static tree
1282 gfc_trans_omp_barrier (void)
1284 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
1285 return build_call_expr_loc (input_location, decl, 0);
1288 static tree
1289 gfc_trans_omp_critical (gfc_code *code)
1291 tree name = NULL_TREE, stmt;
1292 if (code->ext.omp_name != NULL)
1293 name = get_identifier (code->ext.omp_name);
1294 stmt = gfc_trans_code (code->block->next);
1295 return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
1298 typedef struct dovar_init_d {
1299 tree var;
1300 tree init;
1301 } dovar_init;
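/* Translate !$omp do (also used for the loop of a combined parallel do).
   Every collapsed Fortran DO loop contributes one element to the
   OMP_FOR_INIT/COND/INCR vectors.  Illustrative example:

       !$omp do
       do i = 1, n
         ...
       end do

   becomes an OMP_FOR with init "i = 1", cond "i <= n" and incr "i = i + 1".
   Loops whose step is not 1 or -1 iterate over an artificial zero-based
   count variable instead (see the comment inside the function).  */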
1304 static tree
1305 gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
1306 gfc_omp_clauses *do_clauses, tree par_clauses)
1308 gfc_se se;
1309 tree dovar, stmt, from, to, step, type, init, cond, incr;
1310 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
1311 stmtblock_t block;
1312 stmtblock_t body;
1313 gfc_omp_clauses *clauses = code->ext.omp_clauses;
1314 int i, collapse = clauses->collapse;
1315 vec<dovar_init> inits = vNULL;
1316 dovar_init *di;
1317 unsigned ix;
1319 if (collapse <= 0)
1320 collapse = 1;
1322 code = code->block->next;
1323 gcc_assert (code->op == EXEC_DO);
1325 init = make_tree_vec (collapse);
1326 cond = make_tree_vec (collapse);
1327 incr = make_tree_vec (collapse);
1329 if (pblock == NULL)
1331 gfc_start_block (&block);
1332 pblock = &block;
1335 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
1337 for (i = 0; i < collapse; i++)
1339 int simple = 0;
1340 int dovar_found = 0;
1341 tree dovar_decl;
1343 if (clauses)
1345 gfc_namelist *n;
1346 for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
1347 n = n->next)
1348 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1349 break;
1350 if (n != NULL)
1351 dovar_found = 1;
1352 else if (n == NULL)
1353 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
1354 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1355 break;
1356 if (n != NULL)
1357 dovar_found++;
1360 /* Evaluate all the expressions in the iterator. */
1361 gfc_init_se (&se, NULL);
1362 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1363 gfc_add_block_to_block (pblock, &se.pre);
1364 dovar = se.expr;
1365 type = TREE_TYPE (dovar);
1366 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
1368 gfc_init_se (&se, NULL);
1369 gfc_conv_expr_val (&se, code->ext.iterator->start);
1370 gfc_add_block_to_block (pblock, &se.pre);
1371 from = gfc_evaluate_now (se.expr, pblock);
1373 gfc_init_se (&se, NULL);
1374 gfc_conv_expr_val (&se, code->ext.iterator->end);
1375 gfc_add_block_to_block (pblock, &se.pre);
1376 to = gfc_evaluate_now (se.expr, pblock);
1378 gfc_init_se (&se, NULL);
1379 gfc_conv_expr_val (&se, code->ext.iterator->step);
1380 gfc_add_block_to_block (pblock, &se.pre);
1381 step = gfc_evaluate_now (se.expr, pblock);
1382 dovar_decl = dovar;
1384 /* Special case simple loops. */
1385 if (TREE_CODE (dovar) == VAR_DECL)
1387 if (integer_onep (step))
1388 simple = 1;
1389 else if (tree_int_cst_equal (step, integer_minus_one_node))
1390 simple = -1;
1392 else
1393 dovar_decl
1394 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);
1396 /* Loop body. */
1397 if (simple)
1399 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
1400 /* The condition should not be folded. */
1401 TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
1402 ? LE_EXPR : GE_EXPR,
1403 boolean_type_node, dovar, to);
1404 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
1405 type, dovar, step);
1406 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
1407 MODIFY_EXPR,
1408 type, dovar,
1409 TREE_VEC_ELT (incr, i));
1411 else
1413 /* STEP is not 1 or -1. Use:
 1414 for (count = 0; count < (to + step - from) / step; count++)
 1415 {
 1416 dovar = from + count * step;
1417 body;
1418 cycle_label:;
1419 } */
1420 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
1421 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
1422 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
1423 step);
1424 tmp = gfc_evaluate_now (tmp, pblock);
1425 count = gfc_create_var (type, "count");
1426 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
1427 build_int_cst (type, 0));
1428 /* The condition should not be folded. */
1429 TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
1430 boolean_type_node,
1431 count, tmp);
1432 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
1433 type, count,
1434 build_int_cst (type, 1));
1435 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
1436 MODIFY_EXPR, type, count,
1437 TREE_VEC_ELT (incr, i));
1439 /* Initialize DOVAR. */
1440 tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
1441 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
1442 dovar_init e = {dovar, tmp};
1443 inits.safe_push (e);
1446 if (!dovar_found)
1448 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
1449 OMP_CLAUSE_DECL (tmp) = dovar_decl;
1450 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1452 else if (dovar_found == 2)
1454 tree c = NULL;
1456 tmp = NULL;
1457 if (!simple)
 1459 /* If dovar is lastprivate, but a different counter is used,
 1460 dovar += step needs to be added to
 1461 OMP_CLAUSE_LASTPRIVATE_STMT; otherwise the copied dovar
 1462 will have the value on entry to the last iteration, rather
 1463 than the value after the iterator increment. */
1464 tmp = gfc_evaluate_now (step, pblock);
1465 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
1466 tmp);
1467 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
1468 dovar, tmp);
1469 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1470 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1471 && OMP_CLAUSE_DECL (c) == dovar_decl)
1473 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
1474 break;
1477 if (c == NULL && par_clauses != NULL)
1479 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1480 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1481 && OMP_CLAUSE_DECL (c) == dovar_decl)
1483 tree l = build_omp_clause (input_location,
1484 OMP_CLAUSE_LASTPRIVATE);
1485 OMP_CLAUSE_DECL (l) = dovar_decl;
1486 OMP_CLAUSE_CHAIN (l) = omp_clauses;
1487 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
1488 omp_clauses = l;
1489 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
1490 break;
1493 gcc_assert (simple || c != NULL);
1495 if (!simple)
1497 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
1498 OMP_CLAUSE_DECL (tmp) = count;
1499 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1502 if (i + 1 < collapse)
1503 code = code->block->next;
1506 if (pblock != &block)
1508 pushlevel ();
1509 gfc_start_block (&block);
1512 gfc_start_block (&body);
1514 FOR_EACH_VEC_ELT (inits, ix, di)
1515 gfc_add_modify (&body, di->var, di->init);
1516 inits.release ();
 1518 /* The CYCLE statement is implemented with a goto. An EXIT statement must
 1519 not be present in this loop. */
1520 cycle_label = gfc_build_label_decl (NULL_TREE);
1522 /* Put these labels where they can be found later. */
1524 code->cycle_label = cycle_label;
1525 code->exit_label = NULL_TREE;
1527 /* Main loop body. */
1528 tmp = gfc_trans_omp_code (code->block->next, true);
1529 gfc_add_expr_to_block (&body, tmp);
1531 /* Label for cycle statements (if needed). */
1532 if (TREE_USED (cycle_label))
1534 tmp = build1_v (LABEL_EXPR, cycle_label);
1535 gfc_add_expr_to_block (&body, tmp);
1538 /* End of loop body. */
1539 stmt = make_node (OMP_FOR);
1541 TREE_TYPE (stmt) = void_type_node;
1542 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
1543 OMP_FOR_CLAUSES (stmt) = omp_clauses;
1544 OMP_FOR_INIT (stmt) = init;
1545 OMP_FOR_COND (stmt) = cond;
1546 OMP_FOR_INCR (stmt) = incr;
1547 gfc_add_expr_to_block (&block, stmt);
1549 return gfc_finish_block (&block);
1552 static tree
1553 gfc_trans_omp_flush (void)
1555 tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
1556 return build_call_expr_loc (input_location, decl, 0);
1559 static tree
1560 gfc_trans_omp_master (gfc_code *code)
1562 tree stmt = gfc_trans_code (code->block->next);
1563 if (IS_EMPTY_STMT (stmt))
1564 return stmt;
1565 return build1_v (OMP_MASTER, stmt);
1568 static tree
1569 gfc_trans_omp_ordered (gfc_code *code)
1571 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
1574 static tree
1575 gfc_trans_omp_parallel (gfc_code *code)
1577 stmtblock_t block;
1578 tree stmt, omp_clauses;
1580 gfc_start_block (&block);
1581 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1582 code->loc);
1583 stmt = gfc_trans_omp_code (code->block->next, true);
1584 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1585 omp_clauses);
1586 gfc_add_expr_to_block (&block, stmt);
1587 return gfc_finish_block (&block);
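/* Translate !$omp parallel do by splitting the clauses: scheduling,
   ordered and collapse information goes to the inner OMP_FOR, everything
   else to the enclosing OMP_PARALLEL, and the OMP_FOR gets an implicit
   NOWAIT because the barrier at the end of the parallel region suffices.
   Roughly (illustrative):

       !$omp parallel do schedule(static) private(x)

   is emitted as an OMP_PARALLEL with private(x) containing an OMP_FOR
   with schedule(static) and nowait.  */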
1590 static tree
1591 gfc_trans_omp_parallel_do (gfc_code *code)
1593 stmtblock_t block, *pblock = NULL;
1594 gfc_omp_clauses parallel_clauses, do_clauses;
1595 tree stmt, omp_clauses = NULL_TREE;
1597 gfc_start_block (&block);
1599 memset (&do_clauses, 0, sizeof (do_clauses));
1600 if (code->ext.omp_clauses != NULL)
1602 memcpy (&parallel_clauses, code->ext.omp_clauses,
1603 sizeof (parallel_clauses));
1604 do_clauses.sched_kind = parallel_clauses.sched_kind;
1605 do_clauses.chunk_size = parallel_clauses.chunk_size;
1606 do_clauses.ordered = parallel_clauses.ordered;
1607 do_clauses.collapse = parallel_clauses.collapse;
1608 parallel_clauses.sched_kind = OMP_SCHED_NONE;
1609 parallel_clauses.chunk_size = NULL;
1610 parallel_clauses.ordered = false;
1611 parallel_clauses.collapse = 0;
1612 omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
1613 code->loc);
1615 do_clauses.nowait = true;
1616 if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
1617 pblock = &block;
1618 else
1619 pushlevel ();
1620 stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
1621 if (TREE_CODE (stmt) != BIND_EXPR)
1622 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1623 else
1624 poplevel (0, 0);
1625 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1626 omp_clauses);
1627 OMP_PARALLEL_COMBINED (stmt) = 1;
1628 gfc_add_expr_to_block (&block, stmt);
1629 return gfc_finish_block (&block);
1632 static tree
1633 gfc_trans_omp_parallel_sections (gfc_code *code)
1635 stmtblock_t block;
1636 gfc_omp_clauses section_clauses;
1637 tree stmt, omp_clauses;
1639 memset (&section_clauses, 0, sizeof (section_clauses));
1640 section_clauses.nowait = true;
1642 gfc_start_block (&block);
1643 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1644 code->loc);
1645 pushlevel ();
1646 stmt = gfc_trans_omp_sections (code, &section_clauses);
1647 if (TREE_CODE (stmt) != BIND_EXPR)
1648 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1649 else
1650 poplevel (0, 0);
1651 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1652 omp_clauses);
1653 OMP_PARALLEL_COMBINED (stmt) = 1;
1654 gfc_add_expr_to_block (&block, stmt);
1655 return gfc_finish_block (&block);
1658 static tree
1659 gfc_trans_omp_parallel_workshare (gfc_code *code)
1661 stmtblock_t block;
1662 gfc_omp_clauses workshare_clauses;
1663 tree stmt, omp_clauses;
1665 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
1666 workshare_clauses.nowait = true;
1668 gfc_start_block (&block);
1669 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1670 code->loc);
1671 pushlevel ();
1672 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
1673 if (TREE_CODE (stmt) != BIND_EXPR)
1674 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1675 else
1676 poplevel (0, 0);
1677 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
1678 omp_clauses);
1679 OMP_PARALLEL_COMBINED (stmt) = 1;
1680 gfc_add_expr_to_block (&block, stmt);
1681 return gfc_finish_block (&block);
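/* Translate !$omp sections.  Illustrative example:

       !$omp sections
       !$omp section
         call foo
       !$omp section
         call bar
       !$omp end sections

   Each non-empty section body is wrapped in an OMP_SECTION and the whole
   body in an OMP_SECTIONS statement; when a LASTPRIVATE clause is present
   the last section is chained in even if it is empty, because the
   lastprivate handling needs it.  */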
1684 static tree
1685 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
1687 stmtblock_t block, body;
1688 tree omp_clauses, stmt;
1689 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
1691 gfc_start_block (&block);
1693 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
1695 gfc_init_block (&body);
1696 for (code = code->block; code; code = code->block)
 1698 /* The last section is special because of lastprivate, so chain it
 1699 in even if it is empty. */
1700 stmt = gfc_trans_omp_code (code->next,
1701 has_lastprivate && code->block == NULL);
1702 if (! IS_EMPTY_STMT (stmt))
1704 stmt = build1_v (OMP_SECTION, stmt);
1705 gfc_add_expr_to_block (&body, stmt);
1708 stmt = gfc_finish_block (&body);
1710 stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
1711 omp_clauses);
1712 gfc_add_expr_to_block (&block, stmt);
1714 return gfc_finish_block (&block);
1717 static tree
1718 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
1720 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
1721 tree stmt = gfc_trans_omp_code (code->block->next, true);
1722 stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
1723 omp_clauses);
1724 return stmt;
1727 static tree
1728 gfc_trans_omp_task (gfc_code *code)
1730 stmtblock_t block;
1731 tree stmt, omp_clauses;
1733 gfc_start_block (&block);
1734 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1735 code->loc);
1736 stmt = gfc_trans_omp_code (code->block->next, true);
1737 stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
1738 omp_clauses);
1739 gfc_add_expr_to_block (&block, stmt);
1740 return gfc_finish_block (&block);
1743 static tree
1744 gfc_trans_omp_taskwait (void)
1746 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
1747 return build_call_expr_loc (input_location, decl, 0);
1750 static tree
1751 gfc_trans_omp_taskyield (void)
1753 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
1754 return build_call_expr_loc (input_location, decl, 0);
1757 static tree
1758 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
1760 tree res, tmp, stmt;
1761 stmtblock_t block, *pblock = NULL;
1762 stmtblock_t singleblock;
1763 int saved_ompws_flags;
1764 bool singleblock_in_progress = false;
 1765 /* True if the previous gfc_code in the workshare construct is not workshared. */
1766 bool prev_singleunit;
1768 code = code->block->next;
1770 pushlevel ();
1772 gfc_start_block (&block);
1773 pblock = &block;
1775 ompws_flags = OMPWS_WORKSHARE_FLAG;
1776 prev_singleunit = false;
1778 /* Translate statements one by one to trees until we reach
1779 the end of the workshare construct. Adjacent gfc_codes that
1780 are a single unit of work are clustered and encapsulated in a
1781 single OMP_SINGLE construct. */
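  /* Illustrative sketch: inside

         !$omp workshare
         a = b + c      ! array assignment - the scalarizer may workshare it
         x = 0          ! scalar assignment - stays a single unit
         !$omp end workshare

     the scalarizer can clear OMPWS_CURR_SINGLEUNIT for loops it divides
     among the threads, while consecutive statements that remain single
     units are clustered into one OMP_SINGLE construct.  */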
1782 for (; code; code = code->next)
1784 if (code->here != 0)
1786 res = gfc_trans_label_here (code);
1787 gfc_add_expr_to_block (pblock, res);
1790 /* No dependence analysis, use for clauses with wait.
1791 If this is the last gfc_code, use default omp_clauses. */
1792 if (code->next == NULL && clauses->nowait)
1793 ompws_flags |= OMPWS_NOWAIT;
1795 /* By default, every gfc_code is a single unit of work. */
1796 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
1797 ompws_flags &= ~OMPWS_SCALARIZER_WS;
1799 switch (code->op)
1801 case EXEC_NOP:
1802 res = NULL_TREE;
1803 break;
1805 case EXEC_ASSIGN:
1806 res = gfc_trans_assign (code);
1807 break;
1809 case EXEC_POINTER_ASSIGN:
1810 res = gfc_trans_pointer_assign (code);
1811 break;
1813 case EXEC_INIT_ASSIGN:
1814 res = gfc_trans_init_assign (code);
1815 break;
1817 case EXEC_FORALL:
1818 res = gfc_trans_forall (code);
1819 break;
1821 case EXEC_WHERE:
1822 res = gfc_trans_where (code);
1823 break;
1825 case EXEC_OMP_ATOMIC:
1826 res = gfc_trans_omp_directive (code);
1827 break;
1829 case EXEC_OMP_PARALLEL:
1830 case EXEC_OMP_PARALLEL_DO:
1831 case EXEC_OMP_PARALLEL_SECTIONS:
1832 case EXEC_OMP_PARALLEL_WORKSHARE:
1833 case EXEC_OMP_CRITICAL:
1834 saved_ompws_flags = ompws_flags;
1835 ompws_flags = 0;
1836 res = gfc_trans_omp_directive (code);
1837 ompws_flags = saved_ompws_flags;
1838 break;
1840 default:
1841 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
1844 gfc_set_backend_locus (&code->loc);
1846 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
1848 if (prev_singleunit)
1850 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1851 /* Add current gfc_code to single block. */
1852 gfc_add_expr_to_block (&singleblock, res);
1853 else
1855 /* Finish single block and add it to pblock. */
1856 tmp = gfc_finish_block (&singleblock);
1857 tmp = build2_loc (input_location, OMP_SINGLE,
1858 void_type_node, tmp, NULL_TREE);
1859 gfc_add_expr_to_block (pblock, tmp);
1860 /* Add current gfc_code to pblock. */
1861 gfc_add_expr_to_block (pblock, res);
1862 singleblock_in_progress = false;
1865 else
1867 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1869 /* Start single block. */
1870 gfc_init_block (&singleblock);
1871 gfc_add_expr_to_block (&singleblock, res);
1872 singleblock_in_progress = true;
1874 else
1875 /* Add the new statement to the block. */
1876 gfc_add_expr_to_block (pblock, res);
1878 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
1882 /* Finish remaining SINGLE block, if we were in the middle of one. */
1883 if (singleblock_in_progress)
1885 /* Finish single block and add it to pblock. */
1886 tmp = gfc_finish_block (&singleblock);
1887 tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
1888 clauses->nowait
1889 ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
1890 : NULL_TREE);
1891 gfc_add_expr_to_block (pblock, tmp);
1894 stmt = gfc_finish_block (pblock);
1895 if (TREE_CODE (stmt) != BIND_EXPR)
1897 if (!IS_EMPTY_STMT (stmt))
1899 tree bindblock = poplevel (1, 0);
1900 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
1902 else
1903 poplevel (0, 0);
1905 else
1906 poplevel (0, 0);
1908 if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
1909 stmt = gfc_trans_omp_barrier ();
1911 ompws_flags = 0;
1912 return stmt;
1915 tree
1916 gfc_trans_omp_directive (gfc_code *code)
1918 switch (code->op)
1920 case EXEC_OMP_ATOMIC:
1921 return gfc_trans_omp_atomic (code);
1922 case EXEC_OMP_BARRIER:
1923 return gfc_trans_omp_barrier ();
1924 case EXEC_OMP_CRITICAL:
1925 return gfc_trans_omp_critical (code);
1926 case EXEC_OMP_DO:
1927 return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
1928 case EXEC_OMP_FLUSH:
1929 return gfc_trans_omp_flush ();
1930 case EXEC_OMP_MASTER:
1931 return gfc_trans_omp_master (code);
1932 case EXEC_OMP_ORDERED:
1933 return gfc_trans_omp_ordered (code);
1934 case EXEC_OMP_PARALLEL:
1935 return gfc_trans_omp_parallel (code);
1936 case EXEC_OMP_PARALLEL_DO:
1937 return gfc_trans_omp_parallel_do (code);
1938 case EXEC_OMP_PARALLEL_SECTIONS:
1939 return gfc_trans_omp_parallel_sections (code);
1940 case EXEC_OMP_PARALLEL_WORKSHARE:
1941 return gfc_trans_omp_parallel_workshare (code);
1942 case EXEC_OMP_SECTIONS:
1943 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
1944 case EXEC_OMP_SINGLE:
1945 return gfc_trans_omp_single (code, code->ext.omp_clauses);
1946 case EXEC_OMP_TASK:
1947 return gfc_trans_omp_task (code);
1948 case EXEC_OMP_TASKWAIT:
1949 return gfc_trans_omp_taskwait ();
1950 case EXEC_OMP_TASKYIELD:
1951 return gfc_trans_omp_taskyield ();
1952 case EXEC_OMP_WORKSHARE:
1953 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
1954 default:
1955 gcc_unreachable ();