/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"             /* For create_tmp_var_raw.  */
#include "diagnostic-core.h"    /* For internal_error.  */
#include "gfortran.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"

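/* State of the OpenMP WORKSHARE translation: a mask of the OMPWS_* flags
   from trans.h, also consulted by the scalarizer to decide whether an
   array assignment inside a workshare construct may be workshared.  */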
int ompws_flags;

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
gfc_omp_privatize_by_reference (const_tree decl)
{
  tree type = TREE_TYPE (decl);

  if (TREE_CODE (type) == REFERENCE_TYPE
      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
    return true;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
         that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
         set are supposed to be privatized by reference.  */
      if (GFC_POINTER_TYPE_P (type))
        return false;

      if (!DECL_ARTIFICIAL (decl)
          && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
        return true;

      /* Some arrays are expanded as DECL_ARTIFICIAL pointers
         by the frontend.  */
      if (DECL_LANG_SPECIFIC (decl)
          && GFC_DECL_SAVED_DESCRIPTOR (decl))
        return true;
    }

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
gfc_omp_predetermined_sharing (tree decl)
{
  if (DECL_ARTIFICIAL (decl)
      && ! GFC_DECL_RESULT (decl)
      && ! (DECL_LANG_SPECIFIC (decl)
            && GFC_DECL_SAVED_DESCRIPTOR (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Cray pointees shouldn't be listed in any clauses and should be
     gimplified to dereference of the corresponding Cray pointer.
     Make them all private, so that they are emitted in the debug
     information.  */
  if (GFC_DECL_CRAY_POINTEE (decl))
    return OMP_CLAUSE_DEFAULT_PRIVATE;

  /* Assumed-size arrays are predetermined shared.  */
  if (TREE_CODE (decl) == PARM_DECL
      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
                                GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
         == NULL)
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Dummy procedures aren't considered variables by OpenMP, thus are
     disallowed in OpenMP clauses.  They are represented as PARM_DECLs
     in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
     to avoid complaining about their uses with default(none).  */
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  /* COMMON and EQUIVALENCE decls are shared.  They
     are only referenced through DECL_VALUE_EXPR of the variables
     contained in them.  If those are privatized, they will not be
     gimplified to the COMMON or EQUIVALENCE decls.  */
  if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Return decl that should be used when reporting DEFAULT(NONE)
   diagnostics.  */

tree
gfc_omp_report_decl (tree decl)
{
  if (DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && GFC_DECL_SAVED_DESCRIPTOR (decl))
    return GFC_DECL_SAVED_DESCRIPTOR (decl);

  return decl;
}

/* Return true if DECL in private clause needs
   OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.  */
bool
gfc_omp_private_outer_ref (tree decl)
{
  tree type = TREE_TYPE (decl);

  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    return true;

  return false;
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
  tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
  stmtblock_t block, cond_block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  gcc_assert (outer != NULL);
  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
              || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);

  /* Allocatable arrays in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if outer
     array is "not currently allocated", otherwise should be allocated.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, decl, outer);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (decl, rank);
  size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
                          size, gfc_conv_descriptor_lbound_get (decl, rank));
  size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
                          size, gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
                            size, gfc_conv_descriptor_stride_get (decl, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
                          size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);

  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  gfc_conv_descriptor_data_set (&cond_block, decl, ptr);

  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
                          fold_convert (pvoid_type_node,
                                        gfc_conv_descriptor_data_get (outer)),
                          null_pointer_node);
  gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
                         void_type_node, cond, then_b, else_b));

  return gfc_finish_block (&block);
}

/* Build and return code for a copy constructor from SRC to DEST.  */

tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
  tree cond, then_b, else_b;
  stmtblock_t block, cond_block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);

  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, dest, src);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
                          size, gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
                          size, gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
                            size, gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
                          size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);

  ptr = gfc_create_var (pvoid_type_node, NULL);
  gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
  gfc_conv_descriptor_data_set (&cond_block, dest, ptr);

  call = build_call_expr_loc (input_location,
                              builtin_decl_explicit (BUILT_IN_MEMCPY),
                              3, ptr,
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (src)),
                              size);
  gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  gfc_conv_descriptor_data_set (&cond_block, dest, null_pointer_node);
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
                          fold_convert (pvoid_type_node,
                                        gfc_conv_descriptor_data_get (src)),
                          null_pointer_node);
  gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
                         void_type_node, cond, then_b, else_b));

  return gfc_finish_block (&block);
}

/* Similarly, except use an assignment operator instead.  */

tree
gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), rank, size, esize, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  /* Handle copying allocatable arrays.  */
  gfc_start_block (&block);

  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
                          size, gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
                          size, gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
                            size, gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
                          size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  call = build_call_expr_loc (input_location,
                              builtin_decl_explicit (BUILT_IN_MEMCPY), 3,
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (dest)),
                              fold_convert (pvoid_type_node,
                                            gfc_conv_descriptor_data_get (src)),
                              size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}

/* Build and return code destructing DECL.  Return NULL if nothing
   to be done.  */

tree
gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
{
  tree type = TREE_TYPE (decl);

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
     to be deallocated if they were allocated.  */
  return gfc_trans_dealloc_allocated (decl, false);
}

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
gfc_omp_disregard_value_expr (tree decl, bool shared)
{
  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
          && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
        {
          /* If variable in COMMON or EQUIVALENCE is privatized, return
             true, as just that variable is supposed to be privatized,
             not the whole COMMON or whole EQUIVALENCE.
             For shared variables in COMMON or EQUIVALENCE, let them be
             gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
             from the same COMMON or EQUIVALENCE just one sharing of the
             whole COMMON or EQUIVALENCE is enough.  */
          return ! shared;
        }
    }

  if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
    return ! shared;

  return false;
}

/* Return true if DECL that is shared iff SHARED is true should
   be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
   flag set.  */

bool
gfc_omp_private_debug_clause (tree decl, bool shared)
{
  if (GFC_DECL_CRAY_POINTEE (decl))
    return true;

  if (GFC_DECL_COMMON_OR_EQUIV (decl)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value = DECL_VALUE_EXPR (decl);

      if (TREE_CODE (value) == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
          && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
        return shared;
    }

  return false;
}

/* Register language specific type size variables as potentially OpenMP
   firstprivate variables.  */

void
gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
    {
      int r;

      gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
      for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
        {
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
          omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
        }
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
      omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
    }
}

static inline tree
gfc_trans_add_clause (tree node, tree tail)
{
  OMP_CLAUSE_CHAIN (node) = tail;
  return node;
}

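/* Return the backend decl an OpenMP clause should name for SYM.  Function
   results and alternate ENTRY points have no decl of their own, so they
   are mapped to the fake result decl instead.  */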
static tree
gfc_trans_omp_variable (gfc_symbol *sym)
{
  tree t = gfc_get_symbol_decl (sym);
  tree parent_decl;
  int parent_flag;
  bool return_value;
  bool alternate_entry;
  bool entry_master;

  return_value = sym->attr.function && sym->result == sym;
  alternate_entry = sym->attr.function && sym->attr.entry
                    && sym->result == sym;
  entry_master = sym->attr.result
                 && sym->ns->proc_name->attr.entry_master
                 && !gfc_return_by_reference (sym->ns->proc_name);
  parent_decl = DECL_CONTEXT (current_function_decl);

  if ((t == parent_decl && return_value)
      || (sym->ns && sym->ns->proc_name
          && sym->ns->proc_name->backend_decl == parent_decl
          && (alternate_entry || entry_master)))
    parent_flag = 1;
  else
    parent_flag = 0;

  /* Special case for assigning the return value of a function.
     Self recursive functions must have an explicit return value.  */
  if (return_value && (t == current_function_decl || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  /* Similarly for alternate entry points.  */
  else if (alternate_entry
           && (sym->ns->proc_name->backend_decl == current_function_decl
               || parent_flag))
    {
      gfc_entry_list *el = NULL;

      for (el = sym->ns->entries; el; el = el->next)
        if (sym == el->sym)
          {
            t = gfc_get_fake_result_decl (sym, parent_flag);
            break;
          }
    }

  else if (entry_master
           && (sym->ns->proc_name->backend_decl == current_function_decl
               || parent_flag))
    t = gfc_get_fake_result_decl (sym, parent_flag);

  return t;
}

static tree
gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
                             tree list)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
        tree t = gfc_trans_omp_variable (namelist->sym);
        if (t != error_mark_node)
          {
            tree node = build_omp_clause (input_location, code);
            OMP_CLAUSE_DECL (node) = t;
            list = gfc_trans_add_clause (node, list);
          }
      }
  return list;
}

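/* Build the REDUCTION_INIT and REDUCTION_MERGE fragments for an array
   reduction such as

     !$omp parallel do reduction(+:a)   ! where A is an array

   Fake symbols and symtrees are created so that the initialization and
   merge statements can be generated with the ordinary Fortran assignment
   machinery; allocatable arrays are additionally allocated with the bounds
   of the outer variable and deallocated again after the merge.  */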
static void
gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
{
  gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
  gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
  gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
  gfc_expr *e1, *e2, *e3, *e4;
  gfc_ref *ref;
  tree decl, backend_decl, stmt, type, outer_decl;
  locus old_loc = gfc_current_locus;
  const char *iname;
  gfc_try t;

  decl = OMP_CLAUSE_DECL (c);
  gfc_current_locus = where;
  type = TREE_TYPE (decl);
  outer_decl = create_tmp_var_raw (type, NULL);
  if (TREE_CODE (decl) == PARM_DECL
      && TREE_CODE (type) == REFERENCE_TYPE
      && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
    {
      decl = build_fold_indirect_ref (decl);
      type = TREE_TYPE (type);
    }

  /* Create a fake symbol for init value.  */
  memset (&init_val_sym, 0, sizeof (init_val_sym));
  init_val_sym.ns = sym->ns;
  init_val_sym.name = sym->name;
  init_val_sym.ts = sym->ts;
  init_val_sym.attr.referenced = 1;
  init_val_sym.declared_at = where;
  init_val_sym.attr.flavor = FL_VARIABLE;
  backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
  init_val_sym.backend_decl = backend_decl;

  /* Create a fake symbol for the outer array reference.  */
  outer_sym = *sym;
  outer_sym.as = gfc_copy_array_spec (sym->as);
  outer_sym.attr.dummy = 0;
  outer_sym.attr.result = 0;
  outer_sym.attr.flavor = FL_VARIABLE;
  outer_sym.backend_decl = outer_decl;
  if (decl != OMP_CLAUSE_DECL (c))
    outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);

  /* Create fake symtrees for it.  */
  symtree1 = gfc_new_symtree (&root1, sym->name);
  symtree1->n.sym = sym;
  gcc_assert (symtree1 == root1);

  symtree2 = gfc_new_symtree (&root2, sym->name);
  symtree2->n.sym = &init_val_sym;
  gcc_assert (symtree2 == root2);

  symtree3 = gfc_new_symtree (&root3, sym->name);
  symtree3->n.sym = &outer_sym;
  gcc_assert (symtree3 == root3);

  /* Create expressions.  */
  e1 = gfc_get_expr ();
  e1->expr_type = EXPR_VARIABLE;
  e1->where = where;
  e1->symtree = symtree1;
  e1->ts = sym->ts;
  e1->ref = ref = gfc_get_ref ();
  ref->type = REF_ARRAY;
  ref->u.ar.where = where;
  ref->u.ar.as = sym->as;
  ref->u.ar.type = AR_FULL;
  ref->u.ar.dimen = 0;
  t = gfc_resolve_expr (e1);
  gcc_assert (t == SUCCESS);

  e2 = gfc_get_expr ();
  e2->expr_type = EXPR_VARIABLE;
  e2->where = where;
  e2->symtree = symtree2;
  e2->ts = sym->ts;
  t = gfc_resolve_expr (e2);
  gcc_assert (t == SUCCESS);

  e3 = gfc_copy_expr (e1);
  e3->symtree = symtree3;
  t = gfc_resolve_expr (e3);
  gcc_assert (t == SUCCESS);

  iname = NULL;
  switch (OMP_CLAUSE_REDUCTION_CODE (c))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      e4 = gfc_add (e3, e1);
      break;
    case MULT_EXPR:
      e4 = gfc_multiply (e3, e1);
      break;
    case TRUTH_ANDIF_EXPR:
      e4 = gfc_and (e3, e1);
      break;
    case TRUTH_ORIF_EXPR:
      e4 = gfc_or (e3, e1);
      break;
    case EQ_EXPR:
      e4 = gfc_eqv (e3, e1);
      break;
    case NE_EXPR:
      e4 = gfc_neqv (e3, e1);
      break;
    case MIN_EXPR:
      iname = "min";
      break;
    case MAX_EXPR:
      iname = "max";
      break;
    case BIT_AND_EXPR:
      iname = "iand";
      break;
    case BIT_IOR_EXPR:
      iname = "ior";
      break;
    case BIT_XOR_EXPR:
      iname = "ieor";
      break;
    default:
      gcc_unreachable ();
    }
  if (iname != NULL)
    {
      memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
      intrinsic_sym.ns = sym->ns;
      intrinsic_sym.name = iname;
      intrinsic_sym.ts = sym->ts;
      intrinsic_sym.attr.referenced = 1;
      intrinsic_sym.attr.intrinsic = 1;
      intrinsic_sym.attr.function = 1;
      intrinsic_sym.result = &intrinsic_sym;
      intrinsic_sym.declared_at = where;

      symtree4 = gfc_new_symtree (&root4, iname);
      symtree4->n.sym = &intrinsic_sym;
      gcc_assert (symtree4 == root4);

      e4 = gfc_get_expr ();
      e4->expr_type = EXPR_FUNCTION;
      e4->where = where;
      e4->symtree = symtree4;
      e4->value.function.isym = gfc_find_function (iname);
      e4->value.function.actual = gfc_get_actual_arglist ();
      e4->value.function.actual->expr = e3;
      e4->value.function.actual->next = gfc_get_actual_arglist ();
      e4->value.function.actual->next->expr = e1;
    }
  /* e1 and e3 have been stored as arguments of e4, avoid sharing.  */
  e1 = gfc_copy_expr (e1);
  e3 = gfc_copy_expr (e3);
  t = gfc_resolve_expr (e4);
  gcc_assert (t == SUCCESS);

  /* Create the init statement list.  */
  pushlevel ();
  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be allocated
         with the same bounds as the outer var.  */
      tree rank, size, esize, ptr;
      stmtblock_t block;

      gfc_start_block (&block);

      gfc_add_modify (&block, decl, outer_sym.backend_decl);
      rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      size = gfc_conv_descriptor_ubound_get (decl, rank);
      size = fold_build2_loc (input_location, MINUS_EXPR,
                              gfc_array_index_type, size,
                              gfc_conv_descriptor_lbound_get (decl, rank));
      size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
                              size, gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
        size = fold_build2_loc (input_location, MULT_EXPR,
                                gfc_array_index_type, size,
                                gfc_conv_descriptor_stride_get (decl, rank));
      esize = fold_convert (gfc_array_index_type,
                            TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
                              size, esize);
      size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);

      ptr = gfc_create_var (pvoid_type_node, NULL);
      gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
      gfc_conv_descriptor_data_set (&block, decl, ptr);

      gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false,
                                                           false));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e1, e2, false, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  OMP_CLAUSE_REDUCTION_INIT (c) = stmt;

  /* Create the merge statement list.  */
  pushlevel ();
  if (GFC_DESCRIPTOR_TYPE_P (type)
      && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be deallocated
         afterwards.  */
      stmtblock_t block;

      gfc_start_block (&block);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false,
                                                           true));
      gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl, false));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e3, e4, false, true);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;

  /* And stick the placeholder VAR_DECL into the clause as well.  */
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;

  gfc_current_locus = old_loc;

  gfc_free_expr (e1);
  gfc_free_expr (e2);
  gfc_free_expr (e3);
  gfc_free_expr (e4);
  free (symtree1);
  free (symtree2);
  free (symtree3);
  free (symtree4);
  gfc_free_array_spec (outer_sym.as);
}

static tree
gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
                              enum tree_code reduction_code, locus where)
{
  for (; namelist != NULL; namelist = namelist->next)
    if (namelist->sym->attr.referenced)
      {
        tree t = gfc_trans_omp_variable (namelist->sym);
        if (t != error_mark_node)
          {
            tree node = build_omp_clause (where.lb->location,
                                          OMP_CLAUSE_REDUCTION);
            OMP_CLAUSE_DECL (node) = t;
            OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
            if (namelist->sym->attr.dimension)
              gfc_trans_omp_array_reduction (node, namelist->sym, where);
            list = gfc_trans_add_clause (node, list);
          }
      }
  return list;
}

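/* Convert the clause list attached to an OpenMP directive into a chain of
   OMP_CLAUSE_* trees.  Expressions appearing in clauses (IF, FINAL,
   NUM_THREADS, SCHEDULE chunk size) are evaluated into BLOCK first.  */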
static tree
gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
                       locus where)
{
  tree omp_clauses = NULL_TREE, chunk_size, c;
  int list;
  enum omp_clause_code clause_code;
  gfc_se se;

  if (clauses == NULL)
    return NULL_TREE;

  for (list = 0; list < OMP_LIST_NUM; list++)
    {
      gfc_namelist *n = clauses->lists[list];

      if (n == NULL)
        continue;
      if (list >= OMP_LIST_REDUCTION_FIRST
          && list <= OMP_LIST_REDUCTION_LAST)
        {
          enum tree_code reduction_code;
          switch (list)
            {
            case OMP_LIST_PLUS:
              reduction_code = PLUS_EXPR;
              break;
            case OMP_LIST_MULT:
              reduction_code = MULT_EXPR;
              break;
            case OMP_LIST_SUB:
              reduction_code = MINUS_EXPR;
              break;
            case OMP_LIST_AND:
              reduction_code = TRUTH_ANDIF_EXPR;
              break;
            case OMP_LIST_OR:
              reduction_code = TRUTH_ORIF_EXPR;
              break;
            case OMP_LIST_EQV:
              reduction_code = EQ_EXPR;
              break;
            case OMP_LIST_NEQV:
              reduction_code = NE_EXPR;
              break;
            case OMP_LIST_MAX:
              reduction_code = MAX_EXPR;
              break;
            case OMP_LIST_MIN:
              reduction_code = MIN_EXPR;
              break;
            case OMP_LIST_IAND:
              reduction_code = BIT_AND_EXPR;
              break;
            case OMP_LIST_IOR:
              reduction_code = BIT_IOR_EXPR;
              break;
            case OMP_LIST_IEOR:
              reduction_code = BIT_XOR_EXPR;
              break;
            default:
              gcc_unreachable ();
            }
          omp_clauses
            = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
                                            where);
          continue;
        }
      switch (list)
        {
        case OMP_LIST_PRIVATE:
          clause_code = OMP_CLAUSE_PRIVATE;
          goto add_clause;
        case OMP_LIST_SHARED:
          clause_code = OMP_CLAUSE_SHARED;
          goto add_clause;
        case OMP_LIST_FIRSTPRIVATE:
          clause_code = OMP_CLAUSE_FIRSTPRIVATE;
          goto add_clause;
        case OMP_LIST_LASTPRIVATE:
          clause_code = OMP_CLAUSE_LASTPRIVATE;
          goto add_clause;
        case OMP_LIST_COPYIN:
          clause_code = OMP_CLAUSE_COPYIN;
          goto add_clause;
        case OMP_LIST_COPYPRIVATE:
          clause_code = OMP_CLAUSE_COPYPRIVATE;
          /* FALLTHROUGH */
        add_clause:
          omp_clauses
            = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
          break;
        default:
          break;
        }
    }

  if (clauses->if_expr)
    {
      tree if_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->if_expr);
      gfc_add_block_to_block (block, &se.pre);
      if_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
      OMP_CLAUSE_IF_EXPR (c) = if_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->final_expr)
    {
      tree final_var;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->final_expr);
      gfc_add_block_to_block (block, &se.pre);
      final_var = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
      OMP_CLAUSE_FINAL_EXPR (c) = final_var;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->num_threads)
    {
      tree num_threads;

      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->num_threads);
      gfc_add_block_to_block (block, &se.pre);
      num_threads = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);

      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
      OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  chunk_size = NULL_TREE;
  if (clauses->chunk_size)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, clauses->chunk_size);
      gfc_add_block_to_block (block, &se.pre);
      chunk_size = gfc_evaluate_now (se.expr, block);
      gfc_add_block_to_block (block, &se.post);
    }

  if (clauses->sched_kind != OMP_SCHED_NONE)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
      OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
      switch (clauses->sched_kind)
        {
        case OMP_SCHED_STATIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
          break;
        case OMP_SCHED_DYNAMIC:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
          break;
        case OMP_SCHED_GUIDED:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
          break;
        case OMP_SCHED_RUNTIME:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
          break;
        case OMP_SCHED_AUTO:
          OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
      switch (clauses->default_sharing)
        {
        case OMP_DEFAULT_NONE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
          break;
        case OMP_DEFAULT_SHARED:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
          break;
        case OMP_DEFAULT_PRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
          break;
        case OMP_DEFAULT_FIRSTPRIVATE:
          OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          break;
        default:
          gcc_unreachable ();
        }
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->nowait)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->ordered)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->untied)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->mergeable)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  if (clauses->collapse)
    {
      c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
      OMP_CLAUSE_COLLAPSE_EXPR (c)
        = build_int_cst (integer_type_node, clauses->collapse);
      omp_clauses = gfc_trans_add_clause (c, omp_clauses);
    }

  return omp_clauses;
}

/* Like gfc_trans_code, but force creation of a BIND_EXPR around it.  */

static tree
gfc_trans_omp_code (gfc_code *code, bool force_empty)
{
  tree stmt;

  pushlevel ();
  stmt = gfc_trans_code (code);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt) || force_empty)
        {
          tree block = poplevel (1, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, block);
        }
      else
        poplevel (0, 0);
    }
  else
    poplevel (0, 0);
  return stmt;
}

static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);

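/* Translate !$omp atomic, including the READ, WRITE, UPDATE and CAPTURE
   forms, e.g.

     !$omp atomic
       x = x + expr

   The atomic variable must appear on exactly one side of the operation;
   the other operand is evaluated outside the atomic region and the update
   itself is emitted as an OMP_ATOMIC* tree on the address of the variable.  */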
static tree
gfc_trans_omp_atomic (gfc_code *code)
{
  gfc_code *atomic_code = code;
  gfc_se lse;
  gfc_se rse;
  gfc_se vse;
  gfc_expr *expr2, *e;
  gfc_symbol *var;
  stmtblock_t block;
  tree lhsaddr, type, rhs, x;
  enum tree_code op = ERROR_MARK;
  enum tree_code aop = OMP_ATOMIC;
  bool var_on_left = false;

  code = code->block->next;
  gcc_assert (code->op == EXEC_ASSIGN);
  var = code->expr1->symtree->n.sym;

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);
  gfc_init_se (&vse, NULL);
  gfc_start_block (&block);

  expr2 = code->expr2;
  if (expr2->expr_type == EXPR_FUNCTION
      && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
    expr2 = expr2->value.function.actual->expr;

  switch (atomic_code->ext.omp_atomic)
    {
    case GFC_OMP_ATOMIC_READ:
      gfc_conv_expr (&vse, code->expr1);
      gfc_add_block_to_block (&block, &vse.pre);

      gfc_conv_expr (&lse, expr2);
      gfc_add_block_to_block (&block, &lse.pre);
      type = TREE_TYPE (lse.expr);
      lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

      x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
      x = convert (TREE_TYPE (vse.expr), x);
      gfc_add_modify (&block, vse.expr, x);

      gfc_add_block_to_block (&block, &lse.pre);
      gfc_add_block_to_block (&block, &rse.pre);

      return gfc_finish_block (&block);
    case GFC_OMP_ATOMIC_CAPTURE:
      aop = OMP_ATOMIC_CAPTURE_NEW;
      if (expr2->expr_type == EXPR_VARIABLE)
        {
          aop = OMP_ATOMIC_CAPTURE_OLD;
          gfc_conv_expr (&vse, code->expr1);
          gfc_add_block_to_block (&block, &vse.pre);

          gfc_conv_expr (&lse, expr2);
          gfc_add_block_to_block (&block, &lse.pre);
          gfc_init_se (&lse, NULL);
          code = code->next;
          var = code->expr1->symtree->n.sym;
          expr2 = code->expr2;
          if (expr2->expr_type == EXPR_FUNCTION
              && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
            expr2 = expr2->value.function.actual->expr;
        }
      break;
    default:
      break;
    }

  gfc_conv_expr (&lse, code->expr1);
  gfc_add_block_to_block (&block, &lse.pre);
  type = TREE_TYPE (lse.expr);
  lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

  if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
    {
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else if (expr2->expr_type == EXPR_OP)
    {
      gfc_expr *e;
      switch (expr2->value.op.op)
        {
        case INTRINSIC_PLUS:
          op = PLUS_EXPR;
          break;
        case INTRINSIC_TIMES:
          op = MULT_EXPR;
          break;
        case INTRINSIC_MINUS:
          op = MINUS_EXPR;
          break;
        case INTRINSIC_DIVIDE:
          if (expr2->ts.type == BT_INTEGER)
            op = TRUNC_DIV_EXPR;
          else
            op = RDIV_EXPR;
          break;
        case INTRINSIC_AND:
          op = TRUTH_ANDIF_EXPR;
          break;
        case INTRINSIC_OR:
          op = TRUTH_ORIF_EXPR;
          break;
        case INTRINSIC_EQV:
          op = EQ_EXPR;
          break;
        case INTRINSIC_NEQV:
          op = NE_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.op.op1;
      if (e->expr_type == EXPR_FUNCTION
          && e->value.function.isym->id == GFC_ISYM_CONVERSION)
        e = e->value.function.actual->expr;
      if (e->expr_type == EXPR_VARIABLE
          && e->symtree != NULL
          && e->symtree->n.sym == var)
        {
          expr2 = expr2->value.op.op2;
          var_on_left = true;
        }
      else
        {
          e = expr2->value.op.op2;
          if (e->expr_type == EXPR_FUNCTION
              && e->value.function.isym->id == GFC_ISYM_CONVERSION)
            e = e->value.function.actual->expr;
          gcc_assert (e->expr_type == EXPR_VARIABLE
                      && e->symtree != NULL
                      && e->symtree->n.sym == var);
          expr2 = expr2->value.op.op1;
          var_on_left = false;
        }
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else
    {
      gcc_assert (expr2->expr_type == EXPR_FUNCTION);
      switch (expr2->value.function.isym->id)
        {
        case GFC_ISYM_MIN:
          op = MIN_EXPR;
          break;
        case GFC_ISYM_MAX:
          op = MAX_EXPR;
          break;
        case GFC_ISYM_IAND:
          op = BIT_AND_EXPR;
          break;
        case GFC_ISYM_IOR:
          op = BIT_IOR_EXPR;
          break;
        case GFC_ISYM_IEOR:
          op = BIT_XOR_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.function.actual->expr;
      gcc_assert (e->expr_type == EXPR_VARIABLE
                  && e->symtree != NULL
                  && e->symtree->n.sym == var);

      gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
      gfc_add_block_to_block (&block, &rse.pre);
      if (expr2->value.function.actual->next->next != NULL)
        {
          tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
          gfc_actual_arglist *arg;

          gfc_add_modify (&block, accum, rse.expr);
          for (arg = expr2->value.function.actual->next->next; arg;
               arg = arg->next)
            {
              gfc_init_block (&rse.pre);
              gfc_conv_expr (&rse, arg->expr);
              gfc_add_block_to_block (&block, &rse.pre);
              x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
                                   accum, rse.expr);
              gfc_add_modify (&block, accum, x);
            }

          rse.expr = accum;
        }

      expr2 = expr2->value.function.actual->next->expr;
    }

  lhsaddr = save_expr (lhsaddr);
  rhs = gfc_evaluate_now (rse.expr, &block);

  if (atomic_code->ext.omp_atomic == GFC_OMP_ATOMIC_WRITE)
    x = rhs;
  else
    {
      x = convert (TREE_TYPE (rhs),
                   build_fold_indirect_ref_loc (input_location, lhsaddr));
      if (var_on_left)
        x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
      else
        x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
    }

  if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
      && TREE_CODE (type) != COMPLEX_TYPE)
    x = fold_build1_loc (input_location, REALPART_EXPR,
                         TREE_TYPE (TREE_TYPE (rhs)), x);

  gfc_add_block_to_block (&block, &lse.pre);
  gfc_add_block_to_block (&block, &rse.pre);

  if (aop == OMP_ATOMIC)
    {
      x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
      gfc_add_expr_to_block (&block, x);
    }
  else
    {
      if (aop == OMP_ATOMIC_CAPTURE_NEW)
        {
          code = code->next;
          expr2 = code->expr2;
          if (expr2->expr_type == EXPR_FUNCTION
              && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
            expr2 = expr2->value.function.actual->expr;

          gcc_assert (expr2->expr_type == EXPR_VARIABLE);
          gfc_conv_expr (&vse, code->expr1);
          gfc_add_block_to_block (&block, &vse.pre);

          gfc_init_se (&lse, NULL);
          gfc_conv_expr (&lse, expr2);
          gfc_add_block_to_block (&block, &lse.pre);
        }
      x = build2 (aop, type, lhsaddr, convert (type, x));
      x = convert (TREE_TYPE (vse.expr), x);
      gfc_add_modify (&block, vse.expr, x);
    }

  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_barrier (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_critical (gfc_code *code)
{
  tree name = NULL_TREE, stmt;
  if (code->ext.omp_name != NULL)
    name = get_identifier (code->ext.omp_name);
  stmt = gfc_trans_code (code->block->next);
  return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
}

typedef struct dovar_init_d {
  tree var;
  tree init;
} dovar_init;

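/* Translate !$omp do (and the loop part of combined constructs) into an
   OMP_FOR node.  Loops with a unit step use the DO variable directly;
   other steps are rewritten to iterate over a zero-based count, with the
   DO variable recomputed from the count at the top of the body.  COLLAPSE
   levels are gathered into the INIT/COND/INCR vectors.  */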
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
                  gfc_omp_clauses *do_clauses, tree par_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  int i, collapse = clauses->collapse;
  vec<dovar_init> inits = vNULL;
  dovar_init *di;
  unsigned ix;

  if (collapse <= 0)
    collapse = 1;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  init = make_tree_vec (collapse);
  cond = make_tree_vec (collapse);
  incr = make_tree_vec (collapse);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);

  for (i = 0; i < collapse; i++)
    {
      int simple = 0;
      int dovar_found = 0;
      tree dovar_decl;

      if (clauses)
        {
          gfc_namelist *n;
          for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
               n = n->next)
            if (code->ext.iterator->var->symtree->n.sym == n->sym)
              break;
          if (n != NULL)
            dovar_found = 1;
          else if (n == NULL)
            for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
              if (code->ext.iterator->var->symtree->n.sym == n->sym)
                break;
          if (n != NULL)
            dovar_found++;
        }

      /* Evaluate all the expressions in the iterator.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->ext.iterator->var);
      gfc_add_block_to_block (pblock, &se.pre);
      dovar = se.expr;
      type = TREE_TYPE (dovar);
      gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->start);
      gfc_add_block_to_block (pblock, &se.pre);
      from = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->end);
      gfc_add_block_to_block (pblock, &se.pre);
      to = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->step);
      gfc_add_block_to_block (pblock, &se.pre);
      step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
        {
          if (integer_onep (step))
            simple = 1;
          else if (tree_int_cst_equal (step, integer_minus_one_node))
            simple = -1;
        }
      else
        dovar_decl
          = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
        {
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
          /* The condition should not be folded.  */
          TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
                                               ? LE_EXPR : GE_EXPR,
                                               boolean_type_node, dovar, to);
          TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
                                                    type, dovar, step);
          TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
                                                    MODIFY_EXPR,
                                                    type, dovar,
                                                    TREE_VEC_ELT (incr, i));
        }
      else
        {
          /* STEP is not 1 or -1.  Use:
             for (count = 0; count < (to + step - from) / step; count++)
               {
                 dovar = from + count * step;
                 body;
               cycle_label:;
               }  */
          tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
          tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
          tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
                                 step);
          tmp = gfc_evaluate_now (tmp, pblock);
          count = gfc_create_var (type, "count");
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
                                             build_int_cst (type, 0));
          /* The condition should not be folded.  */
          TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
                                               boolean_type_node,
                                               count, tmp);
          TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
                                                    type, count,
                                                    build_int_cst (type, 1));
          TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
                                                    MODIFY_EXPR, type, count,
                                                    TREE_VEC_ELT (incr, i));

          /* Initialize DOVAR.  */
          tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
          tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
          dovar_init e = {dovar, tmp};
          inits.safe_push (e);
        }

      if (!dovar_found)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = dovar_decl;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }
      else if (dovar_found == 2)
        {
          tree c = NULL;

          tmp = NULL;
          if (!simple)
            {
              /* If dovar is lastprivate, but different counter is used,
                 dovar += step needs to be added to
                 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
                 will have the value on entry of the last loop, rather
                 than value after iterator increment.  */
              tmp = gfc_evaluate_now (step, pblock);
              tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
                                     tmp);
              tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
                                     dovar, tmp);
              for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
                    break;
                  }
            }
          if (c == NULL && par_clauses != NULL)
            {
              for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    tree l = build_omp_clause (input_location,
                                               OMP_CLAUSE_LASTPRIVATE);
                    OMP_CLAUSE_DECL (l) = dovar_decl;
                    OMP_CLAUSE_CHAIN (l) = omp_clauses;
                    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
                    omp_clauses = l;
                    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
                    break;
                  }
            }
          gcc_assert (simple || c != NULL);
        }
      if (!simple)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = count;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }

      if (i + 1 < collapse)
        code = code->block->next;
    }

  if (pblock != &block)
    {
      pushlevel ();
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  FOR_EACH_VEC_ELT (inits, ix, di)
    gfc_add_modify (&body, di->var, di->init);
  inits.release ();

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later.  */

  code->cycle_label = cycle_label;
  code->exit_label = NULL_TREE;

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

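/* !$omp flush is expanded to a full memory barrier.  */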
static tree
gfc_trans_omp_flush (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_master (gfc_code *code)
{
  tree stmt = gfc_trans_code (code->block->next);
  if (IS_EMPTY_STMT (stmt))
    return stmt;
  return build1_v (OMP_MASTER, stmt);
}

static tree
gfc_trans_omp_ordered (gfc_code *code)
{
  return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
}

static tree
gfc_trans_omp_parallel (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

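/* Translate !$omp parallel do by splitting the clauses between the
   OMP_PARALLEL and the OMP_FOR: scheduling, ordering and collapse stay on
   the loop, everything else goes on the parallel region, and the loop gets
   an implicit nowait.  */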
static tree
gfc_trans_omp_parallel_do (gfc_code *code)
{
  stmtblock_t block, *pblock = NULL;
  gfc_omp_clauses parallel_clauses, do_clauses;
  tree stmt, omp_clauses = NULL_TREE;

  gfc_start_block (&block);

  memset (&do_clauses, 0, sizeof (do_clauses));
  if (code->ext.omp_clauses != NULL)
    {
      memcpy (&parallel_clauses, code->ext.omp_clauses,
              sizeof (parallel_clauses));
      do_clauses.sched_kind = parallel_clauses.sched_kind;
      do_clauses.chunk_size = parallel_clauses.chunk_size;
      do_clauses.ordered = parallel_clauses.ordered;
      do_clauses.collapse = parallel_clauses.collapse;
      parallel_clauses.sched_kind = OMP_SCHED_NONE;
      parallel_clauses.chunk_size = NULL;
      parallel_clauses.ordered = false;
      parallel_clauses.collapse = 0;
      omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
                                           code->loc);
    }
  do_clauses.nowait = true;
  if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
    pblock = &block;
  else
    pushlevel ();
  stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_parallel_sections (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses section_clauses;
  tree stmt, omp_clauses;

  memset (&section_clauses, 0, sizeof (section_clauses));
  section_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_sections (code, &section_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_parallel_workshare (gfc_code *code)
{
  stmtblock_t block;
  gfc_omp_clauses workshare_clauses;
  tree stmt, omp_clauses;

  memset (&workshare_clauses, 0, sizeof (workshare_clauses));
  workshare_clauses.nowait = true;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  pushlevel ();
  stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
  else
    poplevel (0, 0);
  stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
                     omp_clauses);
  OMP_PARALLEL_COMBINED (stmt) = 1;
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
{
  stmtblock_t block, body;
  tree omp_clauses, stmt;
  bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;

  gfc_start_block (&block);

  omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);

  gfc_init_block (&body);
  for (code = code->block; code; code = code->block)
    {
      /* Last section is special because of lastprivate, so even if it
         is empty, chain it in.  */
      stmt = gfc_trans_omp_code (code->next,
                                 has_lastprivate && code->block == NULL);
      if (! IS_EMPTY_STMT (stmt))
        {
          stmt = build1_v (OMP_SECTION, stmt);
          gfc_add_expr_to_block (&body, stmt);
        }
    }
  stmt = gfc_finish_block (&body);

  stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
  tree stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
                     omp_clauses);
  return stmt;
}

static tree
gfc_trans_omp_task (gfc_code *code)
{
  stmtblock_t block;
  tree stmt, omp_clauses;

  gfc_start_block (&block);
  omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
  stmt = gfc_trans_omp_code (code->block->next, true);
  stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
                     omp_clauses);
  gfc_add_expr_to_block (&block, stmt);
  return gfc_finish_block (&block);
}

static tree
gfc_trans_omp_taskwait (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
  return build_call_expr_loc (input_location, decl, 0);
}

static tree
gfc_trans_omp_taskyield (void)
{
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
  return build_call_expr_loc (input_location, decl, 0);
}

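/* Translate !$omp workshare.  There is no dependence analysis here:
   statements that can be workshared (array assignments, WHERE, FORALL)
   are translated with the scalarizer co-operating through ompws_flags,
   while every other statement is wrapped in an OMP_SINGLE so that only
   one thread executes it.  */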
static tree
gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
{
  tree res, tmp, stmt;
  stmtblock_t block, *pblock = NULL;
  stmtblock_t singleblock;
  int saved_ompws_flags;
  bool singleblock_in_progress = false;
  /* True if previous gfc_code in workshare construct is not workshared.  */
  bool prev_singleunit;

  code = code->block->next;

  pushlevel ();

  gfc_start_block (&block);
  pblock = &block;

  ompws_flags = OMPWS_WORKSHARE_FLAG;
  prev_singleunit = false;

  /* Translate statements one by one to trees until we reach
     the end of the workshare construct.  Adjacent gfc_codes that
     are a single unit of work are clustered and encapsulated in a
     single OMP_SINGLE construct.  */
  for (; code; code = code->next)
    {
      if (code->here != 0)
        {
          res = gfc_trans_label_here (code);
          gfc_add_expr_to_block (pblock, res);
        }

      /* No dependence analysis, use for clauses with wait.
         If this is the last gfc_code, use default omp_clauses.  */
      if (code->next == NULL && clauses->nowait)
        ompws_flags |= OMPWS_NOWAIT;

      /* By default, every gfc_code is a single unit of work.  */
      ompws_flags |= OMPWS_CURR_SINGLEUNIT;
      ompws_flags &= ~OMPWS_SCALARIZER_WS;

      switch (code->op)
        {
        case EXEC_NOP:
          res = NULL_TREE;
          break;

        case EXEC_ASSIGN:
          res = gfc_trans_assign (code);
          break;

        case EXEC_POINTER_ASSIGN:
          res = gfc_trans_pointer_assign (code);
          break;

        case EXEC_INIT_ASSIGN:
          res = gfc_trans_init_assign (code);
          break;

        case EXEC_FORALL:
          res = gfc_trans_forall (code);
          break;

        case EXEC_WHERE:
          res = gfc_trans_where (code);
          break;

        case EXEC_OMP_ATOMIC:
          res = gfc_trans_omp_directive (code);
          break;

        case EXEC_OMP_PARALLEL:
        case EXEC_OMP_PARALLEL_DO:
        case EXEC_OMP_PARALLEL_SECTIONS:
        case EXEC_OMP_PARALLEL_WORKSHARE:
        case EXEC_OMP_CRITICAL:
          saved_ompws_flags = ompws_flags;
          ompws_flags = 0;
          res = gfc_trans_omp_directive (code);
          ompws_flags = saved_ompws_flags;
          break;

        default:
          internal_error ("gfc_trans_omp_workshare(): Bad statement code");
        }

      gfc_set_backend_locus (&code->loc);

      if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
        {
          if (prev_singleunit)
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                /* Add current gfc_code to single block.  */
                gfc_add_expr_to_block (&singleblock, res);
              else
                {
                  /* Finish single block and add it to pblock.  */
                  tmp = gfc_finish_block (&singleblock);
                  tmp = build2_loc (input_location, OMP_SINGLE,
                                    void_type_node, tmp, NULL_TREE);
                  gfc_add_expr_to_block (pblock, tmp);
                  /* Add current gfc_code to pblock.  */
                  gfc_add_expr_to_block (pblock, res);
                  singleblock_in_progress = false;
                }
            }
          else
            {
              if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
                {
                  /* Start single block.  */
                  gfc_init_block (&singleblock);
                  gfc_add_expr_to_block (&singleblock, res);
                  singleblock_in_progress = true;
                }
              else
                /* Add the new statement to the block.  */
                gfc_add_expr_to_block (pblock, res);
            }
          prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
        }
    }

  /* Finish remaining SINGLE block, if we were in the middle of one.  */
  if (singleblock_in_progress)
    {
      /* Finish single block and add it to pblock.  */
      tmp = gfc_finish_block (&singleblock);
      tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
                        clauses->nowait
                        ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
                        : NULL_TREE);
      gfc_add_expr_to_block (pblock, tmp);
    }

  stmt = gfc_finish_block (pblock);
  if (TREE_CODE (stmt) != BIND_EXPR)
    {
      if (!IS_EMPTY_STMT (stmt))
        {
          tree bindblock = poplevel (1, 0);
          stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
        }
      else
        poplevel (0, 0);
    }
  else
    poplevel (0, 0);

  if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
    stmt = gfc_trans_omp_barrier ();

  ompws_flags = 0;
  return stmt;
}

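/* Translate an OpenMP executable directive from the gfc_code tree into
   the corresponding GENERIC construct.  */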
tree
gfc_trans_omp_directive (gfc_code *code)
{
  switch (code->op)
    {
    case EXEC_OMP_ATOMIC:
      return gfc_trans_omp_atomic (code);
    case EXEC_OMP_BARRIER:
      return gfc_trans_omp_barrier ();
    case EXEC_OMP_CRITICAL:
      return gfc_trans_omp_critical (code);
    case EXEC_OMP_DO:
      return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
    case EXEC_OMP_FLUSH:
      return gfc_trans_omp_flush ();
    case EXEC_OMP_MASTER:
      return gfc_trans_omp_master (code);
    case EXEC_OMP_ORDERED:
      return gfc_trans_omp_ordered (code);
    case EXEC_OMP_PARALLEL:
      return gfc_trans_omp_parallel (code);
    case EXEC_OMP_PARALLEL_DO:
      return gfc_trans_omp_parallel_do (code);
    case EXEC_OMP_PARALLEL_SECTIONS:
      return gfc_trans_omp_parallel_sections (code);
    case EXEC_OMP_PARALLEL_WORKSHARE:
      return gfc_trans_omp_parallel_workshare (code);
    case EXEC_OMP_SECTIONS:
      return gfc_trans_omp_sections (code, code->ext.omp_clauses);
    case EXEC_OMP_SINGLE:
      return gfc_trans_omp_single (code, code->ext.omp_clauses);
    case EXEC_OMP_TASK:
      return gfc_trans_omp_task (code);
    case EXEC_OMP_TASKWAIT:
      return gfc_trans_omp_taskwait ();
    case EXEC_OMP_TASKYIELD:
      return gfc_trans_omp_taskyield ();
    case EXEC_OMP_WORKSHARE:
      return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
    default:
      gcc_unreachable ();
    }
}