gcc/fortran/trans-openmp.c
1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "gimple-expr.h"
27 #include "gimplify.h" /* For create_tmp_var_raw. */
28 #include "stringpool.h"
29 #include "gfortran.h"
30 #include "diagnostic-core.h" /* For internal_error. */
31 #include "trans.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
36 #include "arith.h"
37 #include "omp-low.h"
39 int ompws_flags;
41 /* True if OpenMP should privatize what this DECL points to rather
42 than the DECL itself. */
44 bool
45 gfc_omp_privatize_by_reference (const_tree decl)
47 tree type = TREE_TYPE (decl);
49 if (TREE_CODE (type) == REFERENCE_TYPE
50 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
51 return true;
53 if (TREE_CODE (type) == POINTER_TYPE)
 55       /* Array POINTER/ALLOCATABLE have aggregate types; all user variables
 56          that have POINTER_TYPE type and aren't scalar pointers, scalar
 57          allocatables, Cray pointees or C pointers are supposed to be
 58          privatized by reference.  */
59 if (GFC_DECL_GET_SCALAR_POINTER (decl)
60 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
61 || GFC_DECL_CRAY_POINTEE (decl)
62 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
63 return false;
65 if (!DECL_ARTIFICIAL (decl)
66 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
67 return true;
69 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
70 by the frontend. */
71 if (DECL_LANG_SPECIFIC (decl)
72 && GFC_DECL_SAVED_DESCRIPTOR (decl))
73 return true;
76 return false;
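/* For example, an assumed-shape dummy array such as
     subroutine sub (a)
       real :: a(:)
   is typically represented as a pointer to its array descriptor, so the
   hook above returns true and OpenMP privatizes what the decl points to;
   a scalar POINTER or ALLOCATABLE, a Cray pointee or a C_PTR is instead
   privatized as the pointer itself, and the hook returns false.  */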
79 /* True if OpenMP sharing attribute of DECL is predetermined. */
81 enum omp_clause_default_kind
82 gfc_omp_predetermined_sharing (tree decl)
84 /* Associate names preserve the association established during ASSOCIATE.
85 As they are implemented either as pointers to the selector or array
86 descriptor and shouldn't really change in the ASSOCIATE region,
87 this decl can be either shared or firstprivate. If it is a pointer,
88 use firstprivate, as it is cheaper that way, otherwise make it shared. */
89 if (GFC_DECL_ASSOCIATE_VAR_P (decl))
91 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
92 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
93 else
94 return OMP_CLAUSE_DEFAULT_SHARED;
97 if (DECL_ARTIFICIAL (decl)
98 && ! GFC_DECL_RESULT (decl)
99 && ! (DECL_LANG_SPECIFIC (decl)
100 && GFC_DECL_SAVED_DESCRIPTOR (decl)))
101 return OMP_CLAUSE_DEFAULT_SHARED;
103 /* Cray pointees shouldn't be listed in any clauses and should be
104 gimplified to dereference of the corresponding Cray pointer.
105 Make them all private, so that they are emitted in the debug
106 information. */
107 if (GFC_DECL_CRAY_POINTEE (decl))
108 return OMP_CLAUSE_DEFAULT_PRIVATE;
110 /* Assumed-size arrays are predetermined shared. */
111 if (TREE_CODE (decl) == PARM_DECL
112 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
113 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
114 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
115 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
116 == NULL)
117 return OMP_CLAUSE_DEFAULT_SHARED;
119 /* Dummy procedures aren't considered variables by OpenMP, thus are
120 disallowed in OpenMP clauses. They are represented as PARM_DECLs
121 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
122 to avoid complaining about their uses with default(none). */
123 if (TREE_CODE (decl) == PARM_DECL
124 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
125 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
126 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
128 /* COMMON and EQUIVALENCE decls are shared. They
129 are only referenced through DECL_VALUE_EXPR of the variables
130 contained in them. If those are privatized, they will not be
131 gimplified to the COMMON or EQUIVALENCE decls. */
132 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
133 return OMP_CLAUSE_DEFAULT_SHARED;
135 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
136 return OMP_CLAUSE_DEFAULT_SHARED;
 138   /* These are either array or derived-type PARAMETERs, or vtables.
 139      In the former cases, the OpenMP standard doesn't consider them to be
 140      variables at all (they can't be redefined), but they can nevertheless
 141      appear in parallel/task regions, so for default(none) purposes treat
 142      them as shared.  For vtables the same handling is likely desirable.  */
143 if (TREE_CODE (decl) == VAR_DECL
144 && TREE_READONLY (decl)
145 && TREE_STATIC (decl))
146 return OMP_CLAUSE_DEFAULT_SHARED;
148 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
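/* For example, in
     subroutine sub (a)
       real :: a(*)
       !$omp parallel default(none)
   the assumed-size dummy A is predetermined shared by the code above, so
   default(none) does not complain about its uses; a Cray pointee, by
   contrast, is made private so that it still shows up in the debug
   information.  */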
151 /* Return decl that should be used when reporting DEFAULT(NONE)
152 diagnostics. */
154 tree
155 gfc_omp_report_decl (tree decl)
157 if (DECL_ARTIFICIAL (decl)
158 && DECL_LANG_SPECIFIC (decl)
159 && GFC_DECL_SAVED_DESCRIPTOR (decl))
160 return GFC_DECL_SAVED_DESCRIPTOR (decl);
162 return decl;
165 /* Return true if TYPE has any allocatable components. */
167 static bool
168 gfc_has_alloc_comps (tree type, tree decl)
170 tree field, ftype;
172 if (POINTER_TYPE_P (type))
174 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
175 type = TREE_TYPE (type);
176 else if (GFC_DECL_GET_SCALAR_POINTER (decl))
177 return false;
180 while (GFC_DESCRIPTOR_TYPE_P (type) || GFC_ARRAY_TYPE_P (type))
181 type = gfc_get_element_type (type);
183 if (TREE_CODE (type) != RECORD_TYPE)
184 return false;
186 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
188 ftype = TREE_TYPE (field);
189 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
190 return true;
191 if (GFC_DESCRIPTOR_TYPE_P (ftype)
192 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
193 return true;
194 if (gfc_has_alloc_comps (ftype, field))
195 return true;
197 return false;
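/* For example, given
     type t
       integer, allocatable :: a(:)
     end type t
   a variable of type T (or an array of T) has allocatable components, so
   this predicate returns true and the clause constructors/destructors
   below must walk those components and allocate, copy or free them
   explicitly.  */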
200 /* Return true if DECL in private clause needs
201 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
202 bool
203 gfc_omp_private_outer_ref (tree decl)
205 tree type = TREE_TYPE (decl);
207 if (GFC_DESCRIPTOR_TYPE_P (type)
208 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
209 return true;
211 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
212 return true;
214 if (gfc_omp_privatize_by_reference (decl))
215 type = TREE_TYPE (type);
217 if (gfc_has_alloc_comps (type, decl))
218 return true;
220 return false;
223 /* Callback for gfc_omp_unshare_expr. */
225 static tree
226 gfc_omp_unshare_expr_r (tree *tp, int *walk_subtrees, void *)
228 tree t = *tp;
229 enum tree_code code = TREE_CODE (t);
231 /* Stop at types, decls, constants like copy_tree_r. */
232 if (TREE_CODE_CLASS (code) == tcc_type
233 || TREE_CODE_CLASS (code) == tcc_declaration
234 || TREE_CODE_CLASS (code) == tcc_constant
235 || code == BLOCK)
236 *walk_subtrees = 0;
237 else if (handled_component_p (t)
238 || TREE_CODE (t) == MEM_REF)
240 *tp = unshare_expr (t);
241 *walk_subtrees = 0;
244 return NULL_TREE;
 247 /* Unshare in EXPR anything that could cause tree-sharing problems
 248    at omp-low.c time.  The FE normally doesn't care much about tree
 249    sharing, because during gimplification everything is unshared
 250    anyway.  */
252 static tree
253 gfc_omp_unshare_expr (tree expr)
255 walk_tree (&expr, gfc_omp_unshare_expr_r, NULL, NULL);
256 return expr;
259 enum walk_alloc_comps
261 WALK_ALLOC_COMPS_DTOR,
262 WALK_ALLOC_COMPS_DEFAULT_CTOR,
263 WALK_ALLOC_COMPS_COPY_CTOR
266 /* Handle allocatable components in OpenMP clauses. */
268 static tree
269 gfc_walk_alloc_comps (tree decl, tree dest, tree var,
270 enum walk_alloc_comps kind)
272 stmtblock_t block, tmpblock;
273 tree type = TREE_TYPE (decl), then_b, tem, field;
274 gfc_init_block (&block);
276 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
278 if (GFC_DESCRIPTOR_TYPE_P (type))
280 gfc_init_block (&tmpblock);
281 tem = gfc_full_array_size (&tmpblock, decl,
282 GFC_TYPE_ARRAY_RANK (type));
283 then_b = gfc_finish_block (&tmpblock);
284 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (then_b));
285 tem = gfc_omp_unshare_expr (tem);
286 tem = fold_build2_loc (input_location, MINUS_EXPR,
287 gfc_array_index_type, tem,
288 gfc_index_one_node);
290 else
292 if (!TYPE_DOMAIN (type)
293 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
294 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
295 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
297 tem = fold_build2 (EXACT_DIV_EXPR, sizetype,
298 TYPE_SIZE_UNIT (type),
299 TYPE_SIZE_UNIT (TREE_TYPE (type)));
300 tem = size_binop (MINUS_EXPR, tem, size_one_node);
302 else
303 tem = array_type_nelts (type);
304 tem = fold_convert (gfc_array_index_type, tem);
307 tree nelems = gfc_evaluate_now (tem, &block);
308 tree index = gfc_create_var (gfc_array_index_type, "S");
310 gfc_init_block (&tmpblock);
311 tem = gfc_conv_array_data (decl);
312 tree declvar = build_fold_indirect_ref_loc (input_location, tem);
313 tree declvref = gfc_build_array_ref (declvar, index, NULL);
314 tree destvar, destvref = NULL_TREE;
315 if (dest)
317 tem = gfc_conv_array_data (dest);
318 destvar = build_fold_indirect_ref_loc (input_location, tem);
319 destvref = gfc_build_array_ref (destvar, index, NULL);
321 gfc_add_expr_to_block (&tmpblock,
322 gfc_walk_alloc_comps (declvref, destvref,
323 var, kind));
325 gfc_loopinfo loop;
326 gfc_init_loopinfo (&loop);
327 loop.dimen = 1;
328 loop.from[0] = gfc_index_zero_node;
329 loop.loopvar[0] = index;
330 loop.to[0] = nelems;
331 gfc_trans_scalarizing_loops (&loop, &tmpblock);
332 gfc_add_block_to_block (&block, &loop.pre);
333 return gfc_finish_block (&block);
335 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var))
337 decl = build_fold_indirect_ref_loc (input_location, decl);
338 if (dest)
339 dest = build_fold_indirect_ref_loc (input_location, dest);
340 type = TREE_TYPE (decl);
343 gcc_assert (TREE_CODE (type) == RECORD_TYPE);
344 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
346 tree ftype = TREE_TYPE (field);
347 tree declf, destf = NULL_TREE;
348 bool has_alloc_comps = gfc_has_alloc_comps (ftype, field);
349 if ((!GFC_DESCRIPTOR_TYPE_P (ftype)
350 || GFC_TYPE_ARRAY_AKIND (ftype) != GFC_ARRAY_ALLOCATABLE)
351 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
352 && !has_alloc_comps)
353 continue;
354 declf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
355 decl, field, NULL_TREE);
356 if (dest)
357 destf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
358 dest, field, NULL_TREE);
360 tem = NULL_TREE;
361 switch (kind)
363 case WALK_ALLOC_COMPS_DTOR:
364 break;
365 case WALK_ALLOC_COMPS_DEFAULT_CTOR:
366 if (GFC_DESCRIPTOR_TYPE_P (ftype)
367 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
369 gfc_add_modify (&block, unshare_expr (destf),
370 unshare_expr (declf));
371 tem = gfc_duplicate_allocatable_nocopy
372 (destf, declf, ftype,
373 GFC_TYPE_ARRAY_RANK (ftype));
375 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
376 tem = gfc_duplicate_allocatable_nocopy (destf, declf, ftype, 0);
377 break;
378 case WALK_ALLOC_COMPS_COPY_CTOR:
379 if (GFC_DESCRIPTOR_TYPE_P (ftype)
380 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
381 tem = gfc_duplicate_allocatable (destf, declf, ftype,
382 GFC_TYPE_ARRAY_RANK (ftype));
383 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
384 tem = gfc_duplicate_allocatable (destf, declf, ftype, 0);
385 break;
387 if (tem)
388 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
389 if (has_alloc_comps)
391 gfc_init_block (&tmpblock);
392 gfc_add_expr_to_block (&tmpblock,
393 gfc_walk_alloc_comps (declf, destf,
394 field, kind));
395 then_b = gfc_finish_block (&tmpblock);
396 if (GFC_DESCRIPTOR_TYPE_P (ftype)
397 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
398 tem = gfc_conv_descriptor_data_get (unshare_expr (declf));
399 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
400 tem = unshare_expr (declf);
401 else
402 tem = NULL_TREE;
403 if (tem)
405 tem = fold_convert (pvoid_type_node, tem);
406 tem = fold_build2_loc (input_location, NE_EXPR,
407 boolean_type_node, tem,
408 null_pointer_node);
409 then_b = build3_loc (input_location, COND_EXPR, void_type_node,
410 tem, then_b,
411 build_empty_stmt (input_location));
413 gfc_add_expr_to_block (&block, then_b);
415 if (kind == WALK_ALLOC_COMPS_DTOR)
417 if (GFC_DESCRIPTOR_TYPE_P (ftype)
418 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
420 tem = gfc_trans_dealloc_allocated (unshare_expr (declf),
421 false, NULL);
422 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
424 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
426 tem = gfc_call_free (unshare_expr (declf));
427 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
432 return gfc_finish_block (&block);
435 /* Return code to initialize DECL with its default constructor, or
436 NULL if there's nothing to do. */
438 tree
439 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
441 tree type = TREE_TYPE (decl), size, ptr, cond, then_b, else_b;
442 stmtblock_t block, cond_block;
444 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
445 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE
446 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR
447 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION);
449 if ((! GFC_DESCRIPTOR_TYPE_P (type)
450 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
451 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
453 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
455 gcc_assert (outer);
456 gfc_start_block (&block);
457 tree tem = gfc_walk_alloc_comps (outer, decl,
458 OMP_CLAUSE_DECL (clause),
459 WALK_ALLOC_COMPS_DEFAULT_CTOR);
460 gfc_add_expr_to_block (&block, tem);
461 return gfc_finish_block (&block);
463 return NULL_TREE;
466 gcc_assert (outer != NULL_TREE);
 468   /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
 469      "not currently allocated" allocation status if the outer variable is
 470      "not currently allocated"; otherwise they should be allocated.  */
471 gfc_start_block (&block);
473 gfc_init_block (&cond_block);
475 if (GFC_DESCRIPTOR_TYPE_P (type))
477 gfc_add_modify (&cond_block, decl, outer);
478 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
479 size = gfc_conv_descriptor_ubound_get (decl, rank);
480 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
481 size,
482 gfc_conv_descriptor_lbound_get (decl, rank));
483 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
484 size, gfc_index_one_node);
485 if (GFC_TYPE_ARRAY_RANK (type) > 1)
486 size = fold_build2_loc (input_location, MULT_EXPR,
487 gfc_array_index_type, size,
488 gfc_conv_descriptor_stride_get (decl, rank));
489 tree esize = fold_convert (gfc_array_index_type,
490 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
491 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
492 size, esize);
493 size = unshare_expr (size);
494 size = gfc_evaluate_now (fold_convert (size_type_node, size),
495 &cond_block);
497 else
498 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
499 ptr = gfc_create_var (pvoid_type_node, NULL);
500 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
501 if (GFC_DESCRIPTOR_TYPE_P (type))
502 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl), ptr);
503 else
504 gfc_add_modify (&cond_block, unshare_expr (decl),
505 fold_convert (TREE_TYPE (decl), ptr));
506 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
508 tree tem = gfc_walk_alloc_comps (outer, decl,
509 OMP_CLAUSE_DECL (clause),
510 WALK_ALLOC_COMPS_DEFAULT_CTOR);
511 gfc_add_expr_to_block (&cond_block, tem);
513 then_b = gfc_finish_block (&cond_block);
515 /* Reduction clause requires allocated ALLOCATABLE. */
516 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_REDUCTION)
518 gfc_init_block (&cond_block);
519 if (GFC_DESCRIPTOR_TYPE_P (type))
520 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl),
521 null_pointer_node);
522 else
523 gfc_add_modify (&cond_block, unshare_expr (decl),
524 build_zero_cst (TREE_TYPE (decl)));
525 else_b = gfc_finish_block (&cond_block);
527 tree tem = fold_convert (pvoid_type_node,
528 GFC_DESCRIPTOR_TYPE_P (type)
529 ? gfc_conv_descriptor_data_get (outer) : outer);
530 tem = unshare_expr (tem);
531 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
532 tem, null_pointer_node);
533 gfc_add_expr_to_block (&block,
534 build3_loc (input_location, COND_EXPR,
535 void_type_node, cond, then_b,
536 else_b));
538 else
539 gfc_add_expr_to_block (&block, then_b);
541 return gfc_finish_block (&block);
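/* Roughly, for
     integer, allocatable :: a(:)
     !$omp parallel private(a)
   the private copy of A stays unallocated when the outer A is
   unallocated, and otherwise receives freshly malloc'ed storage of the
   same size (without copying the contents); for REDUCTION the outer
   variable is required to be allocated, so that check is skipped.  */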
544 /* Build and return code for a copy constructor from SRC to DEST. */
546 tree
547 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
549 tree type = TREE_TYPE (dest), ptr, size, call;
550 tree cond, then_b, else_b;
551 stmtblock_t block, cond_block;
553 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE
554 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
556 if ((! GFC_DESCRIPTOR_TYPE_P (type)
557 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
558 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
560 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
562 gfc_start_block (&block);
563 gfc_add_modify (&block, dest, src);
564 tree tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
565 WALK_ALLOC_COMPS_COPY_CTOR);
566 gfc_add_expr_to_block (&block, tem);
567 return gfc_finish_block (&block);
569 else
570 return build2_v (MODIFY_EXPR, dest, src);
573 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
574 and copied from SRC. */
575 gfc_start_block (&block);
577 gfc_init_block (&cond_block);
579 gfc_add_modify (&cond_block, dest, src);
580 if (GFC_DESCRIPTOR_TYPE_P (type))
582 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
583 size = gfc_conv_descriptor_ubound_get (dest, rank);
584 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
585 size,
586 gfc_conv_descriptor_lbound_get (dest, rank));
587 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
588 size, gfc_index_one_node);
589 if (GFC_TYPE_ARRAY_RANK (type) > 1)
590 size = fold_build2_loc (input_location, MULT_EXPR,
591 gfc_array_index_type, size,
592 gfc_conv_descriptor_stride_get (dest, rank));
593 tree esize = fold_convert (gfc_array_index_type,
594 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
595 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
596 size, esize);
597 size = unshare_expr (size);
598 size = gfc_evaluate_now (fold_convert (size_type_node, size),
599 &cond_block);
601 else
602 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
603 ptr = gfc_create_var (pvoid_type_node, NULL);
604 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
605 if (GFC_DESCRIPTOR_TYPE_P (type))
606 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest), ptr);
607 else
608 gfc_add_modify (&cond_block, unshare_expr (dest),
609 fold_convert (TREE_TYPE (dest), ptr));
611 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
612 ? gfc_conv_descriptor_data_get (src) : src;
613 srcptr = unshare_expr (srcptr);
614 srcptr = fold_convert (pvoid_type_node, srcptr);
615 call = build_call_expr_loc (input_location,
616 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
617 srcptr, size);
618 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
619 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
621 tree tem = gfc_walk_alloc_comps (src, dest,
622 OMP_CLAUSE_DECL (clause),
623 WALK_ALLOC_COMPS_COPY_CTOR);
624 gfc_add_expr_to_block (&cond_block, tem);
626 then_b = gfc_finish_block (&cond_block);
628 gfc_init_block (&cond_block);
629 if (GFC_DESCRIPTOR_TYPE_P (type))
630 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest),
631 null_pointer_node);
632 else
633 gfc_add_modify (&cond_block, unshare_expr (dest),
634 build_zero_cst (TREE_TYPE (dest)));
635 else_b = gfc_finish_block (&cond_block);
637 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
638 unshare_expr (srcptr), null_pointer_node);
639 gfc_add_expr_to_block (&block,
640 build3_loc (input_location, COND_EXPR,
641 void_type_node, cond, then_b, else_b));
643 return gfc_finish_block (&block);
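/* Roughly, for
     integer, allocatable :: a(:)
     !$omp parallel firstprivate(a)
   the descriptor is copied from SRC, new storage is malloc'ed and the
   payload is memcpy'ed over when the outer A is allocated; otherwise the
   private copy is left unallocated (its data pointer set to NULL).  */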
646 /* Similarly, except use an intrinsic or pointer assignment operator
647 instead. */
649 tree
650 gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
652 tree type = TREE_TYPE (dest), ptr, size, call, nonalloc;
653 tree cond, then_b, else_b;
654 stmtblock_t block, cond_block, cond_block2, inner_block;
656 if ((! GFC_DESCRIPTOR_TYPE_P (type)
657 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
658 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
660 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
662 gfc_start_block (&block);
663 /* First dealloc any allocatable components in DEST. */
664 tree tem = gfc_walk_alloc_comps (dest, NULL_TREE,
665 OMP_CLAUSE_DECL (clause),
666 WALK_ALLOC_COMPS_DTOR);
667 gfc_add_expr_to_block (&block, tem);
668 /* Then copy over toplevel data. */
669 gfc_add_modify (&block, dest, src);
670 /* Finally allocate any allocatable components and copy. */
671 tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
672 WALK_ALLOC_COMPS_COPY_CTOR);
673 gfc_add_expr_to_block (&block, tem);
674 return gfc_finish_block (&block);
676 else
677 return build2_v (MODIFY_EXPR, dest, src);
680 gfc_start_block (&block);
682 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
684 then_b = gfc_walk_alloc_comps (dest, NULL_TREE, OMP_CLAUSE_DECL (clause),
685 WALK_ALLOC_COMPS_DTOR);
686 tree tem = fold_convert (pvoid_type_node,
687 GFC_DESCRIPTOR_TYPE_P (type)
688 ? gfc_conv_descriptor_data_get (dest) : dest);
689 tem = unshare_expr (tem);
690 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
691 tem, null_pointer_node);
692 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
693 then_b, build_empty_stmt (input_location));
694 gfc_add_expr_to_block (&block, tem);
697 gfc_init_block (&cond_block);
699 if (GFC_DESCRIPTOR_TYPE_P (type))
701 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
702 size = gfc_conv_descriptor_ubound_get (src, rank);
703 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
704 size,
705 gfc_conv_descriptor_lbound_get (src, rank));
706 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
707 size, gfc_index_one_node);
708 if (GFC_TYPE_ARRAY_RANK (type) > 1)
709 size = fold_build2_loc (input_location, MULT_EXPR,
710 gfc_array_index_type, size,
711 gfc_conv_descriptor_stride_get (src, rank));
712 tree esize = fold_convert (gfc_array_index_type,
713 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
714 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
715 size, esize);
716 size = unshare_expr (size);
717 size = gfc_evaluate_now (fold_convert (size_type_node, size),
718 &cond_block);
720 else
721 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
722 ptr = gfc_create_var (pvoid_type_node, NULL);
724 tree destptr = GFC_DESCRIPTOR_TYPE_P (type)
725 ? gfc_conv_descriptor_data_get (dest) : dest;
726 destptr = unshare_expr (destptr);
727 destptr = fold_convert (pvoid_type_node, destptr);
728 gfc_add_modify (&cond_block, ptr, destptr);
730 nonalloc = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
731 destptr, null_pointer_node);
732 cond = nonalloc;
733 if (GFC_DESCRIPTOR_TYPE_P (type))
735 int i;
736 for (i = 0; i < GFC_TYPE_ARRAY_RANK (type); i++)
738 tree rank = gfc_rank_cst[i];
739 tree tem = gfc_conv_descriptor_ubound_get (src, rank);
740 tem = fold_build2_loc (input_location, MINUS_EXPR,
741 gfc_array_index_type, tem,
742 gfc_conv_descriptor_lbound_get (src, rank));
743 tem = fold_build2_loc (input_location, PLUS_EXPR,
744 gfc_array_index_type, tem,
745 gfc_conv_descriptor_lbound_get (dest, rank));
746 tem = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
747 tem, gfc_conv_descriptor_ubound_get (dest,
748 rank));
749 cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
750 boolean_type_node, cond, tem);
754 gfc_init_block (&cond_block2);
756 if (GFC_DESCRIPTOR_TYPE_P (type))
758 gfc_init_block (&inner_block);
759 gfc_allocate_using_malloc (&inner_block, ptr, size, NULL_TREE);
760 then_b = gfc_finish_block (&inner_block);
762 gfc_init_block (&inner_block);
763 gfc_add_modify (&inner_block, ptr,
764 gfc_call_realloc (&inner_block, ptr, size));
765 else_b = gfc_finish_block (&inner_block);
767 gfc_add_expr_to_block (&cond_block2,
768 build3_loc (input_location, COND_EXPR,
769 void_type_node,
770 unshare_expr (nonalloc),
771 then_b, else_b));
772 gfc_add_modify (&cond_block2, dest, src);
773 gfc_conv_descriptor_data_set (&cond_block2, unshare_expr (dest), ptr);
775 else
777 gfc_allocate_using_malloc (&cond_block2, ptr, size, NULL_TREE);
778 gfc_add_modify (&cond_block2, unshare_expr (dest),
779 fold_convert (type, ptr));
781 then_b = gfc_finish_block (&cond_block2);
782 else_b = build_empty_stmt (input_location);
784 gfc_add_expr_to_block (&cond_block,
785 build3_loc (input_location, COND_EXPR,
786 void_type_node, unshare_expr (cond),
787 then_b, else_b));
789 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
790 ? gfc_conv_descriptor_data_get (src) : src;
791 srcptr = unshare_expr (srcptr);
792 srcptr = fold_convert (pvoid_type_node, srcptr);
793 call = build_call_expr_loc (input_location,
794 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
795 srcptr, size);
796 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
797 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
799 tree tem = gfc_walk_alloc_comps (src, dest,
800 OMP_CLAUSE_DECL (clause),
801 WALK_ALLOC_COMPS_COPY_CTOR);
802 gfc_add_expr_to_block (&cond_block, tem);
804 then_b = gfc_finish_block (&cond_block);
806 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_COPYIN)
808 gfc_init_block (&cond_block);
809 if (GFC_DESCRIPTOR_TYPE_P (type))
810 gfc_add_expr_to_block (&cond_block,
811 gfc_trans_dealloc_allocated (unshare_expr (dest),
812 false, NULL));
813 else
815 destptr = gfc_evaluate_now (destptr, &cond_block);
816 gfc_add_expr_to_block (&cond_block, gfc_call_free (destptr));
817 gfc_add_modify (&cond_block, unshare_expr (dest),
818 build_zero_cst (TREE_TYPE (dest)));
820 else_b = gfc_finish_block (&cond_block);
822 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
823 unshare_expr (srcptr), null_pointer_node);
824 gfc_add_expr_to_block (&block,
825 build3_loc (input_location, COND_EXPR,
826 void_type_node, cond,
827 then_b, else_b));
829 else
830 gfc_add_expr_to_block (&block, then_b);
832 return gfc_finish_block (&block);
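/* Roughly, for an allocatable array in a LASTPRIVATE or COPYIN clause,
   e.g.
     integer, allocatable :: a(:)
     !$omp parallel do lastprivate(a)
   DEST is (re)allocated whenever its current allocation status or shape
   does not match SRC, the payload is then memcpy'ed over, and for COPYIN
   only, DEST is deallocated when SRC is unallocated.  */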
835 static void
836 gfc_omp_linear_clause_add_loop (stmtblock_t *block, tree dest, tree src,
837 tree add, tree nelems)
839 stmtblock_t tmpblock;
840 tree desta, srca, index = gfc_create_var (gfc_array_index_type, "S");
841 nelems = gfc_evaluate_now (nelems, block);
843 gfc_init_block (&tmpblock);
844 if (TREE_CODE (TREE_TYPE (dest)) == ARRAY_TYPE)
846 desta = gfc_build_array_ref (dest, index, NULL);
847 srca = gfc_build_array_ref (src, index, NULL);
849 else
851 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest)));
852 tree idx = fold_build2 (MULT_EXPR, sizetype,
853 fold_convert (sizetype, index),
854 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest))));
855 desta = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
856 TREE_TYPE (dest), dest,
857 idx));
858 srca = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
859 TREE_TYPE (src), src,
860 idx));
862 gfc_add_modify (&tmpblock, desta,
863 fold_build2 (PLUS_EXPR, TREE_TYPE (desta),
864 srca, add));
866 gfc_loopinfo loop;
867 gfc_init_loopinfo (&loop);
868 loop.dimen = 1;
869 loop.from[0] = gfc_index_zero_node;
870 loop.loopvar[0] = index;
871 loop.to[0] = nelems;
872 gfc_trans_scalarizing_loops (&loop, &tmpblock);
873 gfc_add_block_to_block (block, &loop.pre);
876 /* Build and return code for a constructor of DEST that initializes
877 it to SRC plus ADD (ADD is scalar integer). */
879 tree
880 gfc_omp_clause_linear_ctor (tree clause, tree dest, tree src, tree add)
882 tree type = TREE_TYPE (dest), ptr, size, nelems = NULL_TREE;
883 stmtblock_t block;
885 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
887 gfc_start_block (&block);
888 add = gfc_evaluate_now (add, &block);
890 if ((! GFC_DESCRIPTOR_TYPE_P (type)
891 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
892 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
894 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
895 if (!TYPE_DOMAIN (type)
896 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
897 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
898 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
900 nelems = fold_build2 (EXACT_DIV_EXPR, sizetype,
901 TYPE_SIZE_UNIT (type),
902 TYPE_SIZE_UNIT (TREE_TYPE (type)));
903 nelems = size_binop (MINUS_EXPR, nelems, size_one_node);
905 else
906 nelems = array_type_nelts (type);
907 nelems = fold_convert (gfc_array_index_type, nelems);
909 gfc_omp_linear_clause_add_loop (&block, dest, src, add, nelems);
910 return gfc_finish_block (&block);
913 /* Allocatable arrays in LINEAR clauses need to be allocated
914 and copied from SRC. */
915 gfc_add_modify (&block, dest, src);
916 if (GFC_DESCRIPTOR_TYPE_P (type))
918 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
919 size = gfc_conv_descriptor_ubound_get (dest, rank);
920 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
921 size,
922 gfc_conv_descriptor_lbound_get (dest, rank));
923 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
924 size, gfc_index_one_node);
925 if (GFC_TYPE_ARRAY_RANK (type) > 1)
926 size = fold_build2_loc (input_location, MULT_EXPR,
927 gfc_array_index_type, size,
928 gfc_conv_descriptor_stride_get (dest, rank));
929 tree esize = fold_convert (gfc_array_index_type,
930 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
931 nelems = gfc_evaluate_now (unshare_expr (size), &block);
932 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
933 nelems, unshare_expr (esize));
934 size = gfc_evaluate_now (fold_convert (size_type_node, size),
935 &block);
936 nelems = fold_build2_loc (input_location, MINUS_EXPR,
937 gfc_array_index_type, nelems,
938 gfc_index_one_node);
940 else
941 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
942 ptr = gfc_create_var (pvoid_type_node, NULL);
943 gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
944 if (GFC_DESCRIPTOR_TYPE_P (type))
946 gfc_conv_descriptor_data_set (&block, unshare_expr (dest), ptr);
947 tree etype = gfc_get_element_type (type);
948 ptr = fold_convert (build_pointer_type (etype), ptr);
949 tree srcptr = gfc_conv_descriptor_data_get (unshare_expr (src));
950 srcptr = fold_convert (build_pointer_type (etype), srcptr);
951 gfc_omp_linear_clause_add_loop (&block, ptr, srcptr, add, nelems);
953 else
955 gfc_add_modify (&block, unshare_expr (dest),
956 fold_convert (TREE_TYPE (dest), ptr));
957 ptr = fold_convert (TREE_TYPE (dest), ptr);
958 tree dstm = build_fold_indirect_ref (ptr);
959 tree srcm = build_fold_indirect_ref (unshare_expr (src));
960 gfc_add_modify (&block, dstm,
961 fold_build2 (PLUS_EXPR, TREE_TYPE (add), srcm, add));
963 return gfc_finish_block (&block);
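/* Roughly, for a LINEAR clause on an allocatable array, e.g.
     !$omp simd linear(a:2)
   the privatized copy is allocated to SRC's shape and every element is
   initialized to the corresponding element of SRC plus ADD, the
   already-evaluated linear offset for this iteration.  */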
966 /* Build and return code destructing DECL. Return NULL if nothing
967 to be done. */
969 tree
970 gfc_omp_clause_dtor (tree clause, tree decl)
972 tree type = TREE_TYPE (decl), tem;
974 if ((! GFC_DESCRIPTOR_TYPE_P (type)
975 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
976 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
978 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
979 return gfc_walk_alloc_comps (decl, NULL_TREE,
980 OMP_CLAUSE_DECL (clause),
981 WALK_ALLOC_COMPS_DTOR);
982 return NULL_TREE;
985 if (GFC_DESCRIPTOR_TYPE_P (type))
986 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
987 to be deallocated if they were allocated. */
988 tem = gfc_trans_dealloc_allocated (decl, false, NULL);
989 else
990 tem = gfc_call_free (decl);
991 tem = gfc_omp_unshare_expr (tem);
993 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
995 stmtblock_t block;
996 tree then_b;
998 gfc_init_block (&block);
999 gfc_add_expr_to_block (&block,
1000 gfc_walk_alloc_comps (decl, NULL_TREE,
1001 OMP_CLAUSE_DECL (clause),
1002 WALK_ALLOC_COMPS_DTOR));
1003 gfc_add_expr_to_block (&block, tem);
1004 then_b = gfc_finish_block (&block);
1006 tem = fold_convert (pvoid_type_node,
1007 GFC_DESCRIPTOR_TYPE_P (type)
1008 ? gfc_conv_descriptor_data_get (decl) : decl);
1009 tem = unshare_expr (tem);
1010 tree cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1011 tem, null_pointer_node);
1012 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
1013 then_b, build_empty_stmt (input_location));
1015 return tem;
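/* Roughly, at the end of a construct with
     integer, allocatable :: a(:)
     !$omp parallel private(a)
   the private copy's storage (and any allocatable components) is freed
   here if it is still allocated; NULL_TREE is returned when DECL is
   neither allocatable nor has allocatable components, i.e. there is
   nothing to destruct.  */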
1019 void
1020 gfc_omp_finish_clause (tree c, gimple_seq *pre_p)
1022 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
1023 return;
1025 tree decl = OMP_CLAUSE_DECL (c);
1026 tree c2 = NULL_TREE, c3 = NULL_TREE, c4 = NULL_TREE;
1027 if (POINTER_TYPE_P (TREE_TYPE (decl)))
1029 if (!gfc_omp_privatize_by_reference (decl)
1030 && !GFC_DECL_GET_SCALAR_POINTER (decl)
1031 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
1032 && !GFC_DECL_CRAY_POINTEE (decl)
1033 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
1034 return;
1035 tree orig_decl = decl;
1036 c4 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1037 OMP_CLAUSE_MAP_KIND (c4) = OMP_CLAUSE_MAP_POINTER;
1038 OMP_CLAUSE_DECL (c4) = decl;
1039 OMP_CLAUSE_SIZE (c4) = size_int (0);
1040 decl = build_fold_indirect_ref (decl);
1041 OMP_CLAUSE_DECL (c) = decl;
1042 OMP_CLAUSE_SIZE (c) = NULL_TREE;
1043 if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
1044 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
1045 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
1047 c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1048 OMP_CLAUSE_MAP_KIND (c3) = OMP_CLAUSE_MAP_POINTER;
1049 OMP_CLAUSE_DECL (c3) = unshare_expr (decl);
1050 OMP_CLAUSE_SIZE (c3) = size_int (0);
1051 decl = build_fold_indirect_ref (decl);
1052 OMP_CLAUSE_DECL (c) = decl;
1055 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1057 stmtblock_t block;
1058 gfc_start_block (&block);
1059 tree type = TREE_TYPE (decl);
1060 tree ptr = gfc_conv_descriptor_data_get (decl);
1061 ptr = fold_convert (build_pointer_type (char_type_node), ptr);
1062 ptr = build_fold_indirect_ref (ptr);
1063 OMP_CLAUSE_DECL (c) = ptr;
1064 c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1065 OMP_CLAUSE_MAP_KIND (c2) = OMP_CLAUSE_MAP_TO_PSET;
1066 OMP_CLAUSE_DECL (c2) = decl;
1067 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (type);
1068 c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1069 OMP_CLAUSE_MAP_KIND (c3) = OMP_CLAUSE_MAP_POINTER;
1070 OMP_CLAUSE_DECL (c3) = gfc_conv_descriptor_data_get (decl);
1071 OMP_CLAUSE_SIZE (c3) = size_int (0);
1072 tree size = create_tmp_var (gfc_array_index_type, NULL);
1073 tree elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
1074 elemsz = fold_convert (gfc_array_index_type, elemsz);
1075 if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
1076 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT)
1078 stmtblock_t cond_block;
1079 tree tem, then_b, else_b, zero, cond;
1081 gfc_init_block (&cond_block);
1082 tem = gfc_full_array_size (&cond_block, decl,
1083 GFC_TYPE_ARRAY_RANK (type));
1084 gfc_add_modify (&cond_block, size, tem);
1085 gfc_add_modify (&cond_block, size,
1086 fold_build2 (MULT_EXPR, gfc_array_index_type,
1087 size, elemsz));
1088 then_b = gfc_finish_block (&cond_block);
1089 gfc_init_block (&cond_block);
1090 zero = build_int_cst (gfc_array_index_type, 0);
1091 gfc_add_modify (&cond_block, size, zero);
1092 else_b = gfc_finish_block (&cond_block);
1093 tem = gfc_conv_descriptor_data_get (decl);
1094 tem = fold_convert (pvoid_type_node, tem);
1095 cond = fold_build2_loc (input_location, NE_EXPR,
1096 boolean_type_node, tem, null_pointer_node);
1097 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
1098 void_type_node, cond,
1099 then_b, else_b));
1101 else
1103 gfc_add_modify (&block, size,
1104 gfc_full_array_size (&block, decl,
1105 GFC_TYPE_ARRAY_RANK (type)));
1106 gfc_add_modify (&block, size,
1107 fold_build2 (MULT_EXPR, gfc_array_index_type,
1108 size, elemsz));
1110 OMP_CLAUSE_SIZE (c) = size;
1111 tree stmt = gfc_finish_block (&block);
1112 gimplify_and_add (stmt, pre_p);
1114 tree last = c;
1115 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
1116 OMP_CLAUSE_SIZE (c)
1117 = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
1118 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
1119 if (c2)
1121 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (last);
1122 OMP_CLAUSE_CHAIN (last) = c2;
1123 last = c2;
1125 if (c3)
1127 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (last);
1128 OMP_CLAUSE_CHAIN (last) = c3;
1129 last = c3;
1131 if (c4)
1133 OMP_CLAUSE_CHAIN (c4) = OMP_CLAUSE_CHAIN (last);
1134 OMP_CLAUSE_CHAIN (last) = c4;
1135 last = c4;
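/* Roughly, for
     integer, allocatable :: a(:)
     !$omp target map(tofrom: a)
   the single user-written map clause is expanded above into a map of the
   array payload with a size computed at run time, an
   OMP_CLAUSE_MAP_TO_PSET map of the descriptor itself, and
   OMP_CLAUSE_MAP_POINTER maps so that the device copy of the descriptor
   (and, for by-reference decls, the reference) points at the device
   payload.  */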
1140 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1141 disregarded in OpenMP construct, because it is going to be
1142 remapped during OpenMP lowering. SHARED is true if DECL
1143 is going to be shared, false if it is going to be privatized. */
1145 bool
1146 gfc_omp_disregard_value_expr (tree decl, bool shared)
1148 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1149 && DECL_HAS_VALUE_EXPR_P (decl))
1151 tree value = DECL_VALUE_EXPR (decl);
1153 if (TREE_CODE (value) == COMPONENT_REF
1154 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1155 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1157 /* If variable in COMMON or EQUIVALENCE is privatized, return
1158 true, as just that variable is supposed to be privatized,
1159 not the whole COMMON or whole EQUIVALENCE.
1160 For shared variables in COMMON or EQUIVALENCE, let them be
1161 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1162 from the same COMMON or EQUIVALENCE just one sharing of the
1163 whole COMMON or EQUIVALENCE is enough. */
1164 return ! shared;
1168 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
1169 return ! shared;
1171 return false;
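/* For example, with
     common /blk/ i, j
     !$omp parallel private(i)
   I has a DECL_VALUE_EXPR pointing into the COMMON block; that value
   expression is disregarded when I is privatized (only I itself becomes
   private), but kept when I is merely shared, so several shared members
   of /blk/ gimplify to a single sharing of the whole block.  */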
1174 /* Return true if DECL that is shared iff SHARED is true should
1175 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1176 flag set. */
1178 bool
1179 gfc_omp_private_debug_clause (tree decl, bool shared)
1181 if (GFC_DECL_CRAY_POINTEE (decl))
1182 return true;
1184 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1185 && DECL_HAS_VALUE_EXPR_P (decl))
1187 tree value = DECL_VALUE_EXPR (decl);
1189 if (TREE_CODE (value) == COMPONENT_REF
1190 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1191 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1192 return shared;
1195 return false;
1198 /* Register language specific type size variables as potentially OpenMP
1199 firstprivate variables. */
1201 void
1202 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
1204 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
1206 int r;
1208 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
1209 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
1211 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
1212 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
1213 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
1215 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
1216 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
1221 static inline tree
1222 gfc_trans_add_clause (tree node, tree tail)
1224 OMP_CLAUSE_CHAIN (node) = tail;
1225 return node;
1228 static tree
1229 gfc_trans_omp_variable (gfc_symbol *sym, bool declare_simd)
1231 if (declare_simd)
1233 int cnt = 0;
1234 gfc_symbol *proc_sym;
1235 gfc_formal_arglist *f;
1237 gcc_assert (sym->attr.dummy);
1238 proc_sym = sym->ns->proc_name;
1239 if (proc_sym->attr.entry_master)
1240 ++cnt;
1241 if (gfc_return_by_reference (proc_sym))
1243 ++cnt;
1244 if (proc_sym->ts.type == BT_CHARACTER)
1245 ++cnt;
1247 for (f = gfc_sym_get_dummy_args (proc_sym); f; f = f->next)
1248 if (f->sym == sym)
1249 break;
1250 else if (f->sym)
1251 ++cnt;
1252 gcc_assert (f);
1253 return build_int_cst (integer_type_node, cnt);
1256 tree t = gfc_get_symbol_decl (sym);
1257 tree parent_decl;
1258 int parent_flag;
1259 bool return_value;
1260 bool alternate_entry;
1261 bool entry_master;
1263 return_value = sym->attr.function && sym->result == sym;
1264 alternate_entry = sym->attr.function && sym->attr.entry
1265 && sym->result == sym;
1266 entry_master = sym->attr.result
1267 && sym->ns->proc_name->attr.entry_master
1268 && !gfc_return_by_reference (sym->ns->proc_name);
1269 parent_decl = current_function_decl
1270 ? DECL_CONTEXT (current_function_decl) : NULL_TREE;
1272 if ((t == parent_decl && return_value)
1273 || (sym->ns && sym->ns->proc_name
1274 && sym->ns->proc_name->backend_decl == parent_decl
1275 && (alternate_entry || entry_master)))
1276 parent_flag = 1;
1277 else
1278 parent_flag = 0;
1280 /* Special case for assigning the return value of a function.
1281 Self recursive functions must have an explicit return value. */
1282 if (return_value && (t == current_function_decl || parent_flag))
1283 t = gfc_get_fake_result_decl (sym, parent_flag);
1285 /* Similarly for alternate entry points. */
1286 else if (alternate_entry
1287 && (sym->ns->proc_name->backend_decl == current_function_decl
1288 || parent_flag))
1290 gfc_entry_list *el = NULL;
1292 for (el = sym->ns->entries; el; el = el->next)
1293 if (sym == el->sym)
1295 t = gfc_get_fake_result_decl (sym, parent_flag);
1296 break;
1300 else if (entry_master
1301 && (sym->ns->proc_name->backend_decl == current_function_decl
1302 || parent_flag))
1303 t = gfc_get_fake_result_decl (sym, parent_flag);
1305 return t;
1308 static tree
1309 gfc_trans_omp_variable_list (enum omp_clause_code code,
1310 gfc_omp_namelist *namelist, tree list,
1311 bool declare_simd)
1313 for (; namelist != NULL; namelist = namelist->next)
1314 if (namelist->sym->attr.referenced || declare_simd)
1316 tree t = gfc_trans_omp_variable (namelist->sym, declare_simd);
1317 if (t != error_mark_node)
1319 tree node = build_omp_clause (input_location, code);
1320 OMP_CLAUSE_DECL (node) = t;
1321 list = gfc_trans_add_clause (node, list);
1324 return list;
1327 struct omp_udr_find_orig_data
1329 gfc_omp_udr *omp_udr;
1330 bool omp_orig_seen;
1333 static int
1334 omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
1335 void *data)
1337 struct omp_udr_find_orig_data *cd = (struct omp_udr_find_orig_data *) data;
1338 if ((*e)->expr_type == EXPR_VARIABLE
1339 && (*e)->symtree->n.sym == cd->omp_udr->omp_orig)
1340 cd->omp_orig_seen = true;
1342 return 0;
1345 static void
1346 gfc_trans_omp_array_reduction_or_udr (tree c, gfc_omp_namelist *n, locus where)
1348 gfc_symbol *sym = n->sym;
1349 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
1350 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
1351 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
1352 gfc_symbol omp_var_copy[4];
1353 gfc_expr *e1, *e2, *e3, *e4;
1354 gfc_ref *ref;
1355 tree decl, backend_decl, stmt, type, outer_decl;
1356 locus old_loc = gfc_current_locus;
1357 const char *iname;
1358 bool t;
1359 gfc_omp_udr *udr = n->udr ? n->udr->udr : NULL;
1361 decl = OMP_CLAUSE_DECL (c);
1362 gfc_current_locus = where;
1363 type = TREE_TYPE (decl);
1364 outer_decl = create_tmp_var_raw (type, NULL);
1365 if (TREE_CODE (decl) == PARM_DECL
1366 && TREE_CODE (type) == REFERENCE_TYPE
1367 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
1368 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
1370 decl = build_fold_indirect_ref (decl);
1371 type = TREE_TYPE (type);
1374 /* Create a fake symbol for init value. */
1375 memset (&init_val_sym, 0, sizeof (init_val_sym));
1376 init_val_sym.ns = sym->ns;
1377 init_val_sym.name = sym->name;
1378 init_val_sym.ts = sym->ts;
1379 init_val_sym.attr.referenced = 1;
1380 init_val_sym.declared_at = where;
1381 init_val_sym.attr.flavor = FL_VARIABLE;
1382 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1383 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
1384 else if (udr->initializer_ns)
1385 backend_decl = NULL;
1386 else
1387 switch (sym->ts.type)
1389 case BT_LOGICAL:
1390 case BT_INTEGER:
1391 case BT_REAL:
1392 case BT_COMPLEX:
1393 backend_decl = build_zero_cst (gfc_sym_type (&init_val_sym));
1394 break;
1395 default:
1396 backend_decl = NULL_TREE;
1397 break;
1399 init_val_sym.backend_decl = backend_decl;
1401 /* Create a fake symbol for the outer array reference. */
1402 outer_sym = *sym;
1403 if (sym->as)
1404 outer_sym.as = gfc_copy_array_spec (sym->as);
1405 outer_sym.attr.dummy = 0;
1406 outer_sym.attr.result = 0;
1407 outer_sym.attr.flavor = FL_VARIABLE;
1408 outer_sym.backend_decl = outer_decl;
1409 if (decl != OMP_CLAUSE_DECL (c))
1410 outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);
1412 /* Create fake symtrees for it. */
1413 symtree1 = gfc_new_symtree (&root1, sym->name);
1414 symtree1->n.sym = sym;
1415 gcc_assert (symtree1 == root1);
1417 symtree2 = gfc_new_symtree (&root2, sym->name);
1418 symtree2->n.sym = &init_val_sym;
1419 gcc_assert (symtree2 == root2);
1421 symtree3 = gfc_new_symtree (&root3, sym->name);
1422 symtree3->n.sym = &outer_sym;
1423 gcc_assert (symtree3 == root3);
1425 memset (omp_var_copy, 0, sizeof omp_var_copy);
1426 if (udr)
1428 omp_var_copy[0] = *udr->omp_out;
1429 omp_var_copy[1] = *udr->omp_in;
1430 *udr->omp_out = outer_sym;
1431 *udr->omp_in = *sym;
1432 if (udr->initializer_ns)
1434 omp_var_copy[2] = *udr->omp_priv;
1435 omp_var_copy[3] = *udr->omp_orig;
1436 *udr->omp_priv = *sym;
1437 *udr->omp_orig = outer_sym;
1441 /* Create expressions. */
1442 e1 = gfc_get_expr ();
1443 e1->expr_type = EXPR_VARIABLE;
1444 e1->where = where;
1445 e1->symtree = symtree1;
1446 e1->ts = sym->ts;
1447 if (sym->attr.dimension)
1449 e1->ref = ref = gfc_get_ref ();
1450 ref->type = REF_ARRAY;
1451 ref->u.ar.where = where;
1452 ref->u.ar.as = sym->as;
1453 ref->u.ar.type = AR_FULL;
1454 ref->u.ar.dimen = 0;
1456 t = gfc_resolve_expr (e1);
1457 gcc_assert (t);
1459 e2 = NULL;
1460 if (backend_decl != NULL_TREE)
1462 e2 = gfc_get_expr ();
1463 e2->expr_type = EXPR_VARIABLE;
1464 e2->where = where;
1465 e2->symtree = symtree2;
1466 e2->ts = sym->ts;
1467 t = gfc_resolve_expr (e2);
1468 gcc_assert (t);
1470 else if (udr->initializer_ns == NULL)
1472 gcc_assert (sym->ts.type == BT_DERIVED);
1473 e2 = gfc_default_initializer (&sym->ts);
1474 gcc_assert (e2);
1475 t = gfc_resolve_expr (e2);
1476 gcc_assert (t);
1478 else if (n->udr->initializer->op == EXEC_ASSIGN)
1480 e2 = gfc_copy_expr (n->udr->initializer->expr2);
1481 t = gfc_resolve_expr (e2);
1482 gcc_assert (t);
1484 if (udr && udr->initializer_ns)
1486 struct omp_udr_find_orig_data cd;
1487 cd.omp_udr = udr;
1488 cd.omp_orig_seen = false;
1489 gfc_code_walker (&n->udr->initializer,
1490 gfc_dummy_code_callback, omp_udr_find_orig, &cd);
1491 if (cd.omp_orig_seen)
1492 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
1495 e3 = gfc_copy_expr (e1);
1496 e3->symtree = symtree3;
1497 t = gfc_resolve_expr (e3);
1498 gcc_assert (t);
1500 iname = NULL;
1501 e4 = NULL;
1502 switch (OMP_CLAUSE_REDUCTION_CODE (c))
1504 case PLUS_EXPR:
1505 case MINUS_EXPR:
1506 e4 = gfc_add (e3, e1);
1507 break;
1508 case MULT_EXPR:
1509 e4 = gfc_multiply (e3, e1);
1510 break;
1511 case TRUTH_ANDIF_EXPR:
1512 e4 = gfc_and (e3, e1);
1513 break;
1514 case TRUTH_ORIF_EXPR:
1515 e4 = gfc_or (e3, e1);
1516 break;
1517 case EQ_EXPR:
1518 e4 = gfc_eqv (e3, e1);
1519 break;
1520 case NE_EXPR:
1521 e4 = gfc_neqv (e3, e1);
1522 break;
1523 case MIN_EXPR:
1524 iname = "min";
1525 break;
1526 case MAX_EXPR:
1527 iname = "max";
1528 break;
1529 case BIT_AND_EXPR:
1530 iname = "iand";
1531 break;
1532 case BIT_IOR_EXPR:
1533 iname = "ior";
1534 break;
1535 case BIT_XOR_EXPR:
1536 iname = "ieor";
1537 break;
1538 case ERROR_MARK:
1539 if (n->udr->combiner->op == EXEC_ASSIGN)
1541 gfc_free_expr (e3);
1542 e3 = gfc_copy_expr (n->udr->combiner->expr1);
1543 e4 = gfc_copy_expr (n->udr->combiner->expr2);
1544 t = gfc_resolve_expr (e3);
1545 gcc_assert (t);
1546 t = gfc_resolve_expr (e4);
1547 gcc_assert (t);
1549 break;
1550 default:
1551 gcc_unreachable ();
1553 if (iname != NULL)
1555 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
1556 intrinsic_sym.ns = sym->ns;
1557 intrinsic_sym.name = iname;
1558 intrinsic_sym.ts = sym->ts;
1559 intrinsic_sym.attr.referenced = 1;
1560 intrinsic_sym.attr.intrinsic = 1;
1561 intrinsic_sym.attr.function = 1;
1562 intrinsic_sym.result = &intrinsic_sym;
1563 intrinsic_sym.declared_at = where;
1565 symtree4 = gfc_new_symtree (&root4, iname);
1566 symtree4->n.sym = &intrinsic_sym;
1567 gcc_assert (symtree4 == root4);
1569 e4 = gfc_get_expr ();
1570 e4->expr_type = EXPR_FUNCTION;
1571 e4->where = where;
1572 e4->symtree = symtree4;
1573 e4->value.function.actual = gfc_get_actual_arglist ();
1574 e4->value.function.actual->expr = e3;
1575 e4->value.function.actual->next = gfc_get_actual_arglist ();
1576 e4->value.function.actual->next->expr = e1;
1578 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1580 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1581 e1 = gfc_copy_expr (e1);
1582 e3 = gfc_copy_expr (e3);
1583 t = gfc_resolve_expr (e4);
1584 gcc_assert (t);
1587 /* Create the init statement list. */
1588 pushlevel ();
1589 if (e2)
1590 stmt = gfc_trans_assignment (e1, e2, false, false);
1591 else
1592 stmt = gfc_trans_call (n->udr->initializer, false,
1593 NULL_TREE, NULL_TREE, false);
1594 if (TREE_CODE (stmt) != BIND_EXPR)
1595 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1596 else
1597 poplevel (0, 0);
1598 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
1600 /* Create the merge statement list. */
1601 pushlevel ();
1602 if (e4)
1603 stmt = gfc_trans_assignment (e3, e4, false, true);
1604 else
1605 stmt = gfc_trans_call (n->udr->combiner, false,
1606 NULL_TREE, NULL_TREE, false);
1607 if (TREE_CODE (stmt) != BIND_EXPR)
1608 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1609 else
1610 poplevel (0, 0);
1611 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
1613 /* And stick the placeholder VAR_DECL into the clause as well. */
1614 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;
1616 gfc_current_locus = old_loc;
1618 gfc_free_expr (e1);
1619 if (e2)
1620 gfc_free_expr (e2);
1621 gfc_free_expr (e3);
1622 if (e4)
1623 gfc_free_expr (e4);
1624 free (symtree1);
1625 free (symtree2);
1626 free (symtree3);
1627 free (symtree4);
1628 if (outer_sym.as)
1629 gfc_free_array_spec (outer_sym.as);
1631 if (udr)
1633 *udr->omp_out = omp_var_copy[0];
1634 *udr->omp_in = omp_var_copy[1];
1635 if (udr->initializer_ns)
1637 *udr->omp_priv = omp_var_copy[2];
1638 *udr->omp_orig = omp_var_copy[3];
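/* Roughly, for an array reduction such as
     integer :: a(n)
     !$omp parallel do reduction(+:a)
   the helper above builds the REDUCTION_INIT statement list ("A = 0")
   and the REDUCTION_MERGE list ("outer_A = outer_A + A") from the fake
   symbols and expressions created here; for !$omp declare reduction the
   user-supplied initializer and combiner are translated instead.  */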
1643 static tree
1644 gfc_trans_omp_reduction_list (gfc_omp_namelist *namelist, tree list,
1645 locus where)
1647 for (; namelist != NULL; namelist = namelist->next)
1648 if (namelist->sym->attr.referenced)
1650 tree t = gfc_trans_omp_variable (namelist->sym, false);
1651 if (t != error_mark_node)
1653 tree node = build_omp_clause (where.lb->location,
1654 OMP_CLAUSE_REDUCTION);
1655 OMP_CLAUSE_DECL (node) = t;
1656 switch (namelist->u.reduction_op)
1658 case OMP_REDUCTION_PLUS:
1659 OMP_CLAUSE_REDUCTION_CODE (node) = PLUS_EXPR;
1660 break;
1661 case OMP_REDUCTION_MINUS:
1662 OMP_CLAUSE_REDUCTION_CODE (node) = MINUS_EXPR;
1663 break;
1664 case OMP_REDUCTION_TIMES:
1665 OMP_CLAUSE_REDUCTION_CODE (node) = MULT_EXPR;
1666 break;
1667 case OMP_REDUCTION_AND:
1668 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ANDIF_EXPR;
1669 break;
1670 case OMP_REDUCTION_OR:
1671 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ORIF_EXPR;
1672 break;
1673 case OMP_REDUCTION_EQV:
1674 OMP_CLAUSE_REDUCTION_CODE (node) = EQ_EXPR;
1675 break;
1676 case OMP_REDUCTION_NEQV:
1677 OMP_CLAUSE_REDUCTION_CODE (node) = NE_EXPR;
1678 break;
1679 case OMP_REDUCTION_MAX:
1680 OMP_CLAUSE_REDUCTION_CODE (node) = MAX_EXPR;
1681 break;
1682 case OMP_REDUCTION_MIN:
1683 OMP_CLAUSE_REDUCTION_CODE (node) = MIN_EXPR;
1684 break;
1685 case OMP_REDUCTION_IAND:
1686 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_AND_EXPR;
1687 break;
1688 case OMP_REDUCTION_IOR:
1689 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_IOR_EXPR;
1690 break;
1691 case OMP_REDUCTION_IEOR:
1692 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_XOR_EXPR;
1693 break;
1694 case OMP_REDUCTION_USER:
1695 OMP_CLAUSE_REDUCTION_CODE (node) = ERROR_MARK;
1696 break;
1697 default:
1698 gcc_unreachable ();
1700 if (namelist->sym->attr.dimension
1701 || namelist->u.reduction_op == OMP_REDUCTION_USER
1702 || namelist->sym->attr.allocatable)
1703 gfc_trans_omp_array_reduction_or_udr (node, namelist, where);
1704 list = gfc_trans_add_clause (node, list);
1707 return list;
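/* For example, reduction(max:x) is recorded with MAX_EXPR and, when X is
   an array or allocatable, expanded via the array/UDR helper above into
   a call of the MAX intrinsic; reduction(.and.:l) maps to
   TRUTH_ANDIF_EXPR, and a user-defined reduction is recorded as
   ERROR_MARK and expanded from its DECLARE REDUCTION combiner.  */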
1710 static tree
1711 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
1712 locus where, bool declare_simd = false)
1714 tree omp_clauses = NULL_TREE, chunk_size, c;
1715 int list;
1716 enum omp_clause_code clause_code;
1717 gfc_se se;
1719 if (clauses == NULL)
1720 return NULL_TREE;
1722 for (list = 0; list < OMP_LIST_NUM; list++)
1724 gfc_omp_namelist *n = clauses->lists[list];
1726 if (n == NULL)
1727 continue;
1728 switch (list)
1730 case OMP_LIST_REDUCTION:
1731 omp_clauses = gfc_trans_omp_reduction_list (n, omp_clauses, where);
1732 break;
1733 case OMP_LIST_PRIVATE:
1734 clause_code = OMP_CLAUSE_PRIVATE;
1735 goto add_clause;
1736 case OMP_LIST_SHARED:
1737 clause_code = OMP_CLAUSE_SHARED;
1738 goto add_clause;
1739 case OMP_LIST_FIRSTPRIVATE:
1740 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
1741 goto add_clause;
1742 case OMP_LIST_LASTPRIVATE:
1743 clause_code = OMP_CLAUSE_LASTPRIVATE;
1744 goto add_clause;
1745 case OMP_LIST_COPYIN:
1746 clause_code = OMP_CLAUSE_COPYIN;
1747 goto add_clause;
1748 case OMP_LIST_COPYPRIVATE:
1749 clause_code = OMP_CLAUSE_COPYPRIVATE;
1750 goto add_clause;
1751 case OMP_LIST_UNIFORM:
1752 clause_code = OMP_CLAUSE_UNIFORM;
1753 /* FALLTHROUGH */
1754 add_clause:
1755 omp_clauses
1756 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses,
1757 declare_simd);
1758 break;
1759 case OMP_LIST_ALIGNED:
1760 for (; n != NULL; n = n->next)
1761 if (n->sym->attr.referenced || declare_simd)
1763 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1764 if (t != error_mark_node)
1766 tree node = build_omp_clause (input_location,
1767 OMP_CLAUSE_ALIGNED);
1768 OMP_CLAUSE_DECL (node) = t;
1769 if (n->expr)
1771 tree alignment_var;
1773 if (block == NULL)
1774 alignment_var = gfc_conv_constant_to_tree (n->expr);
1775 else
1777 gfc_init_se (&se, NULL);
1778 gfc_conv_expr (&se, n->expr);
1779 gfc_add_block_to_block (block, &se.pre);
1780 alignment_var = gfc_evaluate_now (se.expr, block);
1781 gfc_add_block_to_block (block, &se.post);
1783 OMP_CLAUSE_ALIGNED_ALIGNMENT (node) = alignment_var;
1785 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1788 break;
1789 case OMP_LIST_LINEAR:
1791 gfc_expr *last_step_expr = NULL;
1792 tree last_step = NULL_TREE;
1794 for (; n != NULL; n = n->next)
1796 if (n->expr)
1798 last_step_expr = n->expr;
1799 last_step = NULL_TREE;
1801 if (n->sym->attr.referenced || declare_simd)
1803 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1804 if (t != error_mark_node)
1806 tree node = build_omp_clause (input_location,
1807 OMP_CLAUSE_LINEAR);
1808 OMP_CLAUSE_DECL (node) = t;
1809 if (last_step_expr && last_step == NULL_TREE)
1811 if (block == NULL)
1812 last_step
1813 = gfc_conv_constant_to_tree (last_step_expr);
1814 else
1816 gfc_init_se (&se, NULL);
1817 gfc_conv_expr (&se, last_step_expr);
1818 gfc_add_block_to_block (block, &se.pre);
1819 last_step = gfc_evaluate_now (se.expr, block);
1820 gfc_add_block_to_block (block, &se.post);
1823 OMP_CLAUSE_LINEAR_STEP (node)
1824 = fold_convert (gfc_typenode_for_spec (&n->sym->ts),
1825 last_step);
1826 if (n->sym->attr.dimension || n->sym->attr.allocatable)
1827 OMP_CLAUSE_LINEAR_ARRAY (node) = 1;
1828 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1833 break;
1834 case OMP_LIST_DEPEND:
1835 for (; n != NULL; n = n->next)
1837 if (!n->sym->attr.referenced)
1838 continue;
1840 tree node = build_omp_clause (input_location, OMP_CLAUSE_DEPEND);
1841 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1843 tree decl = gfc_get_symbol_decl (n->sym);
1844 if (gfc_omp_privatize_by_reference (decl))
1845 decl = build_fold_indirect_ref (decl);
1846 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1848 decl = gfc_conv_descriptor_data_get (decl);
1849 decl = fold_convert (build_pointer_type (char_type_node),
1850 decl);
1851 decl = build_fold_indirect_ref (decl);
1853 else if (DECL_P (decl))
1854 TREE_ADDRESSABLE (decl) = 1;
1855 OMP_CLAUSE_DECL (node) = decl;
1857 else
1859 tree ptr;
1860 gfc_init_se (&se, NULL);
1861 if (n->expr->ref->u.ar.type == AR_ELEMENT)
1863 gfc_conv_expr_reference (&se, n->expr);
1864 ptr = se.expr;
1866 else
1868 gfc_conv_expr_descriptor (&se, n->expr);
1869 ptr = gfc_conv_array_data (se.expr);
1871 gfc_add_block_to_block (block, &se.pre);
1872 gfc_add_block_to_block (block, &se.post);
1873 ptr = fold_convert (build_pointer_type (char_type_node),
1874 ptr);
1875 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
1877 switch (n->u.depend_op)
1879 case OMP_DEPEND_IN:
1880 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_IN;
1881 break;
1882 case OMP_DEPEND_OUT:
1883 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_OUT;
1884 break;
1885 case OMP_DEPEND_INOUT:
1886 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_INOUT;
1887 break;
1888 default:
1889 gcc_unreachable ();
1891 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1893 break;
1894 case OMP_LIST_MAP:
1895 for (; n != NULL; n = n->next)
1897 if (!n->sym->attr.referenced)
1898 continue;
1900 tree node = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1901 tree node2 = NULL_TREE;
1902 tree node3 = NULL_TREE;
1903 tree node4 = NULL_TREE;
1904 tree decl = gfc_get_symbol_decl (n->sym);
1905 if (DECL_P (decl))
1906 TREE_ADDRESSABLE (decl) = 1;
1907 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1909 if (POINTER_TYPE_P (TREE_TYPE (decl))
1910 && (gfc_omp_privatize_by_reference (decl)
1911 || GFC_DECL_GET_SCALAR_POINTER (decl)
1912 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
1913 || GFC_DECL_CRAY_POINTEE (decl)
1914 || GFC_DESCRIPTOR_TYPE_P
1915 (TREE_TYPE (TREE_TYPE (decl)))))
1917 tree orig_decl = decl;
1918 node4 = build_omp_clause (input_location,
1919 OMP_CLAUSE_MAP);
1920 OMP_CLAUSE_MAP_KIND (node4) = OMP_CLAUSE_MAP_POINTER;
1921 OMP_CLAUSE_DECL (node4) = decl;
1922 OMP_CLAUSE_SIZE (node4) = size_int (0);
1923 decl = build_fold_indirect_ref (decl);
1924 if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
1925 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
1926 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
1928 node3 = build_omp_clause (input_location,
1929 OMP_CLAUSE_MAP);
1930 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
1931 OMP_CLAUSE_DECL (node3) = decl;
1932 OMP_CLAUSE_SIZE (node3) = size_int (0);
1933 decl = build_fold_indirect_ref (decl);
1936 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1938 tree type = TREE_TYPE (decl);
1939 tree ptr = gfc_conv_descriptor_data_get (decl);
1940 ptr = fold_convert (build_pointer_type (char_type_node),
1941 ptr);
1942 ptr = build_fold_indirect_ref (ptr);
1943 OMP_CLAUSE_DECL (node) = ptr;
1944 node2 = build_omp_clause (input_location,
1945 OMP_CLAUSE_MAP);
1946 OMP_CLAUSE_MAP_KIND (node2) = OMP_CLAUSE_MAP_TO_PSET;
1947 OMP_CLAUSE_DECL (node2) = decl;
1948 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
1949 node3 = build_omp_clause (input_location,
1950 OMP_CLAUSE_MAP);
1951 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
1952 OMP_CLAUSE_DECL (node3)
1953 = gfc_conv_descriptor_data_get (decl);
1954 OMP_CLAUSE_SIZE (node3) = size_int (0);
1955 if (n->sym->attr.pointer)
1957 stmtblock_t cond_block;
1958 tree size
1959 = gfc_create_var (gfc_array_index_type, NULL);
1960 tree tem, then_b, else_b, zero, cond;
1962 gfc_init_block (&cond_block);
1963 tem
1964 = gfc_full_array_size (&cond_block, decl,
1965 GFC_TYPE_ARRAY_RANK (type));
1966 gfc_add_modify (&cond_block, size, tem);
1967 then_b = gfc_finish_block (&cond_block);
1968 gfc_init_block (&cond_block);
1969 zero = build_int_cst (gfc_array_index_type, 0);
1970 gfc_add_modify (&cond_block, size, zero);
1971 else_b = gfc_finish_block (&cond_block);
1972 tem = gfc_conv_descriptor_data_get (decl);
1973 tem = fold_convert (pvoid_type_node, tem);
1974 cond = fold_build2_loc (input_location, NE_EXPR,
1975 boolean_type_node,
1976 tem, null_pointer_node);
1977 gfc_add_expr_to_block (block,
1978 build3_loc (input_location,
1979 COND_EXPR,
1980 void_type_node,
1981 cond, then_b,
1982 else_b));
1983 OMP_CLAUSE_SIZE (node) = size;
1985 else
1986 OMP_CLAUSE_SIZE (node)
1987 = gfc_full_array_size (block, decl,
1988 GFC_TYPE_ARRAY_RANK (type));
1989 tree elemsz
1990 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
1991 elemsz = fold_convert (gfc_array_index_type, elemsz);
1992 OMP_CLAUSE_SIZE (node)
1993 = fold_build2 (MULT_EXPR, gfc_array_index_type,
1994 OMP_CLAUSE_SIZE (node), elemsz);
1996 else
1997 OMP_CLAUSE_DECL (node) = decl;
1999 else
2001 tree ptr, ptr2;
2002 gfc_init_se (&se, NULL);
2003 if (n->expr->ref->u.ar.type == AR_ELEMENT)
2005 gfc_conv_expr_reference (&se, n->expr);
2006 gfc_add_block_to_block (block, &se.pre);
2007 ptr = se.expr;
2008 OMP_CLAUSE_SIZE (node)
2009 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
2011 else
2013 gfc_conv_expr_descriptor (&se, n->expr);
2014 ptr = gfc_conv_array_data (se.expr);
2015 tree type = TREE_TYPE (se.expr);
2016 gfc_add_block_to_block (block, &se.pre);
2017 OMP_CLAUSE_SIZE (node)
2018 = gfc_full_array_size (block, se.expr,
2019 GFC_TYPE_ARRAY_RANK (type));
2020 tree elemsz
2021 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2022 elemsz = fold_convert (gfc_array_index_type, elemsz);
2023 OMP_CLAUSE_SIZE (node)
2024 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2025 OMP_CLAUSE_SIZE (node), elemsz);
2027 gfc_add_block_to_block (block, &se.post);
2028 ptr = fold_convert (build_pointer_type (char_type_node),
2029 ptr);
2030 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
2032 if (POINTER_TYPE_P (TREE_TYPE (decl))
2033 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
2035 node4 = build_omp_clause (input_location,
2036 OMP_CLAUSE_MAP);
2037 OMP_CLAUSE_MAP_KIND (node4) = OMP_CLAUSE_MAP_POINTER;
2038 OMP_CLAUSE_DECL (node4) = decl;
2039 OMP_CLAUSE_SIZE (node4) = size_int (0);
2040 decl = build_fold_indirect_ref (decl);
2042 ptr = fold_convert (sizetype, ptr);
2043 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2045 tree type = TREE_TYPE (decl);
2046 ptr2 = gfc_conv_descriptor_data_get (decl);
2047 node2 = build_omp_clause (input_location,
2048 OMP_CLAUSE_MAP);
2049 OMP_CLAUSE_MAP_KIND (node2) = OMP_CLAUSE_MAP_TO_PSET;
2050 OMP_CLAUSE_DECL (node2) = decl;
2051 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
2052 node3 = build_omp_clause (input_location,
2053 OMP_CLAUSE_MAP);
2054 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
2055 OMP_CLAUSE_DECL (node3)
2056 = gfc_conv_descriptor_data_get (decl);
2058 else
2060 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
2061 ptr2 = build_fold_addr_expr (decl);
2062 else
2064 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl)));
2065 ptr2 = decl;
2067 node3 = build_omp_clause (input_location,
2068 OMP_CLAUSE_MAP);
2069 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
2070 OMP_CLAUSE_DECL (node3) = decl;
2072 ptr2 = fold_convert (sizetype, ptr2);
2073 OMP_CLAUSE_SIZE (node3)
2074 = fold_build2 (MINUS_EXPR, sizetype, ptr, ptr2);
2076 switch (n->u.map_op)
2078 case OMP_MAP_ALLOC:
2079 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_ALLOC;
2080 break;
2081 case OMP_MAP_TO:
2082 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_TO;
2083 break;
2084 case OMP_MAP_FROM:
2085 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_FROM;
2086 break;
2087 case OMP_MAP_TOFROM:
2088 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_TOFROM;
2089 break;
2090 default:
2091 gcc_unreachable ();
2093 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2094 if (node2)
2095 omp_clauses = gfc_trans_add_clause (node2, omp_clauses);
2096 if (node3)
2097 omp_clauses = gfc_trans_add_clause (node3, omp_clauses);
2098 if (node4)
2099 omp_clauses = gfc_trans_add_clause (node4, omp_clauses);
2101 break;
2102 case OMP_LIST_TO:
2103 case OMP_LIST_FROM:
2104 for (; n != NULL; n = n->next)
2106 if (!n->sym->attr.referenced)
2107 continue;
2109 tree node = build_omp_clause (input_location,
2110 list == OMP_LIST_TO
2111 ? OMP_CLAUSE_TO : OMP_CLAUSE_FROM);
2112 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
2114 tree decl = gfc_get_symbol_decl (n->sym);
2115 if (gfc_omp_privatize_by_reference (decl))
2116 decl = build_fold_indirect_ref (decl);
2117 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2119 tree type = TREE_TYPE (decl);
2120 tree ptr = gfc_conv_descriptor_data_get (decl);
2121 ptr = fold_convert (build_pointer_type (char_type_node),
2122 ptr);
2123 ptr = build_fold_indirect_ref (ptr);
2124 OMP_CLAUSE_DECL (node) = ptr;
2125 OMP_CLAUSE_SIZE (node)
2126 = gfc_full_array_size (block, decl,
2127 GFC_TYPE_ARRAY_RANK (type));
2128 tree elemsz
2129 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2130 elemsz = fold_convert (gfc_array_index_type, elemsz);
2131 OMP_CLAUSE_SIZE (node)
2132 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2133 OMP_CLAUSE_SIZE (node), elemsz);
2135 else
2136 OMP_CLAUSE_DECL (node) = decl;
2138 else
2140 tree ptr;
2141 gfc_init_se (&se, NULL);
2142 if (n->expr->ref->u.ar.type == AR_ELEMENT)
2144 gfc_conv_expr_reference (&se, n->expr);
2145 ptr = se.expr;
2146 gfc_add_block_to_block (block, &se.pre);
2147 OMP_CLAUSE_SIZE (node)
2148 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
2150 else
2152 gfc_conv_expr_descriptor (&se, n->expr);
2153 ptr = gfc_conv_array_data (se.expr);
2154 tree type = TREE_TYPE (se.expr);
2155 gfc_add_block_to_block (block, &se.pre);
2156 OMP_CLAUSE_SIZE (node)
2157 = gfc_full_array_size (block, se.expr,
2158 GFC_TYPE_ARRAY_RANK (type));
2159 tree elemsz
2160 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2161 elemsz = fold_convert (gfc_array_index_type, elemsz);
2162 OMP_CLAUSE_SIZE (node)
2163 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2164 OMP_CLAUSE_SIZE (node), elemsz);
2166 gfc_add_block_to_block (block, &se.post);
2167 ptr = fold_convert (build_pointer_type (char_type_node),
2168 ptr);
2169 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
2171 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2173 break;
2174 default:
2175 break;
2179 if (clauses->if_expr)
2181 tree if_var;
2183 gfc_init_se (&se, NULL);
2184 gfc_conv_expr (&se, clauses->if_expr);
2185 gfc_add_block_to_block (block, &se.pre);
2186 if_var = gfc_evaluate_now (se.expr, block);
2187 gfc_add_block_to_block (block, &se.post);
2189 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
2190 OMP_CLAUSE_IF_EXPR (c) = if_var;
2191 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2194 if (clauses->final_expr)
2196 tree final_var;
2198 gfc_init_se (&se, NULL);
2199 gfc_conv_expr (&se, clauses->final_expr);
2200 gfc_add_block_to_block (block, &se.pre);
2201 final_var = gfc_evaluate_now (se.expr, block);
2202 gfc_add_block_to_block (block, &se.post);
2204 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
2205 OMP_CLAUSE_FINAL_EXPR (c) = final_var;
2206 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2209 if (clauses->num_threads)
2211 tree num_threads;
2213 gfc_init_se (&se, NULL);
2214 gfc_conv_expr (&se, clauses->num_threads);
2215 gfc_add_block_to_block (block, &se.pre);
2216 num_threads = gfc_evaluate_now (se.expr, block);
2217 gfc_add_block_to_block (block, &se.post);
2219 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
2220 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
2221 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2224 chunk_size = NULL_TREE;
2225 if (clauses->chunk_size)
2227 gfc_init_se (&se, NULL);
2228 gfc_conv_expr (&se, clauses->chunk_size);
2229 gfc_add_block_to_block (block, &se.pre);
2230 chunk_size = gfc_evaluate_now (se.expr, block);
2231 gfc_add_block_to_block (block, &se.post);
2234 if (clauses->sched_kind != OMP_SCHED_NONE)
2236 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
2237 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2238 switch (clauses->sched_kind)
2240 case OMP_SCHED_STATIC:
2241 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
2242 break;
2243 case OMP_SCHED_DYNAMIC:
2244 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
2245 break;
2246 case OMP_SCHED_GUIDED:
2247 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
2248 break;
2249 case OMP_SCHED_RUNTIME:
2250 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
2251 break;
2252 case OMP_SCHED_AUTO:
2253 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
2254 break;
2255 default:
2256 gcc_unreachable ();
2258 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2261 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
2263 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
2264 switch (clauses->default_sharing)
2266 case OMP_DEFAULT_NONE:
2267 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
2268 break;
2269 case OMP_DEFAULT_SHARED:
2270 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
2271 break;
2272 case OMP_DEFAULT_PRIVATE:
2273 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
2274 break;
2275 case OMP_DEFAULT_FIRSTPRIVATE:
2276 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2277 break;
2278 default:
2279 gcc_unreachable ();
2281 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2284 if (clauses->nowait)
2286 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
2287 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2290 if (clauses->ordered)
2292 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
2293 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2296 if (clauses->untied)
2298 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
2299 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2302 if (clauses->mergeable)
2304 c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
2305 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2308 if (clauses->collapse)
2310 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
2311 OMP_CLAUSE_COLLAPSE_EXPR (c)
2312 = build_int_cst (integer_type_node, clauses->collapse);
2313 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2316 if (clauses->inbranch)
2318 c = build_omp_clause (where.lb->location, OMP_CLAUSE_INBRANCH);
2319 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2322 if (clauses->notinbranch)
2324 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOTINBRANCH);
2325 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2328 switch (clauses->cancel)
2330 case OMP_CANCEL_UNKNOWN:
2331 break;
2332 case OMP_CANCEL_PARALLEL:
2333 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PARALLEL);
2334 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2335 break;
2336 case OMP_CANCEL_SECTIONS:
2337 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SECTIONS);
2338 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2339 break;
2340 case OMP_CANCEL_DO:
2341 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FOR);
2342 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2343 break;
2344 case OMP_CANCEL_TASKGROUP:
2345 c = build_omp_clause (where.lb->location, OMP_CLAUSE_TASKGROUP);
2346 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2347 break;
2350 if (clauses->proc_bind != OMP_PROC_BIND_UNKNOWN)
2352 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PROC_BIND);
2353 switch (clauses->proc_bind)
2355 case OMP_PROC_BIND_MASTER:
2356 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_MASTER;
2357 break;
2358 case OMP_PROC_BIND_SPREAD:
2359 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_SPREAD;
2360 break;
2361 case OMP_PROC_BIND_CLOSE:
2362 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_CLOSE;
2363 break;
2364 default:
2365 gcc_unreachable ();
2367 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2370 if (clauses->safelen_expr)
2372 tree safelen_var;
2374 gfc_init_se (&se, NULL);
2375 gfc_conv_expr (&se, clauses->safelen_expr);
2376 gfc_add_block_to_block (block, &se.pre);
2377 safelen_var = gfc_evaluate_now (se.expr, block);
2378 gfc_add_block_to_block (block, &se.post);
2380 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SAFELEN);
2381 OMP_CLAUSE_SAFELEN_EXPR (c) = safelen_var;
2382 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2385 if (clauses->simdlen_expr)
2387 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SIMDLEN);
2388 OMP_CLAUSE_SIMDLEN_EXPR (c)
2389 = gfc_conv_constant_to_tree (clauses->simdlen_expr);
2390 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2393 if (clauses->num_teams)
2395 tree num_teams;
2397 gfc_init_se (&se, NULL);
2398 gfc_conv_expr (&se, clauses->num_teams);
2399 gfc_add_block_to_block (block, &se.pre);
2400 num_teams = gfc_evaluate_now (se.expr, block);
2401 gfc_add_block_to_block (block, &se.post);
2403 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_TEAMS);
2404 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
2405 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2408 if (clauses->device)
2410 tree device;
2412 gfc_init_se (&se, NULL);
2413 gfc_conv_expr (&se, clauses->device);
2414 gfc_add_block_to_block (block, &se.pre);
2415 device = gfc_evaluate_now (se.expr, block);
2416 gfc_add_block_to_block (block, &se.post);
2418 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEVICE);
2419 OMP_CLAUSE_DEVICE_ID (c) = device;
2420 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2423 if (clauses->thread_limit)
2425 tree thread_limit;
2427 gfc_init_se (&se, NULL);
2428 gfc_conv_expr (&se, clauses->thread_limit);
2429 gfc_add_block_to_block (block, &se.pre);
2430 thread_limit = gfc_evaluate_now (se.expr, block);
2431 gfc_add_block_to_block (block, &se.post);
2433 c = build_omp_clause (where.lb->location, OMP_CLAUSE_THREAD_LIMIT);
2434 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
2435 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2438 chunk_size = NULL_TREE;
2439 if (clauses->dist_chunk_size)
2441 gfc_init_se (&se, NULL);
2442 gfc_conv_expr (&se, clauses->dist_chunk_size);
2443 gfc_add_block_to_block (block, &se.pre);
2444 chunk_size = gfc_evaluate_now (se.expr, block);
2445 gfc_add_block_to_block (block, &se.post);
2448 if (clauses->dist_sched_kind != OMP_SCHED_NONE)
2450 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DIST_SCHEDULE);
2451 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2452 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2455 return nreverse (omp_clauses);
2458 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2460 static tree
2461 gfc_trans_omp_code (gfc_code *code, bool force_empty)
2463 tree stmt;
2465 pushlevel ();
2466 stmt = gfc_trans_code (code);
2467 if (TREE_CODE (stmt) != BIND_EXPR)
2469 if (!IS_EMPTY_STMT (stmt) || force_empty)
2471 tree block = poplevel (1, 0);
2472 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
2474 else
2475 poplevel (0, 0);
2477 else
2478 poplevel (0, 0);
2479 return stmt;
2483 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
2484 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
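/* Translate !$omp atomic.  Depending on the kind of atomic (read,
   write, update or capture) this produces an OMP_ATOMIC_READ,
   OMP_ATOMIC or OMP_ATOMIC_CAPTURE_{OLD,NEW} node for the
   assignment(s) inside the construct.  */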
2486 static tree
2487 gfc_trans_omp_atomic (gfc_code *code)
2489 gfc_code *atomic_code = code;
2490 gfc_se lse;
2491 gfc_se rse;
2492 gfc_se vse;
2493 gfc_expr *expr2, *e;
2494 gfc_symbol *var;
2495 stmtblock_t block;
2496 tree lhsaddr, type, rhs, x;
2497 enum tree_code op = ERROR_MARK;
2498 enum tree_code aop = OMP_ATOMIC;
2499 bool var_on_left = false;
2500 bool seq_cst = (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SEQ_CST) != 0;
2502 code = code->block->next;
2503 gcc_assert (code->op == EXEC_ASSIGN);
2504 var = code->expr1->symtree->n.sym;
2506 gfc_init_se (&lse, NULL);
2507 gfc_init_se (&rse, NULL);
2508 gfc_init_se (&vse, NULL);
2509 gfc_start_block (&block);
2511 expr2 = code->expr2;
2512 if (expr2->expr_type == EXPR_FUNCTION
2513 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2514 expr2 = expr2->value.function.actual->expr;
2516 switch (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2518 case GFC_OMP_ATOMIC_READ:
2519 gfc_conv_expr (&vse, code->expr1);
2520 gfc_add_block_to_block (&block, &vse.pre);
2522 gfc_conv_expr (&lse, expr2);
2523 gfc_add_block_to_block (&block, &lse.pre);
2524 type = TREE_TYPE (lse.expr);
2525 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2527 x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
2528 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2529 x = convert (TREE_TYPE (vse.expr), x);
2530 gfc_add_modify (&block, vse.expr, x);
2532 gfc_add_block_to_block (&block, &lse.pre);
2533 gfc_add_block_to_block (&block, &rse.pre);
2535 return gfc_finish_block (&block);
2536 case GFC_OMP_ATOMIC_CAPTURE:
2537 aop = OMP_ATOMIC_CAPTURE_NEW;
2538 if (expr2->expr_type == EXPR_VARIABLE)
2540 aop = OMP_ATOMIC_CAPTURE_OLD;
2541 gfc_conv_expr (&vse, code->expr1);
2542 gfc_add_block_to_block (&block, &vse.pre);
2544 gfc_conv_expr (&lse, expr2);
2545 gfc_add_block_to_block (&block, &lse.pre);
2546 gfc_init_se (&lse, NULL);
2547 code = code->next;
2548 var = code->expr1->symtree->n.sym;
2549 expr2 = code->expr2;
2550 if (expr2->expr_type == EXPR_FUNCTION
2551 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2552 expr2 = expr2->value.function.actual->expr;
2554 break;
2555 default:
2556 break;
2559 gfc_conv_expr (&lse, code->expr1);
2560 gfc_add_block_to_block (&block, &lse.pre);
2561 type = TREE_TYPE (lse.expr);
2562 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2564 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2565 == GFC_OMP_ATOMIC_WRITE)
2566 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2568 gfc_conv_expr (&rse, expr2);
2569 gfc_add_block_to_block (&block, &rse.pre);
2571 else if (expr2->expr_type == EXPR_OP)
2573 gfc_expr *e;
2574 switch (expr2->value.op.op)
2576 case INTRINSIC_PLUS:
2577 op = PLUS_EXPR;
2578 break;
2579 case INTRINSIC_TIMES:
2580 op = MULT_EXPR;
2581 break;
2582 case INTRINSIC_MINUS:
2583 op = MINUS_EXPR;
2584 break;
2585 case INTRINSIC_DIVIDE:
2586 if (expr2->ts.type == BT_INTEGER)
2587 op = TRUNC_DIV_EXPR;
2588 else
2589 op = RDIV_EXPR;
2590 break;
2591 case INTRINSIC_AND:
2592 op = TRUTH_ANDIF_EXPR;
2593 break;
2594 case INTRINSIC_OR:
2595 op = TRUTH_ORIF_EXPR;
2596 break;
2597 case INTRINSIC_EQV:
2598 op = EQ_EXPR;
2599 break;
2600 case INTRINSIC_NEQV:
2601 op = NE_EXPR;
2602 break;
2603 default:
2604 gcc_unreachable ();
2606 e = expr2->value.op.op1;
2607 if (e->expr_type == EXPR_FUNCTION
2608 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2609 e = e->value.function.actual->expr;
2610 if (e->expr_type == EXPR_VARIABLE
2611 && e->symtree != NULL
2612 && e->symtree->n.sym == var)
2614 expr2 = expr2->value.op.op2;
2615 var_on_left = true;
2617 else
2619 e = expr2->value.op.op2;
2620 if (e->expr_type == EXPR_FUNCTION
2621 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2622 e = e->value.function.actual->expr;
2623 gcc_assert (e->expr_type == EXPR_VARIABLE
2624 && e->symtree != NULL
2625 && e->symtree->n.sym == var);
2626 expr2 = expr2->value.op.op1;
2627 var_on_left = false;
2629 gfc_conv_expr (&rse, expr2);
2630 gfc_add_block_to_block (&block, &rse.pre);
2632 else
2634 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
2635 switch (expr2->value.function.isym->id)
2637 case GFC_ISYM_MIN:
2638 op = MIN_EXPR;
2639 break;
2640 case GFC_ISYM_MAX:
2641 op = MAX_EXPR;
2642 break;
2643 case GFC_ISYM_IAND:
2644 op = BIT_AND_EXPR;
2645 break;
2646 case GFC_ISYM_IOR:
2647 op = BIT_IOR_EXPR;
2648 break;
2649 case GFC_ISYM_IEOR:
2650 op = BIT_XOR_EXPR;
2651 break;
2652 default:
2653 gcc_unreachable ();
2655 e = expr2->value.function.actual->expr;
2656 gcc_assert (e->expr_type == EXPR_VARIABLE
2657 && e->symtree != NULL
2658 && e->symtree->n.sym == var);
2660 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
2661 gfc_add_block_to_block (&block, &rse.pre);
2662 if (expr2->value.function.actual->next->next != NULL)
2664 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
2665 gfc_actual_arglist *arg;
2667 gfc_add_modify (&block, accum, rse.expr);
2668 for (arg = expr2->value.function.actual->next->next; arg;
2669 arg = arg->next)
2671 gfc_init_block (&rse.pre);
2672 gfc_conv_expr (&rse, arg->expr);
2673 gfc_add_block_to_block (&block, &rse.pre);
2674 x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
2675 accum, rse.expr);
2676 gfc_add_modify (&block, accum, x);
2679 rse.expr = accum;
2682 expr2 = expr2->value.function.actual->next->expr;
2685 lhsaddr = save_expr (lhsaddr);
2686 rhs = gfc_evaluate_now (rse.expr, &block);
2688 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2689 == GFC_OMP_ATOMIC_WRITE)
2690 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2691 x = rhs;
2692 else
2694 x = convert (TREE_TYPE (rhs),
2695 build_fold_indirect_ref_loc (input_location, lhsaddr));
2696 if (var_on_left)
2697 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
2698 else
2699 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
2702 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
2703 && TREE_CODE (type) != COMPLEX_TYPE)
2704 x = fold_build1_loc (input_location, REALPART_EXPR,
2705 TREE_TYPE (TREE_TYPE (rhs)), x);
2707 gfc_add_block_to_block (&block, &lse.pre);
2708 gfc_add_block_to_block (&block, &rse.pre);
2710 if (aop == OMP_ATOMIC)
2712 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
2713 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2714 gfc_add_expr_to_block (&block, x);
2716 else
2718 if (aop == OMP_ATOMIC_CAPTURE_NEW)
2720 code = code->next;
2721 expr2 = code->expr2;
2722 if (expr2->expr_type == EXPR_FUNCTION
2723 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2724 expr2 = expr2->value.function.actual->expr;
2726 gcc_assert (expr2->expr_type == EXPR_VARIABLE);
2727 gfc_conv_expr (&vse, code->expr1);
2728 gfc_add_block_to_block (&block, &vse.pre);
2730 gfc_init_se (&lse, NULL);
2731 gfc_conv_expr (&lse, expr2);
2732 gfc_add_block_to_block (&block, &lse.pre);
2734 x = build2 (aop, type, lhsaddr, convert (type, x));
2735 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2736 x = convert (TREE_TYPE (vse.expr), x);
2737 gfc_add_modify (&block, vse.expr, x);
2740 return gfc_finish_block (&block);
2743 static tree
2744 gfc_trans_omp_barrier (void)
2746 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
2747 return build_call_expr_loc (input_location, decl, 0);
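/* Translate !$omp cancel into a call to GOMP_cancel.  The kind of
   construct being cancelled is encoded as a bitmask in the first
   argument; the IF clause expression (or true) becomes the second.  */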
2750 static tree
2751 gfc_trans_omp_cancel (gfc_code *code)
2753 int mask = 0;
2754 tree ifc = boolean_true_node;
2755 stmtblock_t block;
2756 switch (code->ext.omp_clauses->cancel)
2758 case OMP_CANCEL_PARALLEL: mask = 1; break;
2759 case OMP_CANCEL_DO: mask = 2; break;
2760 case OMP_CANCEL_SECTIONS: mask = 4; break;
2761 case OMP_CANCEL_TASKGROUP: mask = 8; break;
2762 default: gcc_unreachable ();
2764 gfc_start_block (&block);
2765 if (code->ext.omp_clauses->if_expr)
2767 gfc_se se;
2768 tree if_var;
2770 gfc_init_se (&se, NULL);
2771 gfc_conv_expr (&se, code->ext.omp_clauses->if_expr);
2772 gfc_add_block_to_block (&block, &se.pre);
2773 if_var = gfc_evaluate_now (se.expr, &block);
2774 gfc_add_block_to_block (&block, &se.post);
2775 tree type = TREE_TYPE (if_var);
2776 ifc = fold_build2_loc (input_location, NE_EXPR,
2777 boolean_type_node, if_var,
2778 build_zero_cst (type));
2780 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
2781 tree c_bool_type = TREE_TYPE (TREE_TYPE (decl));
2782 ifc = fold_convert (c_bool_type, ifc);
2783 gfc_add_expr_to_block (&block,
2784 build_call_expr_loc (input_location, decl, 2,
2785 build_int_cst (integer_type_node,
2786 mask), ifc));
2787 return gfc_finish_block (&block);
2790 static tree
2791 gfc_trans_omp_cancellation_point (gfc_code *code)
2793 int mask = 0;
2794 switch (code->ext.omp_clauses->cancel)
2796 case OMP_CANCEL_PARALLEL: mask = 1; break;
2797 case OMP_CANCEL_DO: mask = 2; break;
2798 case OMP_CANCEL_SECTIONS: mask = 4; break;
2799 case OMP_CANCEL_TASKGROUP: mask = 8; break;
2800 default: gcc_unreachable ();
2802 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
2803 return build_call_expr_loc (input_location, decl, 1,
2804 build_int_cst (integer_type_node, mask));
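/* Translate !$omp critical [(name)] into an OMP_CRITICAL node around
   the translated body; the optional name becomes an identifier.  */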
2807 static tree
2808 gfc_trans_omp_critical (gfc_code *code)
2810 tree name = NULL_TREE, stmt;
2811 if (code->ext.omp_name != NULL)
2812 name = get_identifier (code->ext.omp_name);
2813 stmt = gfc_trans_code (code->block->next);
2814 return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
2817 typedef struct dovar_init_d {
2818 tree var;
2819 tree init;
2820 } dovar_init;
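/* Translate a (possibly collapsed) !$omp do, !$omp simd or
   !$omp distribute loop nest.  OP selects the construct, PBLOCK
   receives the evaluation of clauses and loop bounds, DO_CLAUSES are
   the clauses to use and PAR_CLAUSES are the clauses of an enclosing
   combined parallel, if any.  The OMP_FOR init/cond/incr vectors are
   built from the COLLAPSE DO loops.  */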
2823 static tree
2824 gfc_trans_omp_do (gfc_code *code, gfc_exec_op op, stmtblock_t *pblock,
2825 gfc_omp_clauses *do_clauses, tree par_clauses)
2827 gfc_se se;
2828 tree dovar, stmt, from, to, step, type, init, cond, incr;
2829 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
2830 stmtblock_t block;
2831 stmtblock_t body;
2832 gfc_omp_clauses *clauses = code->ext.omp_clauses;
2833 int i, collapse = clauses->collapse;
2834 vec<dovar_init> inits = vNULL;
2835 dovar_init *di;
2836 unsigned ix;
2838 if (collapse <= 0)
2839 collapse = 1;
2841 code = code->block->next;
2842 gcc_assert (code->op == EXEC_DO);
2844 init = make_tree_vec (collapse);
2845 cond = make_tree_vec (collapse);
2846 incr = make_tree_vec (collapse);
2848 if (pblock == NULL)
2850 gfc_start_block (&block);
2851 pblock = &block;
2854 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
2856 for (i = 0; i < collapse; i++)
2858 int simple = 0;
2859 int dovar_found = 0;
2860 tree dovar_decl;
2862 if (clauses)
2864 gfc_omp_namelist *n = NULL;
2865 if (op != EXEC_OMP_DISTRIBUTE)
2866 for (n = clauses->lists[(op == EXEC_OMP_SIMD && collapse == 1)
2867 ? OMP_LIST_LINEAR : OMP_LIST_LASTPRIVATE];
2868 n != NULL; n = n->next)
2869 if (code->ext.iterator->var->symtree->n.sym == n->sym)
2870 break;
2871 if (n != NULL)
2872 dovar_found = 1;
2873 else if (n == NULL && op != EXEC_OMP_SIMD)
2874 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
2875 if (code->ext.iterator->var->symtree->n.sym == n->sym)
2876 break;
2877 if (n != NULL)
2878 dovar_found++;
2881 /* Evaluate all the expressions in the iterator. */
2882 gfc_init_se (&se, NULL);
2883 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
2884 gfc_add_block_to_block (pblock, &se.pre);
2885 dovar = se.expr;
2886 type = TREE_TYPE (dovar);
2887 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
2889 gfc_init_se (&se, NULL);
2890 gfc_conv_expr_val (&se, code->ext.iterator->start);
2891 gfc_add_block_to_block (pblock, &se.pre);
2892 from = gfc_evaluate_now (se.expr, pblock);
2894 gfc_init_se (&se, NULL);
2895 gfc_conv_expr_val (&se, code->ext.iterator->end);
2896 gfc_add_block_to_block (pblock, &se.pre);
2897 to = gfc_evaluate_now (se.expr, pblock);
2899 gfc_init_se (&se, NULL);
2900 gfc_conv_expr_val (&se, code->ext.iterator->step);
2901 gfc_add_block_to_block (pblock, &se.pre);
2902 step = gfc_evaluate_now (se.expr, pblock);
2903 dovar_decl = dovar;
2905 /* Special case simple loops. */
2906 if (TREE_CODE (dovar) == VAR_DECL)
2908 if (integer_onep (step))
2909 simple = 1;
2910 else if (tree_int_cst_equal (step, integer_minus_one_node))
2911 simple = -1;
2913 else
2914 dovar_decl
2915 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym,
2916 false);
2918 /* Loop body. */
2919 if (simple)
2921 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
2922 /* The condition should not be folded. */
2923 TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
2924 ? LE_EXPR : GE_EXPR,
2925 boolean_type_node, dovar, to);
2926 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
2927 type, dovar, step);
2928 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
2929 MODIFY_EXPR,
2930 type, dovar,
2931 TREE_VEC_ELT (incr, i));
2933 else
2935 /* STEP is not 1 or -1. Use:
2936 for (count = 0; count < (to + step - from) / step; count++)
2938 dovar = from + count * step;
2939 body;
2940 cycle_label:;
2941 } */
2942 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
2943 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
2944 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
2945 step);
2946 tmp = gfc_evaluate_now (tmp, pblock);
2947 count = gfc_create_var (type, "count");
2948 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
2949 build_int_cst (type, 0));
2950 /* The condition should not be folded. */
2951 TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
2952 boolean_type_node,
2953 count, tmp);
2954 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
2955 type, count,
2956 build_int_cst (type, 1));
2957 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
2958 MODIFY_EXPR, type, count,
2959 TREE_VEC_ELT (incr, i));
2961 /* Initialize DOVAR. */
2962 tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
2963 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
2964 dovar_init e = {dovar, tmp};
2965 inits.safe_push (e);
2968 if (!dovar_found)
2970 if (op == EXEC_OMP_SIMD)
2972 if (collapse == 1)
2974 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
2975 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
2977 else
2978 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
2979 if (!simple)
2980 dovar_found = 2;
2982 else
2983 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
2984 OMP_CLAUSE_DECL (tmp) = dovar_decl;
2985 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
2987 if (dovar_found == 2)
2989 tree c = NULL;
2991 tmp = NULL;
2992 if (!simple)
2994 /* If dovar is lastprivate, but different counter is used,
2995 dovar += step needs to be added to
2996 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
2997 will have the value on entry of the last loop, rather
2998 than value after iterator increment. */
2999 tmp = gfc_evaluate_now (step, pblock);
3000 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
3001 tmp);
3002 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
3003 dovar, tmp);
3004 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3005 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3006 && OMP_CLAUSE_DECL (c) == dovar_decl)
3008 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
3009 break;
3011 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3012 && OMP_CLAUSE_DECL (c) == dovar_decl)
3014 OMP_CLAUSE_LINEAR_STMT (c) = tmp;
3015 break;
3018 if (c == NULL && op == EXEC_OMP_DO && par_clauses != NULL)
3020 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3021 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3022 && OMP_CLAUSE_DECL (c) == dovar_decl)
3024 tree l = build_omp_clause (input_location,
3025 OMP_CLAUSE_LASTPRIVATE);
3026 OMP_CLAUSE_DECL (l) = dovar_decl;
3027 OMP_CLAUSE_CHAIN (l) = omp_clauses;
3028 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
3029 omp_clauses = l;
3030 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
3031 break;
3034 gcc_assert (simple || c != NULL);
3036 if (!simple)
3038 if (op != EXEC_OMP_SIMD)
3039 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
3040 else if (collapse == 1)
3042 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
3043 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
3044 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp) = 1;
3045 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp) = 1;
3047 else
3048 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
3049 OMP_CLAUSE_DECL (tmp) = count;
3050 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
3053 if (i + 1 < collapse)
3054 code = code->block->next;
3057 if (pblock != &block)
3059 pushlevel ();
3060 gfc_start_block (&block);
3063 gfc_start_block (&body);
3065 FOR_EACH_VEC_ELT (inits, ix, di)
3066 gfc_add_modify (&body, di->var, di->init);
3067 inits.release ();
3069 /* Cycle statement is implemented with a goto. Exit statement must not be
3070 present for this loop. */
3071 cycle_label = gfc_build_label_decl (NULL_TREE);
3073 /* Put these labels where they can be found later. */
3075 code->cycle_label = cycle_label;
3076 code->exit_label = NULL_TREE;
3078 /* Main loop body. */
3079 tmp = gfc_trans_omp_code (code->block->next, true);
3080 gfc_add_expr_to_block (&body, tmp);
3082 /* Label for cycle statements (if needed). */
3083 if (TREE_USED (cycle_label))
3085 tmp = build1_v (LABEL_EXPR, cycle_label);
3086 gfc_add_expr_to_block (&body, tmp);
3089 /* End of loop body. */
3090 switch (op)
3092 case EXEC_OMP_SIMD: stmt = make_node (OMP_SIMD); break;
3093 case EXEC_OMP_DO: stmt = make_node (OMP_FOR); break;
3094 case EXEC_OMP_DISTRIBUTE: stmt = make_node (OMP_DISTRIBUTE); break;
3095 default: gcc_unreachable ();
3098 TREE_TYPE (stmt) = void_type_node;
3099 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
3100 OMP_FOR_CLAUSES (stmt) = omp_clauses;
3101 OMP_FOR_INIT (stmt) = init;
3102 OMP_FOR_COND (stmt) = cond;
3103 OMP_FOR_INCR (stmt) = incr;
3104 gfc_add_expr_to_block (&block, stmt);
3106 return gfc_finish_block (&block);
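/* !$omp flush is translated into a call to the __sync_synchronize
   builtin, i.e. a full memory barrier.  */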
3109 static tree
3110 gfc_trans_omp_flush (void)
3112 tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
3113 return build_call_expr_loc (input_location, decl, 0);
3116 static tree
3117 gfc_trans_omp_master (gfc_code *code)
3119 tree stmt = gfc_trans_code (code->block->next);
3120 if (IS_EMPTY_STMT (stmt))
3121 return stmt;
3122 return build1_v (OMP_MASTER, stmt);
3125 static tree
3126 gfc_trans_omp_ordered (gfc_code *code)
3128 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
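/* Translate !$omp parallel: translate the clauses and the body and
   wrap them in an OMP_PARALLEL node.  */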
3131 static tree
3132 gfc_trans_omp_parallel (gfc_code *code)
3134 stmtblock_t block;
3135 tree stmt, omp_clauses;
3137 gfc_start_block (&block);
3138 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3139 code->loc);
3140 stmt = gfc_trans_omp_code (code->block->next, true);
3141 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3142 omp_clauses);
3143 gfc_add_expr_to_block (&block, stmt);
3144 return gfc_finish_block (&block);
3147 enum
3149 GFC_OMP_SPLIT_SIMD,
3150 GFC_OMP_SPLIT_DO,
3151 GFC_OMP_SPLIT_PARALLEL,
3152 GFC_OMP_SPLIT_DISTRIBUTE,
3153 GFC_OMP_SPLIT_TEAMS,
3154 GFC_OMP_SPLIT_TARGET,
3155 GFC_OMP_SPLIT_NUM
3158 enum
3160 GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
3161 GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
3162 GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
3163 GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
3164 GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
3165 GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
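/* For a combined construct CODE (e.g. !$omp target teams distribute
   parallel do simd) distribute its clauses among the constituent
   constructs in CLAUSESA, indexed by GFC_OMP_SPLIT_*.  For instance,
   for !$omp parallel do simd the schedule clause goes to the do part,
   num_threads and proc_bind to the parallel part, and safelen and
   aligned to the simd part, while collapse is duplicated on every
   looping construct.  */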
3168 static void
3169 gfc_split_omp_clauses (gfc_code *code,
3170 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM])
3172 int mask = 0, innermost = 0;
3173 memset (clausesa, 0, GFC_OMP_SPLIT_NUM * sizeof (gfc_omp_clauses));
3174 switch (code->op)
3176 case EXEC_OMP_DISTRIBUTE:
3177 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3178 break;
3179 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
3180 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3181 innermost = GFC_OMP_SPLIT_DO;
3182 break;
3183 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
3184 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL
3185 | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3186 innermost = GFC_OMP_SPLIT_SIMD;
3187 break;
3188 case EXEC_OMP_DISTRIBUTE_SIMD:
3189 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3190 innermost = GFC_OMP_SPLIT_SIMD;
3191 break;
3192 case EXEC_OMP_DO:
3193 innermost = GFC_OMP_SPLIT_DO;
3194 break;
3195 case EXEC_OMP_DO_SIMD:
3196 mask = GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3197 innermost = GFC_OMP_SPLIT_SIMD;
3198 break;
3199 case EXEC_OMP_PARALLEL:
3200 innermost = GFC_OMP_SPLIT_PARALLEL;
3201 break;
3202 case EXEC_OMP_PARALLEL_DO:
3203 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3204 innermost = GFC_OMP_SPLIT_DO;
3205 break;
3206 case EXEC_OMP_PARALLEL_DO_SIMD:
3207 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3208 innermost = GFC_OMP_SPLIT_SIMD;
3209 break;
3210 case EXEC_OMP_SIMD:
3211 innermost = GFC_OMP_SPLIT_SIMD;
3212 break;
3213 case EXEC_OMP_TARGET:
3214 innermost = GFC_OMP_SPLIT_TARGET;
3215 break;
3216 case EXEC_OMP_TARGET_TEAMS:
3217 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS;
3218 innermost = GFC_OMP_SPLIT_TEAMS;
3219 break;
3220 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3221 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3222 | GFC_OMP_MASK_DISTRIBUTE;
3223 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3224 break;
3225 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
3226 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3227 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3228 innermost = GFC_OMP_SPLIT_DO;
3229 break;
3230 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3231 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3232 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3233 innermost = GFC_OMP_SPLIT_SIMD;
3234 break;
3235 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
3236 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3237 | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3238 innermost = GFC_OMP_SPLIT_SIMD;
3239 break;
3240 case EXEC_OMP_TEAMS:
3241 innermost = GFC_OMP_SPLIT_TEAMS;
3242 break;
3243 case EXEC_OMP_TEAMS_DISTRIBUTE:
3244 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE;
3245 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3246 break;
3247 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
3248 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3249 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3250 innermost = GFC_OMP_SPLIT_DO;
3251 break;
3252 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3253 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3254 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3255 innermost = GFC_OMP_SPLIT_SIMD;
3256 break;
3257 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
3258 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3259 innermost = GFC_OMP_SPLIT_SIMD;
3260 break;
3261 default:
3262 gcc_unreachable ();
3264 if (mask == 0)
3266 clausesa[innermost] = *code->ext.omp_clauses;
3267 return;
3269 if (code->ext.omp_clauses != NULL)
3271 if (mask & GFC_OMP_MASK_TARGET)
3273 /* First the clauses that are unique to some constructs. */
3274 clausesa[GFC_OMP_SPLIT_TARGET].lists[OMP_LIST_MAP]
3275 = code->ext.omp_clauses->lists[OMP_LIST_MAP];
3276 clausesa[GFC_OMP_SPLIT_TARGET].device
3277 = code->ext.omp_clauses->device;
3279 if (mask & GFC_OMP_MASK_TEAMS)
3281 /* First the clauses that are unique to some constructs. */
3282 clausesa[GFC_OMP_SPLIT_TEAMS].num_teams
3283 = code->ext.omp_clauses->num_teams;
3284 clausesa[GFC_OMP_SPLIT_TEAMS].thread_limit
3285 = code->ext.omp_clauses->thread_limit;
3286 /* Shared and default clauses are allowed on parallel and teams. */
3287 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_SHARED]
3288 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3289 clausesa[GFC_OMP_SPLIT_TEAMS].default_sharing
3290 = code->ext.omp_clauses->default_sharing;
3292 if (mask & GFC_OMP_MASK_DISTRIBUTE)
3294 /* First the clauses that are unique to some constructs. */
3295 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_sched_kind
3296 = code->ext.omp_clauses->dist_sched_kind;
3297 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_chunk_size
3298 = code->ext.omp_clauses->dist_chunk_size;
3299 /* Duplicate collapse. */
3300 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].collapse
3301 = code->ext.omp_clauses->collapse;
3303 if (mask & GFC_OMP_MASK_PARALLEL)
3305 /* First the clauses that are unique to some constructs. */
3306 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_COPYIN]
3307 = code->ext.omp_clauses->lists[OMP_LIST_COPYIN];
3308 clausesa[GFC_OMP_SPLIT_PARALLEL].num_threads
3309 = code->ext.omp_clauses->num_threads;
3310 clausesa[GFC_OMP_SPLIT_PARALLEL].proc_bind
3311 = code->ext.omp_clauses->proc_bind;
3312 /* Shared and default clauses are allowed on parallel and teams. */
3313 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_SHARED]
3314 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3315 clausesa[GFC_OMP_SPLIT_PARALLEL].default_sharing
3316 = code->ext.omp_clauses->default_sharing;
3318 if (mask & GFC_OMP_MASK_DO)
3320 /* First the clauses that are unique to some constructs. */
3321 clausesa[GFC_OMP_SPLIT_DO].ordered
3322 = code->ext.omp_clauses->ordered;
3323 clausesa[GFC_OMP_SPLIT_DO].sched_kind
3324 = code->ext.omp_clauses->sched_kind;
3325 clausesa[GFC_OMP_SPLIT_DO].chunk_size
3326 = code->ext.omp_clauses->chunk_size;
3327 clausesa[GFC_OMP_SPLIT_DO].nowait
3328 = code->ext.omp_clauses->nowait;
3329 /* Duplicate collapse. */
3330 clausesa[GFC_OMP_SPLIT_DO].collapse
3331 = code->ext.omp_clauses->collapse;
3333 if (mask & GFC_OMP_MASK_SIMD)
3335 clausesa[GFC_OMP_SPLIT_SIMD].safelen_expr
3336 = code->ext.omp_clauses->safelen_expr;
3337 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LINEAR]
3338 = code->ext.omp_clauses->lists[OMP_LIST_LINEAR];
3339 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_ALIGNED]
3340 = code->ext.omp_clauses->lists[OMP_LIST_ALIGNED];
3341 /* Duplicate collapse. */
3342 clausesa[GFC_OMP_SPLIT_SIMD].collapse
3343 = code->ext.omp_clauses->collapse;
3345 /* Private clause is supported on all constructs but target,
3346 it is enough to put it on the innermost one. For
3347 !$ omp do put it on parallel though,
3348 as that's what we did for OpenMP 3.1. */
3349 clausesa[innermost == GFC_OMP_SPLIT_DO
3350 ? (int) GFC_OMP_SPLIT_PARALLEL
3351 : innermost].lists[OMP_LIST_PRIVATE]
3352 = code->ext.omp_clauses->lists[OMP_LIST_PRIVATE];
3353 /* Firstprivate clause is supported on all constructs but
3354 target and simd. Put it on the outermost of those and
3355 duplicate on parallel. */
3356 if (mask & GFC_OMP_MASK_TEAMS)
3357 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_FIRSTPRIVATE]
3358 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3359 else if (mask & GFC_OMP_MASK_DISTRIBUTE)
3360 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].lists[OMP_LIST_FIRSTPRIVATE]
3361 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3362 if (mask & GFC_OMP_MASK_PARALLEL)
3363 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_FIRSTPRIVATE]
3364 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3365 else if (mask & GFC_OMP_MASK_DO)
3366 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_FIRSTPRIVATE]
3367 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3368 /* Lastprivate is allowed on do and simd. In
3369 parallel do{, simd} we actually want to put it on
3370 parallel rather than do. */
3371 if (mask & GFC_OMP_MASK_PARALLEL)
3372 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_LASTPRIVATE]
3373 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3374 else if (mask & GFC_OMP_MASK_DO)
3375 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_LASTPRIVATE]
3376 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3377 if (mask & GFC_OMP_MASK_SIMD)
3378 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LASTPRIVATE]
3379 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3380 /* Reduction is allowed on simd, do, parallel and teams.
3381 Duplicate it on all of them, but omit on do if
3382 parallel is present. */
3383 if (mask & GFC_OMP_MASK_TEAMS)
3384 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_REDUCTION]
3385 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3386 if (mask & GFC_OMP_MASK_PARALLEL)
3387 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_REDUCTION]
3388 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3389 else if (mask & GFC_OMP_MASK_DO)
3390 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_REDUCTION]
3391 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3392 if (mask & GFC_OMP_MASK_SIMD)
3393 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_REDUCTION]
3394 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3395 /* FIXME: This is currently being discussed. */
3396 if (mask & GFC_OMP_MASK_PARALLEL)
3397 clausesa[GFC_OMP_SPLIT_PARALLEL].if_expr
3398 = code->ext.omp_clauses->if_expr;
3399 else
3400 clausesa[GFC_OMP_SPLIT_TARGET].if_expr
3401 = code->ext.omp_clauses->if_expr;
3403 if ((mask & (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3404 == (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3405 clausesa[GFC_OMP_SPLIT_DO].nowait = true;
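/* Translate !$omp do simd (or that part of a larger combined
   construct).  The simd loop is produced by gfc_trans_omp_do and,
   when -fopenmp is enabled, wrapped in an OMP_FOR carrying the
   do clauses.  */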
3408 static tree
3409 gfc_trans_omp_do_simd (gfc_code *code, stmtblock_t *pblock,
3410 gfc_omp_clauses *clausesa, tree omp_clauses)
3412 stmtblock_t block;
3413 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3414 tree stmt, body, omp_do_clauses = NULL_TREE;
3416 if (pblock == NULL)
3417 gfc_start_block (&block);
3418 else
3419 gfc_init_block (&block);
3421 if (clausesa == NULL)
3423 clausesa = clausesa_buf;
3424 gfc_split_omp_clauses (code, clausesa);
3426 if (gfc_option.gfc_flag_openmp)
3427 omp_do_clauses
3428 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DO], code->loc);
3429 body = gfc_trans_omp_do (code, EXEC_OMP_SIMD, pblock ? pblock : &block,
3430 &clausesa[GFC_OMP_SPLIT_SIMD], omp_clauses);
3431 if (pblock == NULL)
3433 if (TREE_CODE (body) != BIND_EXPR)
3434 body = build3_v (BIND_EXPR, NULL, body, poplevel (1, 0));
3435 else
3436 poplevel (0, 0);
3438 else if (TREE_CODE (body) != BIND_EXPR)
3439 body = build3_v (BIND_EXPR, NULL, body, NULL_TREE);
3440 if (gfc_option.gfc_flag_openmp)
3442 stmt = make_node (OMP_FOR);
3443 TREE_TYPE (stmt) = void_type_node;
3444 OMP_FOR_BODY (stmt) = body;
3445 OMP_FOR_CLAUSES (stmt) = omp_do_clauses;
3447 else
3448 stmt = body;
3449 gfc_add_expr_to_block (&block, stmt);
3450 return gfc_finish_block (&block);
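/* Translate !$omp parallel do (or the parallel do part of a combined
   construct): build the worksharing loop via gfc_trans_omp_do and wrap
   it in an OMP_PARALLEL node marked as combined.  */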
3453 static tree
3454 gfc_trans_omp_parallel_do (gfc_code *code, stmtblock_t *pblock,
3455 gfc_omp_clauses *clausesa)
3457 stmtblock_t block, *new_pblock = pblock;
3458 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3459 tree stmt, omp_clauses = NULL_TREE;
3461 if (pblock == NULL)
3462 gfc_start_block (&block);
3463 else
3464 gfc_init_block (&block);
3466 if (clausesa == NULL)
3468 clausesa = clausesa_buf;
3469 gfc_split_omp_clauses (code, clausesa);
3471 omp_clauses
3472 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3473 code->loc);
3474 if (pblock == NULL)
3476 if (!clausesa[GFC_OMP_SPLIT_DO].ordered
3477 && clausesa[GFC_OMP_SPLIT_DO].sched_kind != OMP_SCHED_STATIC)
3478 new_pblock = &block;
3479 else
3480 pushlevel ();
3482 stmt = gfc_trans_omp_do (code, EXEC_OMP_DO, new_pblock,
3483 &clausesa[GFC_OMP_SPLIT_DO], omp_clauses);
3484 if (pblock == NULL)
3486 if (TREE_CODE (stmt) != BIND_EXPR)
3487 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3488 else
3489 poplevel (0, 0);
3491 else if (TREE_CODE (stmt) != BIND_EXPR)
3492 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3493 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3494 omp_clauses);
3495 OMP_PARALLEL_COMBINED (stmt) = 1;
3496 gfc_add_expr_to_block (&block, stmt);
3497 return gfc_finish_block (&block);
3500 static tree
3501 gfc_trans_omp_parallel_do_simd (gfc_code *code, stmtblock_t *pblock,
3502 gfc_omp_clauses *clausesa)
3504 stmtblock_t block;
3505 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3506 tree stmt, omp_clauses = NULL_TREE;
3508 if (pblock == NULL)
3509 gfc_start_block (&block);
3510 else
3511 gfc_init_block (&block);
3513 if (clausesa == NULL)
3515 clausesa = clausesa_buf;
3516 gfc_split_omp_clauses (code, clausesa);
3518 if (gfc_option.gfc_flag_openmp)
3519 omp_clauses
3520 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3521 code->loc);
3522 if (pblock == NULL)
3523 pushlevel ();
3524 stmt = gfc_trans_omp_do_simd (code, pblock, clausesa, omp_clauses);
3525 if (pblock == NULL)
3527 if (TREE_CODE (stmt) != BIND_EXPR)
3528 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3529 else
3530 poplevel (0, 0);
3532 else if (TREE_CODE (stmt) != BIND_EXPR)
3533 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3534 if (gfc_option.gfc_flag_openmp)
3536 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3537 omp_clauses);
3538 OMP_PARALLEL_COMBINED (stmt) = 1;
3540 gfc_add_expr_to_block (&block, stmt);
3541 return gfc_finish_block (&block);
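/* Translate !$omp parallel sections: the inner OMP_SECTIONS gets an
   implicit nowait and the result is wrapped in a combined
   OMP_PARALLEL.  */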
3544 static tree
3545 gfc_trans_omp_parallel_sections (gfc_code *code)
3547 stmtblock_t block;
3548 gfc_omp_clauses section_clauses;
3549 tree stmt, omp_clauses;
3551 memset (&section_clauses, 0, sizeof (section_clauses));
3552 section_clauses.nowait = true;
3554 gfc_start_block (&block);
3555 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3556 code->loc);
3557 pushlevel ();
3558 stmt = gfc_trans_omp_sections (code, &section_clauses);
3559 if (TREE_CODE (stmt) != BIND_EXPR)
3560 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3561 else
3562 poplevel (0, 0);
3563 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3564 omp_clauses);
3565 OMP_PARALLEL_COMBINED (stmt) = 1;
3566 gfc_add_expr_to_block (&block, stmt);
3567 return gfc_finish_block (&block);
3570 static tree
3571 gfc_trans_omp_parallel_workshare (gfc_code *code)
3573 stmtblock_t block;
3574 gfc_omp_clauses workshare_clauses;
3575 tree stmt, omp_clauses;
3577 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
3578 workshare_clauses.nowait = true;
3580 gfc_start_block (&block);
3581 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3582 code->loc);
3583 pushlevel ();
3584 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
3585 if (TREE_CODE (stmt) != BIND_EXPR)
3586 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3587 else
3588 poplevel (0, 0);
3589 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3590 omp_clauses);
3591 OMP_PARALLEL_COMBINED (stmt) = 1;
3592 gfc_add_expr_to_block (&block, stmt);
3593 return gfc_finish_block (&block);
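/* Translate !$omp sections: each section body becomes an OMP_SECTION
   inside an OMP_SECTIONS node.  The last section is kept even when
   empty if a lastprivate clause is present.  */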
3596 static tree
3597 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
3599 stmtblock_t block, body;
3600 tree omp_clauses, stmt;
3601 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
3603 gfc_start_block (&block);
3605 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
3607 gfc_init_block (&body);
3608 for (code = code->block; code; code = code->block)
3610 /* Last section is special because of lastprivate, so even if it
3611 is empty, chain it in. */
3612 stmt = gfc_trans_omp_code (code->next,
3613 has_lastprivate && code->block == NULL);
3614 if (! IS_EMPTY_STMT (stmt))
3616 stmt = build1_v (OMP_SECTION, stmt);
3617 gfc_add_expr_to_block (&body, stmt);
3620 stmt = gfc_finish_block (&body);
3622 stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
3623 omp_clauses);
3624 gfc_add_expr_to_block (&block, stmt);
3626 return gfc_finish_block (&block);
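/* Translate !$omp single into an OMP_SINGLE node.  */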
3629 static tree
3630 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
3632 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
3633 tree stmt = gfc_trans_omp_code (code->block->next, true);
3634 stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
3635 omp_clauses);
3636 return stmt;
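/* Translate !$omp task into an OMP_TASK node wrapping the translated
   body.  */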
3639 static tree
3640 gfc_trans_omp_task (gfc_code *code)
3642 stmtblock_t block;
3643 tree stmt, omp_clauses;
3645 gfc_start_block (&block);
3646 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3647 code->loc);
3648 stmt = gfc_trans_omp_code (code->block->next, true);
3649 stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
3650 omp_clauses);
3651 gfc_add_expr_to_block (&block, stmt);
3652 return gfc_finish_block (&block);
3655 static tree
3656 gfc_trans_omp_taskgroup (gfc_code *code)
3658 tree stmt = gfc_trans_code (code->block->next);
3659 return build1_loc (input_location, OMP_TASKGROUP, void_type_node, stmt);
3662 static tree
3663 gfc_trans_omp_taskwait (void)
3665 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
3666 return build_call_expr_loc (input_location, decl, 0);
3669 static tree
3670 gfc_trans_omp_taskyield (void)
3672 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
3673 return build_call_expr_loc (input_location, decl, 0);
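/* Translate the distribute part of combined constructs (distribute
   parallel do [simd], distribute simd); plain distribute loops are
   handled directly by gfc_trans_omp_do.  */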
3676 static tree
3677 gfc_trans_omp_distribute (gfc_code *code, gfc_omp_clauses *clausesa)
3679 stmtblock_t block;
3680 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3681 tree stmt, omp_clauses = NULL_TREE;
3683 gfc_start_block (&block);
3684 if (clausesa == NULL)
3686 clausesa = clausesa_buf;
3687 gfc_split_omp_clauses (code, clausesa);
3689 if (gfc_option.gfc_flag_openmp)
3690 omp_clauses
3691 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
3692 code->loc);
3693 switch (code->op)
3695 case EXEC_OMP_DISTRIBUTE:
3696 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3697 case EXEC_OMP_TEAMS_DISTRIBUTE:
3698 /* This is handled in gfc_trans_omp_do. */
3699 gcc_unreachable ();
3700 break;
3701 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
3702 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
3703 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
3704 stmt = gfc_trans_omp_parallel_do (code, &block, clausesa);
3705 if (TREE_CODE (stmt) != BIND_EXPR)
3706 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3707 else
3708 poplevel (0, 0);
3709 break;
3710 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
3711 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3712 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3713 stmt = gfc_trans_omp_parallel_do_simd (code, &block, clausesa);
3714 if (TREE_CODE (stmt) != BIND_EXPR)
3715 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3716 else
3717 poplevel (0, 0);
3718 break;
3719 case EXEC_OMP_DISTRIBUTE_SIMD:
3720 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
3721 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
3722 stmt = gfc_trans_omp_do (code, EXEC_OMP_SIMD, &block,
3723 &clausesa[GFC_OMP_SPLIT_SIMD], NULL_TREE);
3724 if (TREE_CODE (stmt) != BIND_EXPR)
3725 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3726 else
3727 poplevel (0, 0);
3728 break;
3729 default:
3730 gcc_unreachable ();
3732 if (gfc_option.gfc_flag_openmp)
3733 {
3734 tree distribute = make_node (OMP_DISTRIBUTE);
3735 TREE_TYPE (distribute) = void_type_node;
3736 OMP_FOR_BODY (distribute) = stmt;
3737 OMP_FOR_CLAUSES (distribute) = omp_clauses;
3738 stmt = distribute;
3739 }
3740 gfc_add_expr_to_block (&block, stmt);
3741 return gfc_finish_block (&block);
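/* Translate TEAMS and combined TEAMS DISTRIBUTE ... constructs: the body (or
   the translated inner distribute construct) is wrapped in an OMP_TEAMS tree
   with the clauses that belong to the teams part.  */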
3744 static tree
3745 gfc_trans_omp_teams (gfc_code *code, gfc_omp_clauses *clausesa)
3747 stmtblock_t block;
3748 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3749 tree stmt, omp_clauses = NULL_TREE;
3751 gfc_start_block (&block);
3752 if (clausesa == NULL)
3753 {
3754 clausesa = clausesa_buf;
3755 gfc_split_omp_clauses (code, clausesa);
3756 }
3757 if (gfc_option.gfc_flag_openmp)
3758 omp_clauses
3759 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TEAMS],
3760 code->loc);
3761 switch (code->op)
3763 case EXEC_OMP_TARGET_TEAMS:
3764 case EXEC_OMP_TEAMS:
3765 stmt = gfc_trans_omp_code (code->block->next, true);
3766 break;
3767 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3768 case EXEC_OMP_TEAMS_DISTRIBUTE:
3769 stmt = gfc_trans_omp_do (code, EXEC_OMP_DISTRIBUTE, NULL,
3770 &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
3771 NULL);
3772 break;
3773 default:
3774 stmt = gfc_trans_omp_distribute (code, clausesa);
3775 break;
3777 stmt = build2_loc (input_location, OMP_TEAMS, void_type_node, stmt,
3778 omp_clauses);
3779 gfc_add_expr_to_block (&block, stmt);
3780 return gfc_finish_block (&block);
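/* Translate TARGET and combined TARGET TEAMS ... constructs.  The body is
   placed in a BIND_EXPR and, when -fopenmp is in effect, wrapped in an
   OMP_TARGET tree with the target clauses.  */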
3783 static tree
3784 gfc_trans_omp_target (gfc_code *code)
3786 stmtblock_t block;
3787 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
3788 tree stmt, omp_clauses = NULL_TREE;
3790 gfc_start_block (&block);
3791 gfc_split_omp_clauses (code, clausesa);
3792 if (gfc_option.gfc_flag_openmp)
3793 omp_clauses
3794 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TARGET],
3795 code->loc);
3796 if (code->op == EXEC_OMP_TARGET)
3797 stmt = gfc_trans_omp_code (code->block->next, true);
3798 else
3799 stmt = gfc_trans_omp_teams (code, clausesa);
3800 if (TREE_CODE (stmt) != BIND_EXPR)
3801 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3802 if (gfc_option.gfc_flag_openmp)
3803 stmt = build2_loc (input_location, OMP_TARGET, void_type_node, stmt,
3804 omp_clauses);
3805 gfc_add_expr_to_block (&block, stmt);
3806 return gfc_finish_block (&block);
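/* Translate !$OMP TARGET DATA into an OMP_TARGET_DATA tree around its body.  */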
3809 static tree
3810 gfc_trans_omp_target_data (gfc_code *code)
3812 stmtblock_t block;
3813 tree stmt, omp_clauses;
3815 gfc_start_block (&block);
3816 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3817 code->loc);
3818 stmt = gfc_trans_omp_code (code->block->next, true);
3819 stmt = build2_loc (input_location, OMP_TARGET_DATA, void_type_node, stmt,
3820 omp_clauses);
3821 gfc_add_expr_to_block (&block, stmt);
3822 return gfc_finish_block (&block);
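/* Translate !$OMP TARGET UPDATE; the directive has no body, only clauses.  */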
3825 static tree
3826 gfc_trans_omp_target_update (gfc_code *code)
3828 stmtblock_t block;
3829 tree stmt, omp_clauses;
3831 gfc_start_block (&block);
3832 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3833 code->loc);
3834 stmt = build1_loc (input_location, OMP_TARGET_UPDATE, void_type_node,
3835 omp_clauses);
3836 gfc_add_expr_to_block (&block, stmt);
3837 return gfc_finish_block (&block);
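/* Translate an OpenMP WORKSHARE construct.  Statements that can be workshared
   (array assignments, FORALL, WHERE) are left to the scalarizer, which emits
   worksharing loops when the OMPWS_* flags are set; all other statements are
   single units of work and get wrapped in OMP_SINGLE below.  */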
3840 static tree
3841 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
3843 tree res, tmp, stmt;
3844 stmtblock_t block, *pblock = NULL;
3845 stmtblock_t singleblock;
3846 int saved_ompws_flags;
3847 bool singleblock_in_progress = false;
3848 /* True if previous gfc_code in workshare construct is not workshared. */
3849 bool prev_singleunit;
3851 code = code->block->next;
3853 pushlevel ();
3855 gfc_start_block (&block);
3856 pblock = &block;
3858 ompws_flags = OMPWS_WORKSHARE_FLAG;
3859 prev_singleunit = false;
3861 /* Translate statements one by one to trees until we reach
3862 the end of the workshare construct. Adjacent gfc_codes that
3863 are a single unit of work are clustered and encapsulated in a
3864 single OMP_SINGLE construct. */
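/* Illustrative example (not from the original sources):

     !$omp workshare
       a = b + c    ! array assignment, workshared by the scalarizer
       s = 1        ! scalar assignment, a single unit of work
       t = 2        ! scalar assignment, clustered with the previous one
     !$omp end workshare

   The two scalar assignments end up together in one OMP_SINGLE construct.  */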
3865 for (; code; code = code->next)
3867 if (code->here != 0)
3868 {
3869 res = gfc_trans_label_here (code);
3870 gfc_add_expr_to_block (pblock, res);
3871 }
3873 /* No dependence analysis is done, so each workshared statement gets an
3874    implicit barrier (wait); only the last gfc_code may take the construct's NOWAIT. */
3875 if (code->next == NULL && clauses->nowait)
3876 ompws_flags |= OMPWS_NOWAIT;
3878 /* By default, every gfc_code is a single unit of work. */
3879 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
3880 ompws_flags &= ~OMPWS_SCALARIZER_WS;
3882 switch (code->op)
3884 case EXEC_NOP:
3885 res = NULL_TREE;
3886 break;
3888 case EXEC_ASSIGN:
3889 res = gfc_trans_assign (code);
3890 break;
3892 case EXEC_POINTER_ASSIGN:
3893 res = gfc_trans_pointer_assign (code);
3894 break;
3896 case EXEC_INIT_ASSIGN:
3897 res = gfc_trans_init_assign (code);
3898 break;
3900 case EXEC_FORALL:
3901 res = gfc_trans_forall (code);
3902 break;
3904 case EXEC_WHERE:
3905 res = gfc_trans_where (code);
3906 break;
3908 case EXEC_OMP_ATOMIC:
3909 res = gfc_trans_omp_directive (code);
3910 break;
3912 case EXEC_OMP_PARALLEL:
3913 case EXEC_OMP_PARALLEL_DO:
3914 case EXEC_OMP_PARALLEL_SECTIONS:
3915 case EXEC_OMP_PARALLEL_WORKSHARE:
3916 case EXEC_OMP_CRITICAL:
3917 saved_ompws_flags = ompws_flags;
3918 ompws_flags = 0;
3919 res = gfc_trans_omp_directive (code);
3920 ompws_flags = saved_ompws_flags;
3921 break;
3923 default:
3924 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
3927 gfc_set_backend_locus (&code->loc);
3929 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
3930 {
3931 if (prev_singleunit)
3932 {
3933 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
3934 /* Add current gfc_code to single block. */
3935 gfc_add_expr_to_block (&singleblock, res);
3936 else
3937 {
3938 /* Finish single block and add it to pblock. */
3939 tmp = gfc_finish_block (&singleblock);
3940 tmp = build2_loc (input_location, OMP_SINGLE,
3941 void_type_node, tmp, NULL_TREE);
3942 gfc_add_expr_to_block (pblock, tmp);
3943 /* Add current gfc_code to pblock. */
3944 gfc_add_expr_to_block (pblock, res);
3945 singleblock_in_progress = false;
3946 }
3947 }
3948 else
3949 {
3950 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
3951 {
3952 /* Start single block. */
3953 gfc_init_block (&singleblock);
3954 gfc_add_expr_to_block (&singleblock, res);
3955 singleblock_in_progress = true;
3956 }
3957 else
3958 /* Add the new statement to the block. */
3959 gfc_add_expr_to_block (pblock, res);
3960 }
3961 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
3962 }
3965 /* Finish remaining SINGLE block, if we were in the middle of one. */
3966 if (singleblock_in_progress)
3967 {
3968 /* Finish single block and add it to pblock. */
3969 tmp = gfc_finish_block (&singleblock);
3970 tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
3971 clauses->nowait
3972 ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
3973 : NULL_TREE);
3974 gfc_add_expr_to_block (pblock, tmp);
3975 }
3977 stmt = gfc_finish_block (pblock);
3978 if (TREE_CODE (stmt) != BIND_EXPR)
3979 {
3980 if (!IS_EMPTY_STMT (stmt))
3981 {
3982 tree bindblock = poplevel (1, 0);
3983 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
3984 }
3985 else
3986 poplevel (0, 0);
3987 }
3988 else
3989 poplevel (0, 0);
3991 if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
3992 stmt = gfc_trans_omp_barrier ();
3994 ompws_flags = 0;
3995 return stmt;
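/* Translate a single OpenMP directive: dispatch on the executable statement
   code to the translation routines above.  */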
3998 tree
3999 gfc_trans_omp_directive (gfc_code *code)
4001 switch (code->op)
4003 case EXEC_OMP_ATOMIC:
4004 return gfc_trans_omp_atomic (code);
4005 case EXEC_OMP_BARRIER:
4006 return gfc_trans_omp_barrier ();
4007 case EXEC_OMP_CANCEL:
4008 return gfc_trans_omp_cancel (code);
4009 case EXEC_OMP_CANCELLATION_POINT:
4010 return gfc_trans_omp_cancellation_point (code);
4011 case EXEC_OMP_CRITICAL:
4012 return gfc_trans_omp_critical (code);
4013 case EXEC_OMP_DISTRIBUTE:
4014 case EXEC_OMP_DO:
4015 case EXEC_OMP_SIMD:
4016 return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
4017 NULL);
4018 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
4019 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
4020 case EXEC_OMP_DISTRIBUTE_SIMD:
4021 return gfc_trans_omp_distribute (code, NULL);
4022 case EXEC_OMP_DO_SIMD:
4023 return gfc_trans_omp_do_simd (code, NULL, NULL, NULL_TREE);
4024 case EXEC_OMP_FLUSH:
4025 return gfc_trans_omp_flush ();
4026 case EXEC_OMP_MASTER:
4027 return gfc_trans_omp_master (code);
4028 case EXEC_OMP_ORDERED:
4029 return gfc_trans_omp_ordered (code);
4030 case EXEC_OMP_PARALLEL:
4031 return gfc_trans_omp_parallel (code);
4032 case EXEC_OMP_PARALLEL_DO:
4033 return gfc_trans_omp_parallel_do (code, NULL, NULL);
4034 case EXEC_OMP_PARALLEL_DO_SIMD:
4035 return gfc_trans_omp_parallel_do_simd (code, NULL, NULL);
4036 case EXEC_OMP_PARALLEL_SECTIONS:
4037 return gfc_trans_omp_parallel_sections (code);
4038 case EXEC_OMP_PARALLEL_WORKSHARE:
4039 return gfc_trans_omp_parallel_workshare (code);
4040 case EXEC_OMP_SECTIONS:
4041 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
4042 case EXEC_OMP_SINGLE:
4043 return gfc_trans_omp_single (code, code->ext.omp_clauses);
4044 case EXEC_OMP_TARGET:
4045 case EXEC_OMP_TARGET_TEAMS:
4046 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
4047 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
4048 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4049 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
4050 return gfc_trans_omp_target (code);
4051 case EXEC_OMP_TARGET_DATA:
4052 return gfc_trans_omp_target_data (code);
4053 case EXEC_OMP_TARGET_UPDATE:
4054 return gfc_trans_omp_target_update (code);
4055 case EXEC_OMP_TASK:
4056 return gfc_trans_omp_task (code);
4057 case EXEC_OMP_TASKGROUP:
4058 return gfc_trans_omp_taskgroup (code);
4059 case EXEC_OMP_TASKWAIT:
4060 return gfc_trans_omp_taskwait ();
4061 case EXEC_OMP_TASKYIELD:
4062 return gfc_trans_omp_taskyield ();
4063 case EXEC_OMP_TEAMS:
4064 case EXEC_OMP_TEAMS_DISTRIBUTE:
4065 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
4066 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4067 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
4068 return gfc_trans_omp_teams (code, NULL);
4069 case EXEC_OMP_WORKSHARE:
4070 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
4071 default:
4072 gcc_unreachable ();
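/* Translate !$OMP DECLARE SIMD directives of a namespace into "omp declare
   simd" attributes on the procedure's FUNCTION_DECL.  Procedures with
   alternate ENTRY points are not handled.  */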
4076 void
4077 gfc_trans_omp_declare_simd (gfc_namespace *ns)
4079 if (ns->entries)
4080 return;
4082 gfc_omp_declare_simd *ods;
4083 for (ods = ns->omp_declare_simd; ods; ods = ods->next)
4084 {
4085 tree c = gfc_trans_omp_clauses (NULL, ods->clauses, ods->where, true);
4086 tree fndecl = ns->proc_name->backend_decl;
4087 if (c != NULL_TREE)
4088 c = tree_cons (NULL_TREE, c, NULL_TREE);
4089 c = build_tree_list (get_identifier ("omp declare simd"), c);
4090 TREE_CHAIN (c) = DECL_ATTRIBUTES (fndecl);
4091 DECL_ATTRIBUTES (fndecl) = c;