Replace enum omp_clause_map_kind with enum gomp_map_kind.
[official-gcc.git] / gcc / fortran / trans-openmp.c
blob: c230f73cd288467731a1b69bc656fae9ecdf3cf4
1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "options.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "gimple-expr.h"
38 #include "gimplify.h" /* For create_tmp_var_raw. */
39 #include "stringpool.h"
40 #include "gfortran.h"
41 #include "diagnostic-core.h" /* For internal_error. */
42 #include "trans.h"
43 #include "trans-stmt.h"
44 #include "trans-types.h"
45 #include "trans-array.h"
46 #include "trans-const.h"
47 #include "arith.h"
48 #include "omp-low.h"
49 #include "gomp-constants.h"
51 int ompws_flags;
53 /* True if OpenMP should privatize what this DECL points to rather
54 than the DECL itself. */
56 bool
57 gfc_omp_privatize_by_reference (const_tree decl)
59 tree type = TREE_TYPE (decl);
61 if (TREE_CODE (type) == REFERENCE_TYPE
62 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
63 return true;
65 if (TREE_CODE (type) == POINTER_TYPE)
 67 /* Array POINTER/ALLOCATABLE have aggregate types; all user variables
68 that have POINTER_TYPE type and aren't scalar pointers, scalar
69 allocatables, Cray pointees or C pointers are supposed to be
70 privatized by reference. */
71 if (GFC_DECL_GET_SCALAR_POINTER (decl)
72 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
73 || GFC_DECL_CRAY_POINTEE (decl)
74 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
75 return false;
77 if (!DECL_ARTIFICIAL (decl)
78 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
79 return true;
81 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
82 by the frontend. */
83 if (DECL_LANG_SPECIFIC (decl)
84 && GFC_DECL_SAVED_DESCRIPTOR (decl))
85 return true;
88 return false;
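/* For instance, a scalar dummy argument passed by reference (a PARM_DECL
   with REFERENCE_TYPE) is privatized by reference, i.e. the object it
   points to is privatized, whereas for a scalar ALLOCATABLE or POINTER
   variable the (pointer) DECL itself is privatized and is handled by the
   ctor/dtor callbacks further below.  */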
91 /* True if OpenMP sharing attribute of DECL is predetermined. */
93 enum omp_clause_default_kind
94 gfc_omp_predetermined_sharing (tree decl)
96 /* Associate names preserve the association established during ASSOCIATE.
97 As they are implemented either as pointers to the selector or array
98 descriptor and shouldn't really change in the ASSOCIATE region,
99 this decl can be either shared or firstprivate. If it is a pointer,
100 use firstprivate, as it is cheaper that way, otherwise make it shared. */
101 if (GFC_DECL_ASSOCIATE_VAR_P (decl))
103 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
104 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
105 else
106 return OMP_CLAUSE_DEFAULT_SHARED;
109 if (DECL_ARTIFICIAL (decl)
110 && ! GFC_DECL_RESULT (decl)
111 && ! (DECL_LANG_SPECIFIC (decl)
112 && GFC_DECL_SAVED_DESCRIPTOR (decl)))
113 return OMP_CLAUSE_DEFAULT_SHARED;
115 /* Cray pointees shouldn't be listed in any clauses and should be
116 gimplified to dereference of the corresponding Cray pointer.
117 Make them all private, so that they are emitted in the debug
118 information. */
119 if (GFC_DECL_CRAY_POINTEE (decl))
120 return OMP_CLAUSE_DEFAULT_PRIVATE;
122 /* Assumed-size arrays are predetermined shared. */
123 if (TREE_CODE (decl) == PARM_DECL
124 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
125 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
126 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
127 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
128 == NULL)
129 return OMP_CLAUSE_DEFAULT_SHARED;
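/* For instance, the dummy argument in
     SUBROUTINE SUB (A)
       REAL :: A(*)
   has no upper bound in its last dimension, so it is caught by the check
   above and predetermined shared.  */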
131 /* Dummy procedures aren't considered variables by OpenMP, thus are
132 disallowed in OpenMP clauses. They are represented as PARM_DECLs
133 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
134 to avoid complaining about their uses with default(none). */
135 if (TREE_CODE (decl) == PARM_DECL
136 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
137 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
138 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
140 /* COMMON and EQUIVALENCE decls are shared. They
141 are only referenced through DECL_VALUE_EXPR of the variables
142 contained in them. If those are privatized, they will not be
143 gimplified to the COMMON or EQUIVALENCE decls. */
144 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
145 return OMP_CLAUSE_DEFAULT_SHARED;
147 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
148 return OMP_CLAUSE_DEFAULT_SHARED;
150 /* These are either array or derived parameters, or vtables.
151 In the former cases, the OpenMP standard doesn't consider them to be
152 variables at all (they can't be redefined), but they can nevertheless appear
 153 in parallel/task regions, so for default(none) purposes treat them as shared.
154 For vtables likely the same handling is desirable. */
155 if (TREE_CODE (decl) == VAR_DECL
156 && TREE_READONLY (decl)
157 && TREE_STATIC (decl))
158 return OMP_CLAUSE_DEFAULT_SHARED;
160 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
163 /* Return decl that should be used when reporting DEFAULT(NONE)
164 diagnostics. */
166 tree
167 gfc_omp_report_decl (tree decl)
169 if (DECL_ARTIFICIAL (decl)
170 && DECL_LANG_SPECIFIC (decl)
171 && GFC_DECL_SAVED_DESCRIPTOR (decl))
172 return GFC_DECL_SAVED_DESCRIPTOR (decl);
174 return decl;
177 /* Return true if TYPE has any allocatable components. */
179 static bool
180 gfc_has_alloc_comps (tree type, tree decl)
182 tree field, ftype;
184 if (POINTER_TYPE_P (type))
186 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
187 type = TREE_TYPE (type);
188 else if (GFC_DECL_GET_SCALAR_POINTER (decl))
189 return false;
192 while (GFC_DESCRIPTOR_TYPE_P (type) || GFC_ARRAY_TYPE_P (type))
193 type = gfc_get_element_type (type);
195 if (TREE_CODE (type) != RECORD_TYPE)
196 return false;
198 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
200 ftype = TREE_TYPE (field);
201 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
202 return true;
203 if (GFC_DESCRIPTOR_TYPE_P (ftype)
204 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
205 return true;
206 if (gfc_has_alloc_comps (ftype, field))
207 return true;
209 return false;
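/* A derived type such as
     TYPE t
       REAL, ALLOCATABLE :: a(:)
     END TYPE t
   has allocatable components, so variables of type T need the deep
   allocate/copy/free handling implemented in gfc_walk_alloc_comps below.  */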
212 /* Return true if DECL in private clause needs
213 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
214 bool
215 gfc_omp_private_outer_ref (tree decl)
217 tree type = TREE_TYPE (decl);
219 if (GFC_DESCRIPTOR_TYPE_P (type)
220 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
221 return true;
223 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
224 return true;
226 if (gfc_omp_privatize_by_reference (decl))
227 type = TREE_TYPE (type);
229 if (gfc_has_alloc_comps (type, decl))
230 return true;
232 return false;
235 /* Callback for gfc_omp_unshare_expr. */
237 static tree
238 gfc_omp_unshare_expr_r (tree *tp, int *walk_subtrees, void *)
240 tree t = *tp;
241 enum tree_code code = TREE_CODE (t);
243 /* Stop at types, decls, constants like copy_tree_r. */
244 if (TREE_CODE_CLASS (code) == tcc_type
245 || TREE_CODE_CLASS (code) == tcc_declaration
246 || TREE_CODE_CLASS (code) == tcc_constant
247 || code == BLOCK)
248 *walk_subtrees = 0;
249 else if (handled_component_p (t)
250 || TREE_CODE (t) == MEM_REF)
252 *tp = unshare_expr (t);
253 *walk_subtrees = 0;
256 return NULL_TREE;
 259 /* Unshare in EXPR anything for which the FE (which normally doesn't
 260 care much about tree sharing, because during gimplification
 261 everything is unshared) could otherwise cause tree sharing
 262 problems at omp-low.c time. */
264 static tree
265 gfc_omp_unshare_expr (tree expr)
267 walk_tree (&expr, gfc_omp_unshare_expr_r, NULL, NULL);
268 return expr;
271 enum walk_alloc_comps
273 WALK_ALLOC_COMPS_DTOR,
274 WALK_ALLOC_COMPS_DEFAULT_CTOR,
275 WALK_ALLOC_COMPS_COPY_CTOR
278 /* Handle allocatable components in OpenMP clauses. */
280 static tree
281 gfc_walk_alloc_comps (tree decl, tree dest, tree var,
282 enum walk_alloc_comps kind)
284 stmtblock_t block, tmpblock;
285 tree type = TREE_TYPE (decl), then_b, tem, field;
286 gfc_init_block (&block);
288 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
290 if (GFC_DESCRIPTOR_TYPE_P (type))
292 gfc_init_block (&tmpblock);
293 tem = gfc_full_array_size (&tmpblock, decl,
294 GFC_TYPE_ARRAY_RANK (type));
295 then_b = gfc_finish_block (&tmpblock);
296 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (then_b));
297 tem = gfc_omp_unshare_expr (tem);
298 tem = fold_build2_loc (input_location, MINUS_EXPR,
299 gfc_array_index_type, tem,
300 gfc_index_one_node);
302 else
304 if (!TYPE_DOMAIN (type)
305 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
306 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
307 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
309 tem = fold_build2 (EXACT_DIV_EXPR, sizetype,
310 TYPE_SIZE_UNIT (type),
311 TYPE_SIZE_UNIT (TREE_TYPE (type)));
312 tem = size_binop (MINUS_EXPR, tem, size_one_node);
314 else
315 tem = array_type_nelts (type);
316 tem = fold_convert (gfc_array_index_type, tem);
319 tree nelems = gfc_evaluate_now (tem, &block);
320 tree index = gfc_create_var (gfc_array_index_type, "S");
322 gfc_init_block (&tmpblock);
323 tem = gfc_conv_array_data (decl);
324 tree declvar = build_fold_indirect_ref_loc (input_location, tem);
325 tree declvref = gfc_build_array_ref (declvar, index, NULL);
326 tree destvar, destvref = NULL_TREE;
327 if (dest)
329 tem = gfc_conv_array_data (dest);
330 destvar = build_fold_indirect_ref_loc (input_location, tem);
331 destvref = gfc_build_array_ref (destvar, index, NULL);
333 gfc_add_expr_to_block (&tmpblock,
334 gfc_walk_alloc_comps (declvref, destvref,
335 var, kind));
337 gfc_loopinfo loop;
338 gfc_init_loopinfo (&loop);
339 loop.dimen = 1;
340 loop.from[0] = gfc_index_zero_node;
341 loop.loopvar[0] = index;
342 loop.to[0] = nelems;
343 gfc_trans_scalarizing_loops (&loop, &tmpblock);
344 gfc_add_block_to_block (&block, &loop.pre);
345 return gfc_finish_block (&block);
347 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var))
349 decl = build_fold_indirect_ref_loc (input_location, decl);
350 if (dest)
351 dest = build_fold_indirect_ref_loc (input_location, dest);
352 type = TREE_TYPE (decl);
355 gcc_assert (TREE_CODE (type) == RECORD_TYPE);
356 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
358 tree ftype = TREE_TYPE (field);
359 tree declf, destf = NULL_TREE;
360 bool has_alloc_comps = gfc_has_alloc_comps (ftype, field);
361 if ((!GFC_DESCRIPTOR_TYPE_P (ftype)
362 || GFC_TYPE_ARRAY_AKIND (ftype) != GFC_ARRAY_ALLOCATABLE)
363 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
364 && !has_alloc_comps)
365 continue;
366 declf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
367 decl, field, NULL_TREE);
368 if (dest)
369 destf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
370 dest, field, NULL_TREE);
372 tem = NULL_TREE;
373 switch (kind)
375 case WALK_ALLOC_COMPS_DTOR:
376 break;
377 case WALK_ALLOC_COMPS_DEFAULT_CTOR:
378 if (GFC_DESCRIPTOR_TYPE_P (ftype)
379 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
381 gfc_add_modify (&block, unshare_expr (destf),
382 unshare_expr (declf));
383 tem = gfc_duplicate_allocatable_nocopy
384 (destf, declf, ftype,
385 GFC_TYPE_ARRAY_RANK (ftype));
387 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
388 tem = gfc_duplicate_allocatable_nocopy (destf, declf, ftype, 0);
389 break;
390 case WALK_ALLOC_COMPS_COPY_CTOR:
391 if (GFC_DESCRIPTOR_TYPE_P (ftype)
392 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
393 tem = gfc_duplicate_allocatable (destf, declf, ftype,
394 GFC_TYPE_ARRAY_RANK (ftype));
395 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
396 tem = gfc_duplicate_allocatable (destf, declf, ftype, 0);
397 break;
399 if (tem)
400 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
401 if (has_alloc_comps)
403 gfc_init_block (&tmpblock);
404 gfc_add_expr_to_block (&tmpblock,
405 gfc_walk_alloc_comps (declf, destf,
406 field, kind));
407 then_b = gfc_finish_block (&tmpblock);
408 if (GFC_DESCRIPTOR_TYPE_P (ftype)
409 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
410 tem = gfc_conv_descriptor_data_get (unshare_expr (declf));
411 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
412 tem = unshare_expr (declf);
413 else
414 tem = NULL_TREE;
415 if (tem)
417 tem = fold_convert (pvoid_type_node, tem);
418 tem = fold_build2_loc (input_location, NE_EXPR,
419 boolean_type_node, tem,
420 null_pointer_node);
421 then_b = build3_loc (input_location, COND_EXPR, void_type_node,
422 tem, then_b,
423 build_empty_stmt (input_location));
425 gfc_add_expr_to_block (&block, then_b);
427 if (kind == WALK_ALLOC_COMPS_DTOR)
429 if (GFC_DESCRIPTOR_TYPE_P (ftype)
430 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
432 tem = gfc_trans_dealloc_allocated (unshare_expr (declf),
433 false, NULL);
434 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
436 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
438 tem = gfc_call_free (unshare_expr (declf));
439 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
444 return gfc_finish_block (&block);
447 /* Return code to initialize DECL with its default constructor, or
448 NULL if there's nothing to do. */
450 tree
451 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
453 tree type = TREE_TYPE (decl), size, ptr, cond, then_b, else_b;
454 stmtblock_t block, cond_block;
456 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
457 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE
458 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR
459 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION);
461 if ((! GFC_DESCRIPTOR_TYPE_P (type)
462 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
463 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
465 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
467 gcc_assert (outer);
468 gfc_start_block (&block);
469 tree tem = gfc_walk_alloc_comps (outer, decl,
470 OMP_CLAUSE_DECL (clause),
471 WALK_ALLOC_COMPS_DEFAULT_CTOR);
472 gfc_add_expr_to_block (&block, tem);
473 return gfc_finish_block (&block);
475 return NULL_TREE;
478 gcc_assert (outer != NULL_TREE);
480 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
481 "not currently allocated" allocation status if outer
482 array is "not currently allocated", otherwise should be allocated. */
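/* The condition built below tests whether the outer variable's data
   pointer is non-NULL; only in that case is the private copy allocated
   (and its allocatable components default-constructed).  For REDUCTION
   clauses the allocation is done unconditionally, as the reduction code
   requires an allocated private copy.  */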
483 gfc_start_block (&block);
485 gfc_init_block (&cond_block);
487 if (GFC_DESCRIPTOR_TYPE_P (type))
489 gfc_add_modify (&cond_block, decl, outer);
490 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
491 size = gfc_conv_descriptor_ubound_get (decl, rank);
492 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
493 size,
494 gfc_conv_descriptor_lbound_get (decl, rank));
495 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
496 size, gfc_index_one_node);
497 if (GFC_TYPE_ARRAY_RANK (type) > 1)
498 size = fold_build2_loc (input_location, MULT_EXPR,
499 gfc_array_index_type, size,
500 gfc_conv_descriptor_stride_get (decl, rank));
501 tree esize = fold_convert (gfc_array_index_type,
502 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
503 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
504 size, esize);
505 size = unshare_expr (size);
506 size = gfc_evaluate_now (fold_convert (size_type_node, size),
507 &cond_block);
509 else
510 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
511 ptr = gfc_create_var (pvoid_type_node, NULL);
512 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
513 if (GFC_DESCRIPTOR_TYPE_P (type))
514 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl), ptr);
515 else
516 gfc_add_modify (&cond_block, unshare_expr (decl),
517 fold_convert (TREE_TYPE (decl), ptr));
518 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
520 tree tem = gfc_walk_alloc_comps (outer, decl,
521 OMP_CLAUSE_DECL (clause),
522 WALK_ALLOC_COMPS_DEFAULT_CTOR);
523 gfc_add_expr_to_block (&cond_block, tem);
525 then_b = gfc_finish_block (&cond_block);
527 /* Reduction clause requires allocated ALLOCATABLE. */
528 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_REDUCTION)
530 gfc_init_block (&cond_block);
531 if (GFC_DESCRIPTOR_TYPE_P (type))
532 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl),
533 null_pointer_node);
534 else
535 gfc_add_modify (&cond_block, unshare_expr (decl),
536 build_zero_cst (TREE_TYPE (decl)));
537 else_b = gfc_finish_block (&cond_block);
539 tree tem = fold_convert (pvoid_type_node,
540 GFC_DESCRIPTOR_TYPE_P (type)
541 ? gfc_conv_descriptor_data_get (outer) : outer);
542 tem = unshare_expr (tem);
543 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
544 tem, null_pointer_node);
545 gfc_add_expr_to_block (&block,
546 build3_loc (input_location, COND_EXPR,
547 void_type_node, cond, then_b,
548 else_b));
550 else
551 gfc_add_expr_to_block (&block, then_b);
553 return gfc_finish_block (&block);
556 /* Build and return code for a copy constructor from SRC to DEST. */
558 tree
559 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
561 tree type = TREE_TYPE (dest), ptr, size, call;
562 tree cond, then_b, else_b;
563 stmtblock_t block, cond_block;
565 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE
566 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
568 if ((! GFC_DESCRIPTOR_TYPE_P (type)
569 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
570 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
572 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
574 gfc_start_block (&block);
575 gfc_add_modify (&block, dest, src);
576 tree tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
577 WALK_ALLOC_COMPS_COPY_CTOR);
578 gfc_add_expr_to_block (&block, tem);
579 return gfc_finish_block (&block);
581 else
582 return build2_v (MODIFY_EXPR, dest, src);
585 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
586 and copied from SRC. */
587 gfc_start_block (&block);
589 gfc_init_block (&cond_block);
591 gfc_add_modify (&cond_block, dest, src);
592 if (GFC_DESCRIPTOR_TYPE_P (type))
594 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
595 size = gfc_conv_descriptor_ubound_get (dest, rank);
596 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
597 size,
598 gfc_conv_descriptor_lbound_get (dest, rank));
599 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
600 size, gfc_index_one_node);
601 if (GFC_TYPE_ARRAY_RANK (type) > 1)
602 size = fold_build2_loc (input_location, MULT_EXPR,
603 gfc_array_index_type, size,
604 gfc_conv_descriptor_stride_get (dest, rank));
605 tree esize = fold_convert (gfc_array_index_type,
606 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
607 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
608 size, esize);
609 size = unshare_expr (size);
610 size = gfc_evaluate_now (fold_convert (size_type_node, size),
611 &cond_block);
613 else
614 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
615 ptr = gfc_create_var (pvoid_type_node, NULL);
616 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
617 if (GFC_DESCRIPTOR_TYPE_P (type))
618 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest), ptr);
619 else
620 gfc_add_modify (&cond_block, unshare_expr (dest),
621 fold_convert (TREE_TYPE (dest), ptr));
623 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
624 ? gfc_conv_descriptor_data_get (src) : src;
625 srcptr = unshare_expr (srcptr);
626 srcptr = fold_convert (pvoid_type_node, srcptr);
627 call = build_call_expr_loc (input_location,
628 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
629 srcptr, size);
630 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
631 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
633 tree tem = gfc_walk_alloc_comps (src, dest,
634 OMP_CLAUSE_DECL (clause),
635 WALK_ALLOC_COMPS_COPY_CTOR);
636 gfc_add_expr_to_block (&cond_block, tem);
638 then_b = gfc_finish_block (&cond_block);
640 gfc_init_block (&cond_block);
641 if (GFC_DESCRIPTOR_TYPE_P (type))
642 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest),
643 null_pointer_node);
644 else
645 gfc_add_modify (&cond_block, unshare_expr (dest),
646 build_zero_cst (TREE_TYPE (dest)));
647 else_b = gfc_finish_block (&cond_block);
649 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
650 unshare_expr (srcptr), null_pointer_node);
651 gfc_add_expr_to_block (&block,
652 build3_loc (input_location, COND_EXPR,
653 void_type_node, cond, then_b, else_b));
655 return gfc_finish_block (&block);
658 /* Similarly, except use an intrinsic or pointer assignment operator
659 instead. */
661 tree
662 gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
664 tree type = TREE_TYPE (dest), ptr, size, call, nonalloc;
665 tree cond, then_b, else_b;
666 stmtblock_t block, cond_block, cond_block2, inner_block;
668 if ((! GFC_DESCRIPTOR_TYPE_P (type)
669 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
670 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
672 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
674 gfc_start_block (&block);
675 /* First dealloc any allocatable components in DEST. */
676 tree tem = gfc_walk_alloc_comps (dest, NULL_TREE,
677 OMP_CLAUSE_DECL (clause),
678 WALK_ALLOC_COMPS_DTOR);
679 gfc_add_expr_to_block (&block, tem);
680 /* Then copy over toplevel data. */
681 gfc_add_modify (&block, dest, src);
682 /* Finally allocate any allocatable components and copy. */
683 tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
684 WALK_ALLOC_COMPS_COPY_CTOR);
685 gfc_add_expr_to_block (&block, tem);
686 return gfc_finish_block (&block);
688 else
689 return build2_v (MODIFY_EXPR, dest, src);
692 gfc_start_block (&block);
694 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
696 then_b = gfc_walk_alloc_comps (dest, NULL_TREE, OMP_CLAUSE_DECL (clause),
697 WALK_ALLOC_COMPS_DTOR);
698 tree tem = fold_convert (pvoid_type_node,
699 GFC_DESCRIPTOR_TYPE_P (type)
700 ? gfc_conv_descriptor_data_get (dest) : dest);
701 tem = unshare_expr (tem);
702 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
703 tem, null_pointer_node);
704 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
705 then_b, build_empty_stmt (input_location));
706 gfc_add_expr_to_block (&block, tem);
709 gfc_init_block (&cond_block);
711 if (GFC_DESCRIPTOR_TYPE_P (type))
713 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
714 size = gfc_conv_descriptor_ubound_get (src, rank);
715 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
716 size,
717 gfc_conv_descriptor_lbound_get (src, rank));
718 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
719 size, gfc_index_one_node);
720 if (GFC_TYPE_ARRAY_RANK (type) > 1)
721 size = fold_build2_loc (input_location, MULT_EXPR,
722 gfc_array_index_type, size,
723 gfc_conv_descriptor_stride_get (src, rank));
724 tree esize = fold_convert (gfc_array_index_type,
725 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
726 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
727 size, esize);
728 size = unshare_expr (size);
729 size = gfc_evaluate_now (fold_convert (size_type_node, size),
730 &cond_block);
732 else
733 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
734 ptr = gfc_create_var (pvoid_type_node, NULL);
736 tree destptr = GFC_DESCRIPTOR_TYPE_P (type)
737 ? gfc_conv_descriptor_data_get (dest) : dest;
738 destptr = unshare_expr (destptr);
739 destptr = fold_convert (pvoid_type_node, destptr);
740 gfc_add_modify (&cond_block, ptr, destptr);
742 nonalloc = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
743 destptr, null_pointer_node);
744 cond = nonalloc;
745 if (GFC_DESCRIPTOR_TYPE_P (type))
747 int i;
748 for (i = 0; i < GFC_TYPE_ARRAY_RANK (type); i++)
750 tree rank = gfc_rank_cst[i];
751 tree tem = gfc_conv_descriptor_ubound_get (src, rank);
752 tem = fold_build2_loc (input_location, MINUS_EXPR,
753 gfc_array_index_type, tem,
754 gfc_conv_descriptor_lbound_get (src, rank));
755 tem = fold_build2_loc (input_location, PLUS_EXPR,
756 gfc_array_index_type, tem,
757 gfc_conv_descriptor_lbound_get (dest, rank));
758 tem = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
759 tem, gfc_conv_descriptor_ubound_get (dest,
760 rank));
761 cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
762 boolean_type_node, cond, tem);
766 gfc_init_block (&cond_block2);
768 if (GFC_DESCRIPTOR_TYPE_P (type))
770 gfc_init_block (&inner_block);
771 gfc_allocate_using_malloc (&inner_block, ptr, size, NULL_TREE);
772 then_b = gfc_finish_block (&inner_block);
774 gfc_init_block (&inner_block);
775 gfc_add_modify (&inner_block, ptr,
776 gfc_call_realloc (&inner_block, ptr, size));
777 else_b = gfc_finish_block (&inner_block);
779 gfc_add_expr_to_block (&cond_block2,
780 build3_loc (input_location, COND_EXPR,
781 void_type_node,
782 unshare_expr (nonalloc),
783 then_b, else_b));
784 gfc_add_modify (&cond_block2, dest, src);
785 gfc_conv_descriptor_data_set (&cond_block2, unshare_expr (dest), ptr);
787 else
789 gfc_allocate_using_malloc (&cond_block2, ptr, size, NULL_TREE);
790 gfc_add_modify (&cond_block2, unshare_expr (dest),
791 fold_convert (type, ptr));
793 then_b = gfc_finish_block (&cond_block2);
794 else_b = build_empty_stmt (input_location);
796 gfc_add_expr_to_block (&cond_block,
797 build3_loc (input_location, COND_EXPR,
798 void_type_node, unshare_expr (cond),
799 then_b, else_b));
801 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
802 ? gfc_conv_descriptor_data_get (src) : src;
803 srcptr = unshare_expr (srcptr);
804 srcptr = fold_convert (pvoid_type_node, srcptr);
805 call = build_call_expr_loc (input_location,
806 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
807 srcptr, size);
808 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
809 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
811 tree tem = gfc_walk_alloc_comps (src, dest,
812 OMP_CLAUSE_DECL (clause),
813 WALK_ALLOC_COMPS_COPY_CTOR);
814 gfc_add_expr_to_block (&cond_block, tem);
816 then_b = gfc_finish_block (&cond_block);
818 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_COPYIN)
820 gfc_init_block (&cond_block);
821 if (GFC_DESCRIPTOR_TYPE_P (type))
822 gfc_add_expr_to_block (&cond_block,
823 gfc_trans_dealloc_allocated (unshare_expr (dest),
824 false, NULL));
825 else
827 destptr = gfc_evaluate_now (destptr, &cond_block);
828 gfc_add_expr_to_block (&cond_block, gfc_call_free (destptr));
829 gfc_add_modify (&cond_block, unshare_expr (dest),
830 build_zero_cst (TREE_TYPE (dest)));
832 else_b = gfc_finish_block (&cond_block);
834 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
835 unshare_expr (srcptr), null_pointer_node);
836 gfc_add_expr_to_block (&block,
837 build3_loc (input_location, COND_EXPR,
838 void_type_node, cond,
839 then_b, else_b));
841 else
842 gfc_add_expr_to_block (&block, then_b);
844 return gfc_finish_block (&block);
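/* Helper for gfc_omp_clause_linear_ctor below: emit into BLOCK a loop
   assigning DEST[i] = SRC[i] + ADD for i = 0 .. NELEMS, handling both a
   plain ARRAY_TYPE DEST/SRC and the pointer-to-element form used for
   descriptor-based arrays.  */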
847 static void
848 gfc_omp_linear_clause_add_loop (stmtblock_t *block, tree dest, tree src,
849 tree add, tree nelems)
851 stmtblock_t tmpblock;
852 tree desta, srca, index = gfc_create_var (gfc_array_index_type, "S");
853 nelems = gfc_evaluate_now (nelems, block);
855 gfc_init_block (&tmpblock);
856 if (TREE_CODE (TREE_TYPE (dest)) == ARRAY_TYPE)
858 desta = gfc_build_array_ref (dest, index, NULL);
859 srca = gfc_build_array_ref (src, index, NULL);
861 else
863 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest)));
864 tree idx = fold_build2 (MULT_EXPR, sizetype,
865 fold_convert (sizetype, index),
866 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest))));
867 desta = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
868 TREE_TYPE (dest), dest,
869 idx));
870 srca = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
871 TREE_TYPE (src), src,
872 idx));
874 gfc_add_modify (&tmpblock, desta,
875 fold_build2 (PLUS_EXPR, TREE_TYPE (desta),
876 srca, add));
878 gfc_loopinfo loop;
879 gfc_init_loopinfo (&loop);
880 loop.dimen = 1;
881 loop.from[0] = gfc_index_zero_node;
882 loop.loopvar[0] = index;
883 loop.to[0] = nelems;
884 gfc_trans_scalarizing_loops (&loop, &tmpblock);
885 gfc_add_block_to_block (block, &loop.pre);
888 /* Build and return code for a constructor of DEST that initializes
889 it to SRC plus ADD (ADD is scalar integer). */
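/* This is used for list items of the LINEAR clause (note the assert on
   OMP_CLAUSE_LINEAR below); e.g. for '!$omp simd linear(a:2)' each
   private copy of A is initialized from the original value plus the
   appropriate multiple of the linear step.  */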
891 tree
892 gfc_omp_clause_linear_ctor (tree clause, tree dest, tree src, tree add)
894 tree type = TREE_TYPE (dest), ptr, size, nelems = NULL_TREE;
895 stmtblock_t block;
897 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
899 gfc_start_block (&block);
900 add = gfc_evaluate_now (add, &block);
902 if ((! GFC_DESCRIPTOR_TYPE_P (type)
903 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
904 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
906 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
907 if (!TYPE_DOMAIN (type)
908 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
909 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
910 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
912 nelems = fold_build2 (EXACT_DIV_EXPR, sizetype,
913 TYPE_SIZE_UNIT (type),
914 TYPE_SIZE_UNIT (TREE_TYPE (type)));
915 nelems = size_binop (MINUS_EXPR, nelems, size_one_node);
917 else
918 nelems = array_type_nelts (type);
919 nelems = fold_convert (gfc_array_index_type, nelems);
921 gfc_omp_linear_clause_add_loop (&block, dest, src, add, nelems);
922 return gfc_finish_block (&block);
925 /* Allocatable arrays in LINEAR clauses need to be allocated
926 and copied from SRC. */
927 gfc_add_modify (&block, dest, src);
928 if (GFC_DESCRIPTOR_TYPE_P (type))
930 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
931 size = gfc_conv_descriptor_ubound_get (dest, rank);
932 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
933 size,
934 gfc_conv_descriptor_lbound_get (dest, rank));
935 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
936 size, gfc_index_one_node);
937 if (GFC_TYPE_ARRAY_RANK (type) > 1)
938 size = fold_build2_loc (input_location, MULT_EXPR,
939 gfc_array_index_type, size,
940 gfc_conv_descriptor_stride_get (dest, rank));
941 tree esize = fold_convert (gfc_array_index_type,
942 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
943 nelems = gfc_evaluate_now (unshare_expr (size), &block);
944 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
945 nelems, unshare_expr (esize));
946 size = gfc_evaluate_now (fold_convert (size_type_node, size),
947 &block);
948 nelems = fold_build2_loc (input_location, MINUS_EXPR,
949 gfc_array_index_type, nelems,
950 gfc_index_one_node);
952 else
953 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
954 ptr = gfc_create_var (pvoid_type_node, NULL);
955 gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
956 if (GFC_DESCRIPTOR_TYPE_P (type))
958 gfc_conv_descriptor_data_set (&block, unshare_expr (dest), ptr);
959 tree etype = gfc_get_element_type (type);
960 ptr = fold_convert (build_pointer_type (etype), ptr);
961 tree srcptr = gfc_conv_descriptor_data_get (unshare_expr (src));
962 srcptr = fold_convert (build_pointer_type (etype), srcptr);
963 gfc_omp_linear_clause_add_loop (&block, ptr, srcptr, add, nelems);
965 else
967 gfc_add_modify (&block, unshare_expr (dest),
968 fold_convert (TREE_TYPE (dest), ptr));
969 ptr = fold_convert (TREE_TYPE (dest), ptr);
970 tree dstm = build_fold_indirect_ref (ptr);
971 tree srcm = build_fold_indirect_ref (unshare_expr (src));
972 gfc_add_modify (&block, dstm,
973 fold_build2 (PLUS_EXPR, TREE_TYPE (add), srcm, add));
975 return gfc_finish_block (&block);
978 /* Build and return code destructing DECL. Return NULL if nothing
979 to be done. */
981 tree
982 gfc_omp_clause_dtor (tree clause, tree decl)
984 tree type = TREE_TYPE (decl), tem;
986 if ((! GFC_DESCRIPTOR_TYPE_P (type)
987 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
988 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
990 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
991 return gfc_walk_alloc_comps (decl, NULL_TREE,
992 OMP_CLAUSE_DECL (clause),
993 WALK_ALLOC_COMPS_DTOR);
994 return NULL_TREE;
997 if (GFC_DESCRIPTOR_TYPE_P (type))
998 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
999 to be deallocated if they were allocated. */
1000 tem = gfc_trans_dealloc_allocated (decl, false, NULL);
1001 else
1002 tem = gfc_call_free (decl);
1003 tem = gfc_omp_unshare_expr (tem);
1005 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
1007 stmtblock_t block;
1008 tree then_b;
1010 gfc_init_block (&block);
1011 gfc_add_expr_to_block (&block,
1012 gfc_walk_alloc_comps (decl, NULL_TREE,
1013 OMP_CLAUSE_DECL (clause),
1014 WALK_ALLOC_COMPS_DTOR));
1015 gfc_add_expr_to_block (&block, tem);
1016 then_b = gfc_finish_block (&block);
1018 tem = fold_convert (pvoid_type_node,
1019 GFC_DESCRIPTOR_TYPE_P (type)
1020 ? gfc_conv_descriptor_data_get (decl) : decl);
1021 tem = unshare_expr (tem);
1022 tree cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1023 tem, null_pointer_node);
1024 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
1025 then_b, build_empty_stmt (input_location));
1027 return tem;
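/* Finish up an implicitly created OMP_CLAUSE_MAP clause C for a Fortran
   variable, adding any needed setup code to PRE_P.  Variables accessed
   by reference and descriptor-based (allocatable/pointer) arrays cannot
   be mapped as a single object, so the clause is expanded into extra
   GOMP_MAP_TO_PSET and GOMP_MAP_POINTER clauses covering the descriptor
   and the data pointer, and OMP_CLAUSE_SIZE is computed at run time.  */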
1031 void
1032 gfc_omp_finish_clause (tree c, gimple_seq *pre_p)
1034 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
1035 return;
1037 tree decl = OMP_CLAUSE_DECL (c);
1038 tree c2 = NULL_TREE, c3 = NULL_TREE, c4 = NULL_TREE;
1039 if (POINTER_TYPE_P (TREE_TYPE (decl)))
1041 if (!gfc_omp_privatize_by_reference (decl)
1042 && !GFC_DECL_GET_SCALAR_POINTER (decl)
1043 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
1044 && !GFC_DECL_CRAY_POINTEE (decl)
1045 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
1046 return;
1047 tree orig_decl = decl;
1048 c4 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1049 OMP_CLAUSE_MAP_KIND (c4) = GOMP_MAP_POINTER;
1050 OMP_CLAUSE_DECL (c4) = decl;
1051 OMP_CLAUSE_SIZE (c4) = size_int (0);
1052 decl = build_fold_indirect_ref (decl);
1053 OMP_CLAUSE_DECL (c) = decl;
1054 OMP_CLAUSE_SIZE (c) = NULL_TREE;
1055 if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
1056 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
1057 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
1059 c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1060 OMP_CLAUSE_MAP_KIND (c3) = GOMP_MAP_POINTER;
1061 OMP_CLAUSE_DECL (c3) = unshare_expr (decl);
1062 OMP_CLAUSE_SIZE (c3) = size_int (0);
1063 decl = build_fold_indirect_ref (decl);
1064 OMP_CLAUSE_DECL (c) = decl;
1067 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1069 stmtblock_t block;
1070 gfc_start_block (&block);
1071 tree type = TREE_TYPE (decl);
1072 tree ptr = gfc_conv_descriptor_data_get (decl);
1073 ptr = fold_convert (build_pointer_type (char_type_node), ptr);
1074 ptr = build_fold_indirect_ref (ptr);
1075 OMP_CLAUSE_DECL (c) = ptr;
1076 c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1077 OMP_CLAUSE_MAP_KIND (c2) = GOMP_MAP_TO_PSET;
1078 OMP_CLAUSE_DECL (c2) = decl;
1079 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (type);
1080 c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1081 OMP_CLAUSE_MAP_KIND (c3) = GOMP_MAP_POINTER;
1082 OMP_CLAUSE_DECL (c3) = gfc_conv_descriptor_data_get (decl);
1083 OMP_CLAUSE_SIZE (c3) = size_int (0);
1084 tree size = create_tmp_var (gfc_array_index_type);
1085 tree elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
1086 elemsz = fold_convert (gfc_array_index_type, elemsz);
1087 if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
1088 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT)
1090 stmtblock_t cond_block;
1091 tree tem, then_b, else_b, zero, cond;
1093 gfc_init_block (&cond_block);
1094 tem = gfc_full_array_size (&cond_block, decl,
1095 GFC_TYPE_ARRAY_RANK (type));
1096 gfc_add_modify (&cond_block, size, tem);
1097 gfc_add_modify (&cond_block, size,
1098 fold_build2 (MULT_EXPR, gfc_array_index_type,
1099 size, elemsz));
1100 then_b = gfc_finish_block (&cond_block);
1101 gfc_init_block (&cond_block);
1102 zero = build_int_cst (gfc_array_index_type, 0);
1103 gfc_add_modify (&cond_block, size, zero);
1104 else_b = gfc_finish_block (&cond_block);
1105 tem = gfc_conv_descriptor_data_get (decl);
1106 tem = fold_convert (pvoid_type_node, tem);
1107 cond = fold_build2_loc (input_location, NE_EXPR,
1108 boolean_type_node, tem, null_pointer_node);
1109 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
1110 void_type_node, cond,
1111 then_b, else_b));
1113 else
1115 gfc_add_modify (&block, size,
1116 gfc_full_array_size (&block, decl,
1117 GFC_TYPE_ARRAY_RANK (type)));
1118 gfc_add_modify (&block, size,
1119 fold_build2 (MULT_EXPR, gfc_array_index_type,
1120 size, elemsz));
1122 OMP_CLAUSE_SIZE (c) = size;
1123 tree stmt = gfc_finish_block (&block);
1124 gimplify_and_add (stmt, pre_p);
1126 tree last = c;
1127 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
1128 OMP_CLAUSE_SIZE (c)
1129 = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
1130 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
1131 if (c2)
1133 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (last);
1134 OMP_CLAUSE_CHAIN (last) = c2;
1135 last = c2;
1137 if (c3)
1139 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (last);
1140 OMP_CLAUSE_CHAIN (last) = c3;
1141 last = c3;
1143 if (c4)
1145 OMP_CLAUSE_CHAIN (c4) = OMP_CLAUSE_CHAIN (last);
1146 OMP_CLAUSE_CHAIN (last) = c4;
1147 last = c4;
1152 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1153 disregarded in OpenMP construct, because it is going to be
1154 remapped during OpenMP lowering. SHARED is true if DECL
1155 is going to be shared, false if it is going to be privatized. */
1157 bool
1158 gfc_omp_disregard_value_expr (tree decl, bool shared)
1160 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1161 && DECL_HAS_VALUE_EXPR_P (decl))
1163 tree value = DECL_VALUE_EXPR (decl);
1165 if (TREE_CODE (value) == COMPONENT_REF
1166 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1167 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1169 /* If variable in COMMON or EQUIVALENCE is privatized, return
1170 true, as just that variable is supposed to be privatized,
1171 not the whole COMMON or whole EQUIVALENCE.
1172 For shared variables in COMMON or EQUIVALENCE, let them be
1173 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1174 from the same COMMON or EQUIVALENCE just one sharing of the
1175 whole COMMON or EQUIVALENCE is enough. */
1176 return ! shared;
1180 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
1181 return ! shared;
1183 return false;
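/* For example, with 'COMMON /blk/ x, y', privatizing X must privatize
   just that variable, so its DECL_VALUE_EXPR (a COMPONENT_REF into the
   common block) is disregarded; sharing X is instead done by sharing
   the whole /blk/ block through the DECL_VALUE_EXPR.  */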
1186 /* Return true if DECL that is shared iff SHARED is true should
1187 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1188 flag set. */
1190 bool
1191 gfc_omp_private_debug_clause (tree decl, bool shared)
1193 if (GFC_DECL_CRAY_POINTEE (decl))
1194 return true;
1196 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1197 && DECL_HAS_VALUE_EXPR_P (decl))
1199 tree value = DECL_VALUE_EXPR (decl);
1201 if (TREE_CODE (value) == COMPONENT_REF
1202 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1203 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1204 return shared;
1207 return false;
1210 /* Register language specific type size variables as potentially OpenMP
1211 firstprivate variables. */
1213 void
1214 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
1216 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
1218 int r;
1220 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
1221 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
1223 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
1224 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
1225 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
1227 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
1228 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
1233 static inline tree
1234 gfc_trans_add_clause (tree node, tree tail)
1236 OMP_CLAUSE_CHAIN (node) = tail;
1237 return node;
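/* Return the backend decl to be put into an OpenMP clause for symbol SYM.
   For 'declare simd' clauses, dummy arguments are identified by position
   instead, so an INTEGER_CST argument index is returned.  Function
   results and alternate entry results are mapped to the corresponding
   fake result decls.  */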
1240 static tree
1241 gfc_trans_omp_variable (gfc_symbol *sym, bool declare_simd)
1243 if (declare_simd)
1245 int cnt = 0;
1246 gfc_symbol *proc_sym;
1247 gfc_formal_arglist *f;
1249 gcc_assert (sym->attr.dummy);
1250 proc_sym = sym->ns->proc_name;
1251 if (proc_sym->attr.entry_master)
1252 ++cnt;
1253 if (gfc_return_by_reference (proc_sym))
1255 ++cnt;
1256 if (proc_sym->ts.type == BT_CHARACTER)
1257 ++cnt;
1259 for (f = gfc_sym_get_dummy_args (proc_sym); f; f = f->next)
1260 if (f->sym == sym)
1261 break;
1262 else if (f->sym)
1263 ++cnt;
1264 gcc_assert (f);
1265 return build_int_cst (integer_type_node, cnt);
1268 tree t = gfc_get_symbol_decl (sym);
1269 tree parent_decl;
1270 int parent_flag;
1271 bool return_value;
1272 bool alternate_entry;
1273 bool entry_master;
1275 return_value = sym->attr.function && sym->result == sym;
1276 alternate_entry = sym->attr.function && sym->attr.entry
1277 && sym->result == sym;
1278 entry_master = sym->attr.result
1279 && sym->ns->proc_name->attr.entry_master
1280 && !gfc_return_by_reference (sym->ns->proc_name);
1281 parent_decl = current_function_decl
1282 ? DECL_CONTEXT (current_function_decl) : NULL_TREE;
1284 if ((t == parent_decl && return_value)
1285 || (sym->ns && sym->ns->proc_name
1286 && sym->ns->proc_name->backend_decl == parent_decl
1287 && (alternate_entry || entry_master)))
1288 parent_flag = 1;
1289 else
1290 parent_flag = 0;
1292 /* Special case for assigning the return value of a function.
1293 Self recursive functions must have an explicit return value. */
1294 if (return_value && (t == current_function_decl || parent_flag))
1295 t = gfc_get_fake_result_decl (sym, parent_flag);
1297 /* Similarly for alternate entry points. */
1298 else if (alternate_entry
1299 && (sym->ns->proc_name->backend_decl == current_function_decl
1300 || parent_flag))
1302 gfc_entry_list *el = NULL;
1304 for (el = sym->ns->entries; el; el = el->next)
1305 if (sym == el->sym)
1307 t = gfc_get_fake_result_decl (sym, parent_flag);
1308 break;
1312 else if (entry_master
1313 && (sym->ns->proc_name->backend_decl == current_function_decl
1314 || parent_flag))
1315 t = gfc_get_fake_result_decl (sym, parent_flag);
1317 return t;
1320 static tree
1321 gfc_trans_omp_variable_list (enum omp_clause_code code,
1322 gfc_omp_namelist *namelist, tree list,
1323 bool declare_simd)
1325 for (; namelist != NULL; namelist = namelist->next)
1326 if (namelist->sym->attr.referenced || declare_simd)
1328 tree t = gfc_trans_omp_variable (namelist->sym, declare_simd);
1329 if (t != error_mark_node)
1331 tree node = build_omp_clause (input_location, code);
1332 OMP_CLAUSE_DECL (node) = t;
1333 list = gfc_trans_add_clause (node, list);
1336 return list;
1339 struct omp_udr_find_orig_data
1341 gfc_omp_udr *omp_udr;
1342 bool omp_orig_seen;
1345 static int
1346 omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
1347 void *data)
1349 struct omp_udr_find_orig_data *cd = (struct omp_udr_find_orig_data *) data;
1350 if ((*e)->expr_type == EXPR_VARIABLE
1351 && (*e)->symtree->n.sym == cd->omp_udr->omp_orig)
1352 cd->omp_orig_seen = true;
1354 return 0;
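/* Fill in OMP_CLAUSE_REDUCTION_INIT, OMP_CLAUSE_REDUCTION_MERGE and
   OMP_CLAUSE_REDUCTION_PLACEHOLDER of clause C for an array reduction or
   a user defined reduction (!$omp declare reduction) described by N,
   located at WHERE.  */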
1357 static void
1358 gfc_trans_omp_array_reduction_or_udr (tree c, gfc_omp_namelist *n, locus where)
1360 gfc_symbol *sym = n->sym;
1361 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
1362 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
1363 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
1364 gfc_symbol omp_var_copy[4];
1365 gfc_expr *e1, *e2, *e3, *e4;
1366 gfc_ref *ref;
1367 tree decl, backend_decl, stmt, type, outer_decl;
1368 locus old_loc = gfc_current_locus;
1369 const char *iname;
1370 bool t;
1371 gfc_omp_udr *udr = n->udr ? n->udr->udr : NULL;
1373 decl = OMP_CLAUSE_DECL (c);
1374 gfc_current_locus = where;
1375 type = TREE_TYPE (decl);
1376 outer_decl = create_tmp_var_raw (type);
1377 if (TREE_CODE (decl) == PARM_DECL
1378 && TREE_CODE (type) == REFERENCE_TYPE
1379 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
1380 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
1382 decl = build_fold_indirect_ref (decl);
1383 type = TREE_TYPE (type);
1386 /* Create a fake symbol for init value. */
1387 memset (&init_val_sym, 0, sizeof (init_val_sym));
1388 init_val_sym.ns = sym->ns;
1389 init_val_sym.name = sym->name;
1390 init_val_sym.ts = sym->ts;
1391 init_val_sym.attr.referenced = 1;
1392 init_val_sym.declared_at = where;
1393 init_val_sym.attr.flavor = FL_VARIABLE;
1394 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1395 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
1396 else if (udr->initializer_ns)
1397 backend_decl = NULL;
1398 else
1399 switch (sym->ts.type)
1401 case BT_LOGICAL:
1402 case BT_INTEGER:
1403 case BT_REAL:
1404 case BT_COMPLEX:
1405 backend_decl = build_zero_cst (gfc_sym_type (&init_val_sym));
1406 break;
1407 default:
1408 backend_decl = NULL_TREE;
1409 break;
1411 init_val_sym.backend_decl = backend_decl;
1413 /* Create a fake symbol for the outer array reference. */
1414 outer_sym = *sym;
1415 if (sym->as)
1416 outer_sym.as = gfc_copy_array_spec (sym->as);
1417 outer_sym.attr.dummy = 0;
1418 outer_sym.attr.result = 0;
1419 outer_sym.attr.flavor = FL_VARIABLE;
1420 outer_sym.backend_decl = outer_decl;
1421 if (decl != OMP_CLAUSE_DECL (c))
1422 outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);
1424 /* Create fake symtrees for it. */
1425 symtree1 = gfc_new_symtree (&root1, sym->name);
1426 symtree1->n.sym = sym;
1427 gcc_assert (symtree1 == root1);
1429 symtree2 = gfc_new_symtree (&root2, sym->name);
1430 symtree2->n.sym = &init_val_sym;
1431 gcc_assert (symtree2 == root2);
1433 symtree3 = gfc_new_symtree (&root3, sym->name);
1434 symtree3->n.sym = &outer_sym;
1435 gcc_assert (symtree3 == root3);
1437 memset (omp_var_copy, 0, sizeof omp_var_copy);
1438 if (udr)
1440 omp_var_copy[0] = *udr->omp_out;
1441 omp_var_copy[1] = *udr->omp_in;
1442 *udr->omp_out = outer_sym;
1443 *udr->omp_in = *sym;
1444 if (udr->initializer_ns)
1446 omp_var_copy[2] = *udr->omp_priv;
1447 omp_var_copy[3] = *udr->omp_orig;
1448 *udr->omp_priv = *sym;
1449 *udr->omp_orig = outer_sym;
1453 /* Create expressions. */
1454 e1 = gfc_get_expr ();
1455 e1->expr_type = EXPR_VARIABLE;
1456 e1->where = where;
1457 e1->symtree = symtree1;
1458 e1->ts = sym->ts;
1459 if (sym->attr.dimension)
1461 e1->ref = ref = gfc_get_ref ();
1462 ref->type = REF_ARRAY;
1463 ref->u.ar.where = where;
1464 ref->u.ar.as = sym->as;
1465 ref->u.ar.type = AR_FULL;
1466 ref->u.ar.dimen = 0;
1468 t = gfc_resolve_expr (e1);
1469 gcc_assert (t);
1471 e2 = NULL;
1472 if (backend_decl != NULL_TREE)
1474 e2 = gfc_get_expr ();
1475 e2->expr_type = EXPR_VARIABLE;
1476 e2->where = where;
1477 e2->symtree = symtree2;
1478 e2->ts = sym->ts;
1479 t = gfc_resolve_expr (e2);
1480 gcc_assert (t);
1482 else if (udr->initializer_ns == NULL)
1484 gcc_assert (sym->ts.type == BT_DERIVED);
1485 e2 = gfc_default_initializer (&sym->ts);
1486 gcc_assert (e2);
1487 t = gfc_resolve_expr (e2);
1488 gcc_assert (t);
1490 else if (n->udr->initializer->op == EXEC_ASSIGN)
1492 e2 = gfc_copy_expr (n->udr->initializer->expr2);
1493 t = gfc_resolve_expr (e2);
1494 gcc_assert (t);
1496 if (udr && udr->initializer_ns)
1498 struct omp_udr_find_orig_data cd;
1499 cd.omp_udr = udr;
1500 cd.omp_orig_seen = false;
1501 gfc_code_walker (&n->udr->initializer,
1502 gfc_dummy_code_callback, omp_udr_find_orig, &cd);
1503 if (cd.omp_orig_seen)
1504 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
1507 e3 = gfc_copy_expr (e1);
1508 e3->symtree = symtree3;
1509 t = gfc_resolve_expr (e3);
1510 gcc_assert (t);
1512 iname = NULL;
1513 e4 = NULL;
1514 switch (OMP_CLAUSE_REDUCTION_CODE (c))
1516 case PLUS_EXPR:
1517 case MINUS_EXPR:
1518 e4 = gfc_add (e3, e1);
1519 break;
1520 case MULT_EXPR:
1521 e4 = gfc_multiply (e3, e1);
1522 break;
1523 case TRUTH_ANDIF_EXPR:
1524 e4 = gfc_and (e3, e1);
1525 break;
1526 case TRUTH_ORIF_EXPR:
1527 e4 = gfc_or (e3, e1);
1528 break;
1529 case EQ_EXPR:
1530 e4 = gfc_eqv (e3, e1);
1531 break;
1532 case NE_EXPR:
1533 e4 = gfc_neqv (e3, e1);
1534 break;
1535 case MIN_EXPR:
1536 iname = "min";
1537 break;
1538 case MAX_EXPR:
1539 iname = "max";
1540 break;
1541 case BIT_AND_EXPR:
1542 iname = "iand";
1543 break;
1544 case BIT_IOR_EXPR:
1545 iname = "ior";
1546 break;
1547 case BIT_XOR_EXPR:
1548 iname = "ieor";
1549 break;
1550 case ERROR_MARK:
1551 if (n->udr->combiner->op == EXEC_ASSIGN)
1553 gfc_free_expr (e3);
1554 e3 = gfc_copy_expr (n->udr->combiner->expr1);
1555 e4 = gfc_copy_expr (n->udr->combiner->expr2);
1556 t = gfc_resolve_expr (e3);
1557 gcc_assert (t);
1558 t = gfc_resolve_expr (e4);
1559 gcc_assert (t);
1561 break;
1562 default:
1563 gcc_unreachable ();
1565 if (iname != NULL)
1567 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
1568 intrinsic_sym.ns = sym->ns;
1569 intrinsic_sym.name = iname;
1570 intrinsic_sym.ts = sym->ts;
1571 intrinsic_sym.attr.referenced = 1;
1572 intrinsic_sym.attr.intrinsic = 1;
1573 intrinsic_sym.attr.function = 1;
1574 intrinsic_sym.result = &intrinsic_sym;
1575 intrinsic_sym.declared_at = where;
1577 symtree4 = gfc_new_symtree (&root4, iname);
1578 symtree4->n.sym = &intrinsic_sym;
1579 gcc_assert (symtree4 == root4);
1581 e4 = gfc_get_expr ();
1582 e4->expr_type = EXPR_FUNCTION;
1583 e4->where = where;
1584 e4->symtree = symtree4;
1585 e4->value.function.actual = gfc_get_actual_arglist ();
1586 e4->value.function.actual->expr = e3;
1587 e4->value.function.actual->next = gfc_get_actual_arglist ();
1588 e4->value.function.actual->next->expr = e1;
1590 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1592 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1593 e1 = gfc_copy_expr (e1);
1594 e3 = gfc_copy_expr (e3);
1595 t = gfc_resolve_expr (e4);
1596 gcc_assert (t);
1599 /* Create the init statement list. */
1600 pushlevel ();
1601 if (e2)
1602 stmt = gfc_trans_assignment (e1, e2, false, false);
1603 else
1604 stmt = gfc_trans_call (n->udr->initializer, false,
1605 NULL_TREE, NULL_TREE, false);
1606 if (TREE_CODE (stmt) != BIND_EXPR)
1607 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1608 else
1609 poplevel (0, 0);
1610 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
1612 /* Create the merge statement list. */
1613 pushlevel ();
1614 if (e4)
1615 stmt = gfc_trans_assignment (e3, e4, false, true);
1616 else
1617 stmt = gfc_trans_call (n->udr->combiner, false,
1618 NULL_TREE, NULL_TREE, false);
1619 if (TREE_CODE (stmt) != BIND_EXPR)
1620 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1621 else
1622 poplevel (0, 0);
1623 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
1625 /* And stick the placeholder VAR_DECL into the clause as well. */
1626 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;
1628 gfc_current_locus = old_loc;
1630 gfc_free_expr (e1);
1631 if (e2)
1632 gfc_free_expr (e2);
1633 gfc_free_expr (e3);
1634 if (e4)
1635 gfc_free_expr (e4);
1636 free (symtree1);
1637 free (symtree2);
1638 free (symtree3);
1639 free (symtree4);
1640 if (outer_sym.as)
1641 gfc_free_array_spec (outer_sym.as);
1643 if (udr)
1645 *udr->omp_out = omp_var_copy[0];
1646 *udr->omp_in = omp_var_copy[1];
1647 if (udr->initializer_ns)
1649 *udr->omp_priv = omp_var_copy[2];
1650 *udr->omp_orig = omp_var_copy[3];
1655 static tree
1656 gfc_trans_omp_reduction_list (gfc_omp_namelist *namelist, tree list,
1657 locus where)
1659 for (; namelist != NULL; namelist = namelist->next)
1660 if (namelist->sym->attr.referenced)
1662 tree t = gfc_trans_omp_variable (namelist->sym, false);
1663 if (t != error_mark_node)
1665 tree node = build_omp_clause (where.lb->location,
1666 OMP_CLAUSE_REDUCTION);
1667 OMP_CLAUSE_DECL (node) = t;
1668 switch (namelist->u.reduction_op)
1670 case OMP_REDUCTION_PLUS:
1671 OMP_CLAUSE_REDUCTION_CODE (node) = PLUS_EXPR;
1672 break;
1673 case OMP_REDUCTION_MINUS:
1674 OMP_CLAUSE_REDUCTION_CODE (node) = MINUS_EXPR;
1675 break;
1676 case OMP_REDUCTION_TIMES:
1677 OMP_CLAUSE_REDUCTION_CODE (node) = MULT_EXPR;
1678 break;
1679 case OMP_REDUCTION_AND:
1680 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ANDIF_EXPR;
1681 break;
1682 case OMP_REDUCTION_OR:
1683 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ORIF_EXPR;
1684 break;
1685 case OMP_REDUCTION_EQV:
1686 OMP_CLAUSE_REDUCTION_CODE (node) = EQ_EXPR;
1687 break;
1688 case OMP_REDUCTION_NEQV:
1689 OMP_CLAUSE_REDUCTION_CODE (node) = NE_EXPR;
1690 break;
1691 case OMP_REDUCTION_MAX:
1692 OMP_CLAUSE_REDUCTION_CODE (node) = MAX_EXPR;
1693 break;
1694 case OMP_REDUCTION_MIN:
1695 OMP_CLAUSE_REDUCTION_CODE (node) = MIN_EXPR;
1696 break;
1697 case OMP_REDUCTION_IAND:
1698 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_AND_EXPR;
1699 break;
1700 case OMP_REDUCTION_IOR:
1701 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_IOR_EXPR;
1702 break;
1703 case OMP_REDUCTION_IEOR:
1704 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_XOR_EXPR;
1705 break;
1706 case OMP_REDUCTION_USER:
1707 OMP_CLAUSE_REDUCTION_CODE (node) = ERROR_MARK;
1708 break;
1709 default:
1710 gcc_unreachable ();
1712 if (namelist->sym->attr.dimension
1713 || namelist->u.reduction_op == OMP_REDUCTION_USER
1714 || namelist->sym->attr.allocatable)
1715 gfc_trans_omp_array_reduction_or_udr (node, namelist, where);
1716 list = gfc_trans_add_clause (node, list);
1719 return list;
1722 static inline tree
1723 gfc_convert_expr_to_tree (stmtblock_t *block, gfc_expr *expr)
1725 gfc_se se;
1726 tree result;
1728 gfc_init_se (&se, NULL );
1729 gfc_conv_expr (&se, expr);
1730 gfc_add_block_to_block (block, &se.pre);
1731 result = gfc_evaluate_now (se.expr, block);
1732 gfc_add_block_to_block (block, &se.post);
1734 return result;
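/* Translate the clause list CLAUSES of an OpenMP (or OpenACC) directive
   at WHERE into a chain of OMP_CLAUSE_* trees, adding any setup code to
   BLOCK.  BLOCK may be NULL (as for 'declare simd'), in which case only
   constant clause expressions can be translated.  */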
1737 static tree
1738 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
1739 locus where, bool declare_simd = false)
1741 tree omp_clauses = NULL_TREE, chunk_size, c;
1742 int list;
1743 enum omp_clause_code clause_code;
1744 gfc_se se;
1746 if (clauses == NULL)
1747 return NULL_TREE;
1749 for (list = 0; list < OMP_LIST_NUM; list++)
1751 gfc_omp_namelist *n = clauses->lists[list];
1753 if (n == NULL)
1754 continue;
1755 switch (list)
1757 case OMP_LIST_REDUCTION:
1758 omp_clauses = gfc_trans_omp_reduction_list (n, omp_clauses, where);
1759 break;
1760 case OMP_LIST_PRIVATE:
1761 clause_code = OMP_CLAUSE_PRIVATE;
1762 goto add_clause;
1763 case OMP_LIST_SHARED:
1764 clause_code = OMP_CLAUSE_SHARED;
1765 goto add_clause;
1766 case OMP_LIST_FIRSTPRIVATE:
1767 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
1768 goto add_clause;
1769 case OMP_LIST_LASTPRIVATE:
1770 clause_code = OMP_CLAUSE_LASTPRIVATE;
1771 goto add_clause;
1772 case OMP_LIST_COPYIN:
1773 clause_code = OMP_CLAUSE_COPYIN;
1774 goto add_clause;
1775 case OMP_LIST_COPYPRIVATE:
1776 clause_code = OMP_CLAUSE_COPYPRIVATE;
1777 goto add_clause;
1778 case OMP_LIST_UNIFORM:
1779 clause_code = OMP_CLAUSE_UNIFORM;
1780 goto add_clause;
1781 case OMP_LIST_USE_DEVICE:
1782 clause_code = OMP_CLAUSE_USE_DEVICE;
1783 goto add_clause;
1784 case OMP_LIST_DEVICE_RESIDENT:
1785 clause_code = OMP_CLAUSE_DEVICE_RESIDENT;
1786 goto add_clause;
1787 case OMP_LIST_CACHE:
1788 clause_code = OMP_CLAUSE__CACHE_;
1789 goto add_clause;
1791 add_clause:
1792 omp_clauses
1793 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses,
1794 declare_simd);
1795 break;
1796 case OMP_LIST_ALIGNED:
1797 for (; n != NULL; n = n->next)
1798 if (n->sym->attr.referenced || declare_simd)
1800 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1801 if (t != error_mark_node)
1803 tree node = build_omp_clause (input_location,
1804 OMP_CLAUSE_ALIGNED);
1805 OMP_CLAUSE_DECL (node) = t;
1806 if (n->expr)
1808 tree alignment_var;
1810 if (block == NULL)
1811 alignment_var = gfc_conv_constant_to_tree (n->expr);
1812 else
1814 gfc_init_se (&se, NULL);
1815 gfc_conv_expr (&se, n->expr);
1816 gfc_add_block_to_block (block, &se.pre);
1817 alignment_var = gfc_evaluate_now (se.expr, block);
1818 gfc_add_block_to_block (block, &se.post);
1820 OMP_CLAUSE_ALIGNED_ALIGNMENT (node) = alignment_var;
1822 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1825 break;
1826 case OMP_LIST_LINEAR:
1828 gfc_expr *last_step_expr = NULL;
1829 tree last_step = NULL_TREE;
1831 for (; n != NULL; n = n->next)
1833 if (n->expr)
1835 last_step_expr = n->expr;
1836 last_step = NULL_TREE;
1838 if (n->sym->attr.referenced || declare_simd)
1840 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1841 if (t != error_mark_node)
1843 tree node = build_omp_clause (input_location,
1844 OMP_CLAUSE_LINEAR);
1845 OMP_CLAUSE_DECL (node) = t;
1846 if (last_step_expr && last_step == NULL_TREE)
1848 if (block == NULL)
1849 last_step
1850 = gfc_conv_constant_to_tree (last_step_expr);
1851 else
1853 gfc_init_se (&se, NULL);
1854 gfc_conv_expr (&se, last_step_expr);
1855 gfc_add_block_to_block (block, &se.pre);
1856 last_step = gfc_evaluate_now (se.expr, block);
1857 gfc_add_block_to_block (block, &se.post);
1860 OMP_CLAUSE_LINEAR_STEP (node)
1861 = fold_convert (gfc_typenode_for_spec (&n->sym->ts),
1862 last_step);
1863 if (n->sym->attr.dimension || n->sym->attr.allocatable)
1864 OMP_CLAUSE_LINEAR_ARRAY (node) = 1;
1865 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1870 break;
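/* Editorial note (added): depend(in/out/inout: ...).  For a whole
   variable the clause decl is the symbol itself (or the dereferenced
   descriptor data for descriptor arrays); for an array element or
   section the address of the (first) referenced element is taken
   instead.  */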
1871 case OMP_LIST_DEPEND:
1872 for (; n != NULL; n = n->next)
1874 if (!n->sym->attr.referenced)
1875 continue;
1877 tree node = build_omp_clause (input_location, OMP_CLAUSE_DEPEND);
1878 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1880 tree decl = gfc_get_symbol_decl (n->sym);
1881 if (gfc_omp_privatize_by_reference (decl))
1882 decl = build_fold_indirect_ref (decl);
1883 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1885 decl = gfc_conv_descriptor_data_get (decl);
1886 decl = fold_convert (build_pointer_type (char_type_node),
1887 decl);
1888 decl = build_fold_indirect_ref (decl);
1890 else if (DECL_P (decl))
1891 TREE_ADDRESSABLE (decl) = 1;
1892 OMP_CLAUSE_DECL (node) = decl;
1894 else
1896 tree ptr;
1897 gfc_init_se (&se, NULL);
1898 if (n->expr->ref->u.ar.type == AR_ELEMENT)
1900 gfc_conv_expr_reference (&se, n->expr);
1901 ptr = se.expr;
1903 else
1905 gfc_conv_expr_descriptor (&se, n->expr);
1906 ptr = gfc_conv_array_data (se.expr);
1908 gfc_add_block_to_block (block, &se.pre);
1909 gfc_add_block_to_block (block, &se.post);
1910 ptr = fold_convert (build_pointer_type (char_type_node),
1911 ptr);
1912 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
1914 switch (n->u.depend_op)
1916 case OMP_DEPEND_IN:
1917 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_IN;
1918 break;
1919 case OMP_DEPEND_OUT:
1920 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_OUT;
1921 break;
1922 case OMP_DEPEND_INOUT:
1923 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_INOUT;
1924 break;
1925 default:
1926 gcc_unreachable ();
1928 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1930 break;
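/* Editorial note (added): map(...).  A single Fortran list item may
   expand into up to four OMP_CLAUSE_MAPs: NODE for the data itself,
   NODE2 (GOMP_MAP_TO_PSET) for the array descriptor, and NODE3/NODE4
   (GOMP_MAP_POINTER) for the data pointer and, where the decl is
   itself a reference, the outer pointer through which the variable is
   accessed.  */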
1931 case OMP_LIST_MAP:
1932 for (; n != NULL; n = n->next)
1934 if (!n->sym->attr.referenced)
1935 continue;
1937 tree node = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1938 tree node2 = NULL_TREE;
1939 tree node3 = NULL_TREE;
1940 tree node4 = NULL_TREE;
1941 tree decl = gfc_get_symbol_decl (n->sym);
1942 if (DECL_P (decl))
1943 TREE_ADDRESSABLE (decl) = 1;
1944 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1946 if (POINTER_TYPE_P (TREE_TYPE (decl))
1947 && (gfc_omp_privatize_by_reference (decl)
1948 || GFC_DECL_GET_SCALAR_POINTER (decl)
1949 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
1950 || GFC_DECL_CRAY_POINTEE (decl)
1951 || GFC_DESCRIPTOR_TYPE_P
1952 (TREE_TYPE (TREE_TYPE (decl)))))
1954 tree orig_decl = decl;
1955 node4 = build_omp_clause (input_location,
1956 OMP_CLAUSE_MAP);
1957 OMP_CLAUSE_MAP_KIND (node4) = GOMP_MAP_POINTER;
1958 OMP_CLAUSE_DECL (node4) = decl;
1959 OMP_CLAUSE_SIZE (node4) = size_int (0);
1960 decl = build_fold_indirect_ref (decl);
1961 if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
1962 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
1963 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
1965 node3 = build_omp_clause (input_location,
1966 OMP_CLAUSE_MAP);
1967 OMP_CLAUSE_MAP_KIND (node3) = GOMP_MAP_POINTER;
1968 OMP_CLAUSE_DECL (node3) = decl;
1969 OMP_CLAUSE_SIZE (node3) = size_int (0);
1970 decl = build_fold_indirect_ref (decl);
1973 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1975 tree type = TREE_TYPE (decl);
1976 tree ptr = gfc_conv_descriptor_data_get (decl);
1977 ptr = fold_convert (build_pointer_type (char_type_node),
1978 ptr);
1979 ptr = build_fold_indirect_ref (ptr);
1980 OMP_CLAUSE_DECL (node) = ptr;
1981 node2 = build_omp_clause (input_location,
1982 OMP_CLAUSE_MAP);
1983 OMP_CLAUSE_MAP_KIND (node2) = GOMP_MAP_TO_PSET;
1984 OMP_CLAUSE_DECL (node2) = decl;
1985 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
1986 node3 = build_omp_clause (input_location,
1987 OMP_CLAUSE_MAP);
1988 OMP_CLAUSE_MAP_KIND (node3) = GOMP_MAP_POINTER;
1989 OMP_CLAUSE_DECL (node3)
1990 = gfc_conv_descriptor_data_get (decl);
1991 OMP_CLAUSE_SIZE (node3) = size_int (0);
1992 if (n->sym->attr.pointer)
1994 stmtblock_t cond_block;
1995 tree size
1996 = gfc_create_var (gfc_array_index_type, NULL);
1997 tree tem, then_b, else_b, zero, cond;
1999 gfc_init_block (&cond_block);
  2000 tem
  2001 = gfc_full_array_size (&cond_block, decl,
2002 GFC_TYPE_ARRAY_RANK (type));
2003 gfc_add_modify (&cond_block, size, tem);
2004 then_b = gfc_finish_block (&cond_block);
2005 gfc_init_block (&cond_block);
2006 zero = build_int_cst (gfc_array_index_type, 0);
2007 gfc_add_modify (&cond_block, size, zero);
2008 else_b = gfc_finish_block (&cond_block);
2009 tem = gfc_conv_descriptor_data_get (decl);
2010 tem = fold_convert (pvoid_type_node, tem);
2011 cond = fold_build2_loc (input_location, NE_EXPR,
2012 boolean_type_node,
2013 tem, null_pointer_node);
2014 gfc_add_expr_to_block (block,
2015 build3_loc (input_location,
2016 COND_EXPR,
2017 void_type_node,
2018 cond, then_b,
2019 else_b));
2020 OMP_CLAUSE_SIZE (node) = size;
2022 else
2023 OMP_CLAUSE_SIZE (node)
2024 = gfc_full_array_size (block, decl,
2025 GFC_TYPE_ARRAY_RANK (type));
2026 tree elemsz
2027 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2028 elemsz = fold_convert (gfc_array_index_type, elemsz);
2029 OMP_CLAUSE_SIZE (node)
2030 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2031 OMP_CLAUSE_SIZE (node), elemsz);
2033 else
2034 OMP_CLAUSE_DECL (node) = decl;
2036 else
2038 tree ptr, ptr2;
2039 gfc_init_se (&se, NULL);
2040 if (n->expr->ref->u.ar.type == AR_ELEMENT)
2042 gfc_conv_expr_reference (&se, n->expr);
2043 gfc_add_block_to_block (block, &se.pre);
2044 ptr = se.expr;
2045 OMP_CLAUSE_SIZE (node)
2046 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
2048 else
2050 gfc_conv_expr_descriptor (&se, n->expr);
2051 ptr = gfc_conv_array_data (se.expr);
2052 tree type = TREE_TYPE (se.expr);
2053 gfc_add_block_to_block (block, &se.pre);
2054 OMP_CLAUSE_SIZE (node)
2055 = gfc_full_array_size (block, se.expr,
2056 GFC_TYPE_ARRAY_RANK (type));
2057 tree elemsz
2058 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2059 elemsz = fold_convert (gfc_array_index_type, elemsz);
2060 OMP_CLAUSE_SIZE (node)
2061 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2062 OMP_CLAUSE_SIZE (node), elemsz);
2064 gfc_add_block_to_block (block, &se.post);
2065 ptr = fold_convert (build_pointer_type (char_type_node),
2066 ptr);
2067 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
2069 if (POINTER_TYPE_P (TREE_TYPE (decl))
2070 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
2072 node4 = build_omp_clause (input_location,
2073 OMP_CLAUSE_MAP);
2074 OMP_CLAUSE_MAP_KIND (node4) = GOMP_MAP_POINTER;
2075 OMP_CLAUSE_DECL (node4) = decl;
2076 OMP_CLAUSE_SIZE (node4) = size_int (0);
2077 decl = build_fold_indirect_ref (decl);
2079 ptr = fold_convert (sizetype, ptr);
2080 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2082 tree type = TREE_TYPE (decl);
2083 ptr2 = gfc_conv_descriptor_data_get (decl);
2084 node2 = build_omp_clause (input_location,
2085 OMP_CLAUSE_MAP);
2086 OMP_CLAUSE_MAP_KIND (node2) = GOMP_MAP_TO_PSET;
2087 OMP_CLAUSE_DECL (node2) = decl;
2088 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
2089 node3 = build_omp_clause (input_location,
2090 OMP_CLAUSE_MAP);
2091 OMP_CLAUSE_MAP_KIND (node3) = GOMP_MAP_POINTER;
2092 OMP_CLAUSE_DECL (node3)
2093 = gfc_conv_descriptor_data_get (decl);
2095 else
2097 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
2098 ptr2 = build_fold_addr_expr (decl);
2099 else
2101 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl)));
2102 ptr2 = decl;
2104 node3 = build_omp_clause (input_location,
2105 OMP_CLAUSE_MAP);
2106 OMP_CLAUSE_MAP_KIND (node3) = GOMP_MAP_POINTER;
2107 OMP_CLAUSE_DECL (node3) = decl;
2109 ptr2 = fold_convert (sizetype, ptr2);
2110 OMP_CLAUSE_SIZE (node3)
2111 = fold_build2 (MINUS_EXPR, sizetype, ptr, ptr2);
2113 switch (n->u.map_op)
2115 case OMP_MAP_ALLOC:
2116 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_ALLOC;
2117 break;
2118 case OMP_MAP_TO:
2119 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_TO;
2120 break;
2121 case OMP_MAP_FROM:
2122 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_FROM;
2123 break;
2124 case OMP_MAP_TOFROM:
2125 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_TOFROM;
2126 break;
2127 case OMP_MAP_FORCE_ALLOC:
2128 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_FORCE_ALLOC;
2129 break;
2130 case OMP_MAP_FORCE_DEALLOC:
2131 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_FORCE_DEALLOC;
2132 break;
2133 case OMP_MAP_FORCE_TO:
2134 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_FORCE_TO;
2135 break;
2136 case OMP_MAP_FORCE_FROM:
2137 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_FORCE_FROM;
2138 break;
2139 case OMP_MAP_FORCE_TOFROM:
2140 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_FORCE_TOFROM;
2141 break;
2142 case OMP_MAP_FORCE_PRESENT:
2143 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_FORCE_PRESENT;
2144 break;
2145 case OMP_MAP_FORCE_DEVICEPTR:
2146 OMP_CLAUSE_MAP_KIND (node) = GOMP_MAP_FORCE_DEVICEPTR;
2147 break;
2148 default:
2149 gcc_unreachable ();
2151 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2152 if (node2)
2153 omp_clauses = gfc_trans_add_clause (node2, omp_clauses);
2154 if (node3)
2155 omp_clauses = gfc_trans_add_clause (node3, omp_clauses);
2156 if (node4)
2157 omp_clauses = gfc_trans_add_clause (node4, omp_clauses);
2159 break;
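/* Editorial note (added): to(...)/from(...), as used by
   !$omp target update, build OMP_CLAUSE_TO or OMP_CLAUSE_FROM.  For
   descriptor arrays the transferred size is the element count times
   the element size; for array elements and sections the address and
   size of the referenced part are used.  */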
2160 case OMP_LIST_TO:
2161 case OMP_LIST_FROM:
2162 for (; n != NULL; n = n->next)
2164 if (!n->sym->attr.referenced)
2165 continue;
2167 tree node = build_omp_clause (input_location,
2168 list == OMP_LIST_TO
2169 ? OMP_CLAUSE_TO : OMP_CLAUSE_FROM);
2170 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
2172 tree decl = gfc_get_symbol_decl (n->sym);
2173 if (gfc_omp_privatize_by_reference (decl))
2174 decl = build_fold_indirect_ref (decl);
2175 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2177 tree type = TREE_TYPE (decl);
2178 tree ptr = gfc_conv_descriptor_data_get (decl);
2179 ptr = fold_convert (build_pointer_type (char_type_node),
2180 ptr);
2181 ptr = build_fold_indirect_ref (ptr);
2182 OMP_CLAUSE_DECL (node) = ptr;
2183 OMP_CLAUSE_SIZE (node)
2184 = gfc_full_array_size (block, decl,
2185 GFC_TYPE_ARRAY_RANK (type));
2186 tree elemsz
2187 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2188 elemsz = fold_convert (gfc_array_index_type, elemsz);
2189 OMP_CLAUSE_SIZE (node)
2190 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2191 OMP_CLAUSE_SIZE (node), elemsz);
2193 else
2194 OMP_CLAUSE_DECL (node) = decl;
2196 else
2198 tree ptr;
2199 gfc_init_se (&se, NULL);
2200 if (n->expr->ref->u.ar.type == AR_ELEMENT)
2202 gfc_conv_expr_reference (&se, n->expr);
2203 ptr = se.expr;
2204 gfc_add_block_to_block (block, &se.pre);
2205 OMP_CLAUSE_SIZE (node)
2206 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
2208 else
2210 gfc_conv_expr_descriptor (&se, n->expr);
2211 ptr = gfc_conv_array_data (se.expr);
2212 tree type = TREE_TYPE (se.expr);
2213 gfc_add_block_to_block (block, &se.pre);
2214 OMP_CLAUSE_SIZE (node)
2215 = gfc_full_array_size (block, se.expr,
2216 GFC_TYPE_ARRAY_RANK (type));
2217 tree elemsz
2218 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2219 elemsz = fold_convert (gfc_array_index_type, elemsz);
2220 OMP_CLAUSE_SIZE (node)
2221 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2222 OMP_CLAUSE_SIZE (node), elemsz);
2224 gfc_add_block_to_block (block, &se.post);
2225 ptr = fold_convert (build_pointer_type (char_type_node),
2226 ptr);
2227 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
2229 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2231 break;
2232 default:
2233 break;
2237 if (clauses->if_expr)
2239 tree if_var;
2241 gfc_init_se (&se, NULL);
2242 gfc_conv_expr (&se, clauses->if_expr);
2243 gfc_add_block_to_block (block, &se.pre);
2244 if_var = gfc_evaluate_now (se.expr, block);
2245 gfc_add_block_to_block (block, &se.post);
2247 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
2248 OMP_CLAUSE_IF_EXPR (c) = if_var;
2249 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2252 if (clauses->final_expr)
2254 tree final_var;
2256 gfc_init_se (&se, NULL);
2257 gfc_conv_expr (&se, clauses->final_expr);
2258 gfc_add_block_to_block (block, &se.pre);
2259 final_var = gfc_evaluate_now (se.expr, block);
2260 gfc_add_block_to_block (block, &se.post);
2262 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
2263 OMP_CLAUSE_FINAL_EXPR (c) = final_var;
2264 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2267 if (clauses->num_threads)
2269 tree num_threads;
2271 gfc_init_se (&se, NULL);
2272 gfc_conv_expr (&se, clauses->num_threads);
2273 gfc_add_block_to_block (block, &se.pre);
2274 num_threads = gfc_evaluate_now (se.expr, block);
2275 gfc_add_block_to_block (block, &se.post);
2277 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
2278 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
2279 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2282 chunk_size = NULL_TREE;
2283 if (clauses->chunk_size)
2285 gfc_init_se (&se, NULL);
2286 gfc_conv_expr (&se, clauses->chunk_size);
2287 gfc_add_block_to_block (block, &se.pre);
2288 chunk_size = gfc_evaluate_now (se.expr, block);
2289 gfc_add_block_to_block (block, &se.post);
2292 if (clauses->sched_kind != OMP_SCHED_NONE)
2294 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
2295 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2296 switch (clauses->sched_kind)
2298 case OMP_SCHED_STATIC:
2299 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
2300 break;
2301 case OMP_SCHED_DYNAMIC:
2302 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
2303 break;
2304 case OMP_SCHED_GUIDED:
2305 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
2306 break;
2307 case OMP_SCHED_RUNTIME:
2308 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
2309 break;
2310 case OMP_SCHED_AUTO:
2311 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
2312 break;
2313 default:
2314 gcc_unreachable ();
2316 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2319 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
2321 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
2322 switch (clauses->default_sharing)
2324 case OMP_DEFAULT_NONE:
2325 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
2326 break;
2327 case OMP_DEFAULT_SHARED:
2328 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
2329 break;
2330 case OMP_DEFAULT_PRIVATE:
2331 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
2332 break;
2333 case OMP_DEFAULT_FIRSTPRIVATE:
2334 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2335 break;
2336 default:
2337 gcc_unreachable ();
2339 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2342 if (clauses->nowait)
2344 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
2345 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2348 if (clauses->ordered)
2350 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
2351 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2354 if (clauses->untied)
2356 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
2357 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2360 if (clauses->mergeable)
2362 c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
2363 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2366 if (clauses->collapse)
2368 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
2369 OMP_CLAUSE_COLLAPSE_EXPR (c)
2370 = build_int_cst (integer_type_node, clauses->collapse);
2371 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2374 if (clauses->inbranch)
2376 c = build_omp_clause (where.lb->location, OMP_CLAUSE_INBRANCH);
2377 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2380 if (clauses->notinbranch)
2382 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOTINBRANCH);
2383 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2386 switch (clauses->cancel)
2388 case OMP_CANCEL_UNKNOWN:
2389 break;
2390 case OMP_CANCEL_PARALLEL:
2391 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PARALLEL);
2392 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2393 break;
2394 case OMP_CANCEL_SECTIONS:
2395 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SECTIONS);
2396 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2397 break;
2398 case OMP_CANCEL_DO:
2399 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FOR);
2400 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2401 break;
2402 case OMP_CANCEL_TASKGROUP:
2403 c = build_omp_clause (where.lb->location, OMP_CLAUSE_TASKGROUP);
2404 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2405 break;
2408 if (clauses->proc_bind != OMP_PROC_BIND_UNKNOWN)
2410 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PROC_BIND);
2411 switch (clauses->proc_bind)
2413 case OMP_PROC_BIND_MASTER:
2414 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_MASTER;
2415 break;
2416 case OMP_PROC_BIND_SPREAD:
2417 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_SPREAD;
2418 break;
2419 case OMP_PROC_BIND_CLOSE:
2420 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_CLOSE;
2421 break;
2422 default:
2423 gcc_unreachable ();
2425 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2428 if (clauses->safelen_expr)
2430 tree safelen_var;
2432 gfc_init_se (&se, NULL);
2433 gfc_conv_expr (&se, clauses->safelen_expr);
2434 gfc_add_block_to_block (block, &se.pre);
2435 safelen_var = gfc_evaluate_now (se.expr, block);
2436 gfc_add_block_to_block (block, &se.post);
2438 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SAFELEN);
2439 OMP_CLAUSE_SAFELEN_EXPR (c) = safelen_var;
2440 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2443 if (clauses->simdlen_expr)
2445 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SIMDLEN);
2446 OMP_CLAUSE_SIMDLEN_EXPR (c)
2447 = gfc_conv_constant_to_tree (clauses->simdlen_expr);
2448 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2451 if (clauses->num_teams)
2453 tree num_teams;
2455 gfc_init_se (&se, NULL);
2456 gfc_conv_expr (&se, clauses->num_teams);
2457 gfc_add_block_to_block (block, &se.pre);
2458 num_teams = gfc_evaluate_now (se.expr, block);
2459 gfc_add_block_to_block (block, &se.post);
2461 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_TEAMS);
2462 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
2463 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2466 if (clauses->device)
2468 tree device;
2470 gfc_init_se (&se, NULL);
2471 gfc_conv_expr (&se, clauses->device);
2472 gfc_add_block_to_block (block, &se.pre);
2473 device = gfc_evaluate_now (se.expr, block);
2474 gfc_add_block_to_block (block, &se.post);
2476 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEVICE);
2477 OMP_CLAUSE_DEVICE_ID (c) = device;
2478 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2481 if (clauses->thread_limit)
2483 tree thread_limit;
2485 gfc_init_se (&se, NULL);
2486 gfc_conv_expr (&se, clauses->thread_limit);
2487 gfc_add_block_to_block (block, &se.pre);
2488 thread_limit = gfc_evaluate_now (se.expr, block);
2489 gfc_add_block_to_block (block, &se.post);
2491 c = build_omp_clause (where.lb->location, OMP_CLAUSE_THREAD_LIMIT);
2492 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
2493 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2496 chunk_size = NULL_TREE;
2497 if (clauses->dist_chunk_size)
2499 gfc_init_se (&se, NULL);
2500 gfc_conv_expr (&se, clauses->dist_chunk_size);
2501 gfc_add_block_to_block (block, &se.pre);
2502 chunk_size = gfc_evaluate_now (se.expr, block);
2503 gfc_add_block_to_block (block, &se.post);
2506 if (clauses->dist_sched_kind != OMP_SCHED_NONE)
2508 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DIST_SCHEDULE);
2509 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2510 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2513 if (clauses->async)
2515 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ASYNC);
2516 if (clauses->async_expr)
2517 OMP_CLAUSE_ASYNC_EXPR (c)
2518 = gfc_convert_expr_to_tree (block, clauses->async_expr);
2519 else
2520 OMP_CLAUSE_ASYNC_EXPR (c) = NULL;
2521 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2523 if (clauses->seq)
2525 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
2526 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2528 if (clauses->independent)
2530 c = build_omp_clause (where.lb->location, OMP_CLAUSE_INDEPENDENT);
2531 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2533 if (clauses->wait_list)
2535 gfc_expr_list *el;
2537 for (el = clauses->wait_list; el; el = el->next)
2539 c = build_omp_clause (where.lb->location, OMP_CLAUSE_WAIT);
2540 OMP_CLAUSE_DECL (c) = gfc_convert_expr_to_tree (block, el->expr);
2541 OMP_CLAUSE_CHAIN (c) = omp_clauses;
2542 omp_clauses = c;
2545 if (clauses->num_gangs_expr)
2547 tree num_gangs_var
2548 = gfc_convert_expr_to_tree (block, clauses->num_gangs_expr);
2549 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_GANGS);
2550 OMP_CLAUSE_NUM_GANGS_EXPR (c) = num_gangs_var;
2551 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2553 if (clauses->num_workers_expr)
2555 tree num_workers_var
2556 = gfc_convert_expr_to_tree (block, clauses->num_workers_expr);
2557 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_WORKERS);
2558 OMP_CLAUSE_NUM_WORKERS_EXPR (c) = num_workers_var;
2559 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2561 if (clauses->vector_length_expr)
2563 tree vector_length_var
2564 = gfc_convert_expr_to_tree (block, clauses->vector_length_expr);
2565 c = build_omp_clause (where.lb->location, OMP_CLAUSE_VECTOR_LENGTH);
2566 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c) = vector_length_var;
2567 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2569 if (clauses->vector)
2571 if (clauses->vector_expr)
2573 tree vector_var
2574 = gfc_convert_expr_to_tree (block, clauses->vector_expr);
2575 c = build_omp_clause (where.lb->location, OMP_CLAUSE_VECTOR);
2576 OMP_CLAUSE_VECTOR_EXPR (c) = vector_var;
2577 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2579 else
2581 c = build_omp_clause (where.lb->location, OMP_CLAUSE_VECTOR);
2582 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2585 if (clauses->worker)
2587 if (clauses->worker_expr)
2589 tree worker_var
2590 = gfc_convert_expr_to_tree (block, clauses->worker_expr);
2591 c = build_omp_clause (where.lb->location, OMP_CLAUSE_WORKER);
2592 OMP_CLAUSE_WORKER_EXPR (c) = worker_var;
2593 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2595 else
2597 c = build_omp_clause (where.lb->location, OMP_CLAUSE_WORKER);
2598 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2601 if (clauses->gang)
2603 if (clauses->gang_expr)
2605 tree gang_var
2606 = gfc_convert_expr_to_tree (block, clauses->gang_expr);
2607 c = build_omp_clause (where.lb->location, OMP_CLAUSE_GANG);
2608 OMP_CLAUSE_GANG_EXPR (c) = gang_var;
2609 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2611 else
2613 c = build_omp_clause (where.lb->location, OMP_CLAUSE_GANG);
2614 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2618 return nreverse (omp_clauses);
2621 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2623 static tree
2624 gfc_trans_omp_code (gfc_code *code, bool force_empty)
2626 tree stmt;
2628 pushlevel ();
2629 stmt = gfc_trans_code (code);
2630 if (TREE_CODE (stmt) != BIND_EXPR)
2632 if (!IS_EMPTY_STMT (stmt) || force_empty)
2634 tree block = poplevel (1, 0);
2635 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
2637 else
2638 poplevel (0, 0);
2640 else
2641 poplevel (0, 0);
2642 return stmt;
2645 /* Trans OpenACC directives. */
2646 /* parallel, kernels, data and host_data. */
2647 static tree
2648 gfc_trans_oacc_construct (gfc_code *code)
2650 stmtblock_t block;
2651 tree stmt, oacc_clauses;
2652 enum tree_code construct_code;
2654 switch (code->op)
2656 case EXEC_OACC_PARALLEL:
2657 construct_code = OACC_PARALLEL;
2658 break;
2659 case EXEC_OACC_KERNELS:
2660 construct_code = OACC_KERNELS;
2661 break;
2662 case EXEC_OACC_DATA:
2663 construct_code = OACC_DATA;
2664 break;
2665 case EXEC_OACC_HOST_DATA:
2666 construct_code = OACC_HOST_DATA;
2667 break;
2668 default:
2669 gcc_unreachable ();
2672 gfc_start_block (&block);
2673 oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
2674 code->loc);
2675 stmt = gfc_trans_omp_code (code->block->next, true);
2676 stmt = build2_loc (input_location, construct_code, void_type_node, stmt,
2677 oacc_clauses);
2678 gfc_add_expr_to_block (&block, stmt);
2679 return gfc_finish_block (&block);
2682 /* update, enter_data, exit_data, cache. */
2683 static tree
2684 gfc_trans_oacc_executable_directive (gfc_code *code)
2686 stmtblock_t block;
2687 tree stmt, oacc_clauses;
2688 enum tree_code construct_code;
2690 switch (code->op)
2692 case EXEC_OACC_UPDATE:
2693 construct_code = OACC_UPDATE;
2694 break;
2695 case EXEC_OACC_ENTER_DATA:
2696 construct_code = OACC_ENTER_DATA;
2697 break;
2698 case EXEC_OACC_EXIT_DATA:
2699 construct_code = OACC_EXIT_DATA;
2700 break;
2701 case EXEC_OACC_CACHE:
2702 construct_code = OACC_CACHE;
2703 break;
2704 default:
2705 gcc_unreachable ();
2708 gfc_start_block (&block);
2709 oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
2710 code->loc);
2711 stmt = build1_loc (input_location, construct_code, void_type_node,
2712 oacc_clauses);
2713 gfc_add_expr_to_block (&block, stmt);
2714 return gfc_finish_block (&block);
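/* Editorial note (added): !$acc wait expands into a direct call to the
   GOACC_wait builtin, passing the async argument (or -2 when no async
   clause is present, which presumably denotes a synchronous wait)
   followed by the number of wait operands and the operands
   themselves.  */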
2717 static tree
2718 gfc_trans_oacc_wait_directive (gfc_code *code)
2720 stmtblock_t block;
2721 tree stmt, t;
2722 vec<tree, va_gc> *args;
2723 int nparms = 0;
2724 gfc_expr_list *el;
2725 gfc_omp_clauses *clauses = code->ext.omp_clauses;
2726 location_t loc = input_location;
2728 for (el = clauses->wait_list; el; el = el->next)
2729 nparms++;
2731 vec_alloc (args, nparms + 2);
2732 stmt = builtin_decl_explicit (BUILT_IN_GOACC_WAIT);
2734 gfc_start_block (&block);
2736 if (clauses->async_expr)
2737 t = gfc_convert_expr_to_tree (&block, clauses->async_expr);
2738 else
2739 t = build_int_cst (integer_type_node, -2);
2741 args->quick_push (t);
2742 args->quick_push (build_int_cst (integer_type_node, nparms));
2744 for (el = clauses->wait_list; el; el = el->next)
2745 args->quick_push (gfc_convert_expr_to_tree (&block, el->expr));
2747 stmt = build_call_expr_loc_vec (loc, stmt, args);
2748 gfc_add_expr_to_block (&block, stmt);
2750 vec_free (args);
2752 return gfc_finish_block (&block);
2755 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
2756 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
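/* Editorial note (added): translate !$omp atomic
   (read/write/update/capture).  READ becomes OMP_ATOMIC_READ, plain
   updates become OMP_ATOMIC, and capture forms become
   OMP_ATOMIC_CAPTURE_OLD or _NEW depending on whether the captured
   value is taken before or after the update.  An illustrative Fortran
   input (not taken from this file) for the capture-old case:
       !$omp atomic capture
       v = x
       x = x + 1
       !$omp end atomic
   The intrinsic update forms (min, max, iand, ior, ieor) map onto the
   corresponding tree codes below.  */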
2758 static tree
2759 gfc_trans_omp_atomic (gfc_code *code)
2761 gfc_code *atomic_code = code;
2762 gfc_se lse;
2763 gfc_se rse;
2764 gfc_se vse;
2765 gfc_expr *expr2, *e;
2766 gfc_symbol *var;
2767 stmtblock_t block;
2768 tree lhsaddr, type, rhs, x;
2769 enum tree_code op = ERROR_MARK;
2770 enum tree_code aop = OMP_ATOMIC;
2771 bool var_on_left = false;
2772 bool seq_cst = (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SEQ_CST) != 0;
2774 code = code->block->next;
2775 gcc_assert (code->op == EXEC_ASSIGN);
2776 var = code->expr1->symtree->n.sym;
2778 gfc_init_se (&lse, NULL);
2779 gfc_init_se (&rse, NULL);
2780 gfc_init_se (&vse, NULL);
2781 gfc_start_block (&block);
2783 expr2 = code->expr2;
2784 if (expr2->expr_type == EXPR_FUNCTION
2785 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2786 expr2 = expr2->value.function.actual->expr;
2788 switch (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2790 case GFC_OMP_ATOMIC_READ:
2791 gfc_conv_expr (&vse, code->expr1);
2792 gfc_add_block_to_block (&block, &vse.pre);
2794 gfc_conv_expr (&lse, expr2);
2795 gfc_add_block_to_block (&block, &lse.pre);
2796 type = TREE_TYPE (lse.expr);
2797 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2799 x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
2800 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2801 x = convert (TREE_TYPE (vse.expr), x);
2802 gfc_add_modify (&block, vse.expr, x);
2804 gfc_add_block_to_block (&block, &lse.pre);
2805 gfc_add_block_to_block (&block, &rse.pre);
2807 return gfc_finish_block (&block);
2808 case GFC_OMP_ATOMIC_CAPTURE:
2809 aop = OMP_ATOMIC_CAPTURE_NEW;
2810 if (expr2->expr_type == EXPR_VARIABLE)
2812 aop = OMP_ATOMIC_CAPTURE_OLD;
2813 gfc_conv_expr (&vse, code->expr1);
2814 gfc_add_block_to_block (&block, &vse.pre);
2816 gfc_conv_expr (&lse, expr2);
2817 gfc_add_block_to_block (&block, &lse.pre);
2818 gfc_init_se (&lse, NULL);
2819 code = code->next;
2820 var = code->expr1->symtree->n.sym;
2821 expr2 = code->expr2;
2822 if (expr2->expr_type == EXPR_FUNCTION
2823 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2824 expr2 = expr2->value.function.actual->expr;
2826 break;
2827 default:
2828 break;
2831 gfc_conv_expr (&lse, code->expr1);
2832 gfc_add_block_to_block (&block, &lse.pre);
2833 type = TREE_TYPE (lse.expr);
2834 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2836 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2837 == GFC_OMP_ATOMIC_WRITE)
2838 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2840 gfc_conv_expr (&rse, expr2);
2841 gfc_add_block_to_block (&block, &rse.pre);
2843 else if (expr2->expr_type == EXPR_OP)
2845 gfc_expr *e;
2846 switch (expr2->value.op.op)
2848 case INTRINSIC_PLUS:
2849 op = PLUS_EXPR;
2850 break;
2851 case INTRINSIC_TIMES:
2852 op = MULT_EXPR;
2853 break;
2854 case INTRINSIC_MINUS:
2855 op = MINUS_EXPR;
2856 break;
2857 case INTRINSIC_DIVIDE:
2858 if (expr2->ts.type == BT_INTEGER)
2859 op = TRUNC_DIV_EXPR;
2860 else
2861 op = RDIV_EXPR;
2862 break;
2863 case INTRINSIC_AND:
2864 op = TRUTH_ANDIF_EXPR;
2865 break;
2866 case INTRINSIC_OR:
2867 op = TRUTH_ORIF_EXPR;
2868 break;
2869 case INTRINSIC_EQV:
2870 op = EQ_EXPR;
2871 break;
2872 case INTRINSIC_NEQV:
2873 op = NE_EXPR;
2874 break;
2875 default:
2876 gcc_unreachable ();
2878 e = expr2->value.op.op1;
2879 if (e->expr_type == EXPR_FUNCTION
2880 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2881 e = e->value.function.actual->expr;
2882 if (e->expr_type == EXPR_VARIABLE
2883 && e->symtree != NULL
2884 && e->symtree->n.sym == var)
2886 expr2 = expr2->value.op.op2;
2887 var_on_left = true;
2889 else
2891 e = expr2->value.op.op2;
2892 if (e->expr_type == EXPR_FUNCTION
2893 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2894 e = e->value.function.actual->expr;
2895 gcc_assert (e->expr_type == EXPR_VARIABLE
2896 && e->symtree != NULL
2897 && e->symtree->n.sym == var);
2898 expr2 = expr2->value.op.op1;
2899 var_on_left = false;
2901 gfc_conv_expr (&rse, expr2);
2902 gfc_add_block_to_block (&block, &rse.pre);
2904 else
2906 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
2907 switch (expr2->value.function.isym->id)
2909 case GFC_ISYM_MIN:
2910 op = MIN_EXPR;
2911 break;
2912 case GFC_ISYM_MAX:
2913 op = MAX_EXPR;
2914 break;
2915 case GFC_ISYM_IAND:
2916 op = BIT_AND_EXPR;
2917 break;
2918 case GFC_ISYM_IOR:
2919 op = BIT_IOR_EXPR;
2920 break;
2921 case GFC_ISYM_IEOR:
2922 op = BIT_XOR_EXPR;
2923 break;
2924 default:
2925 gcc_unreachable ();
2927 e = expr2->value.function.actual->expr;
2928 gcc_assert (e->expr_type == EXPR_VARIABLE
2929 && e->symtree != NULL
2930 && e->symtree->n.sym == var);
2932 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
2933 gfc_add_block_to_block (&block, &rse.pre);
2934 if (expr2->value.function.actual->next->next != NULL)
2936 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
2937 gfc_actual_arglist *arg;
2939 gfc_add_modify (&block, accum, rse.expr);
2940 for (arg = expr2->value.function.actual->next->next; arg;
2941 arg = arg->next)
2943 gfc_init_block (&rse.pre);
2944 gfc_conv_expr (&rse, arg->expr);
2945 gfc_add_block_to_block (&block, &rse.pre);
2946 x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
2947 accum, rse.expr);
2948 gfc_add_modify (&block, accum, x);
2951 rse.expr = accum;
2954 expr2 = expr2->value.function.actual->next->expr;
2957 lhsaddr = save_expr (lhsaddr);
2958 if (TREE_CODE (lhsaddr) != SAVE_EXPR
2959 && (TREE_CODE (lhsaddr) != ADDR_EXPR
2960 || TREE_CODE (TREE_OPERAND (lhsaddr, 0)) != VAR_DECL))
2962 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
2963 it even after unsharing function body. */
2964 tree var = create_tmp_var_raw (TREE_TYPE (lhsaddr));
2965 DECL_CONTEXT (var) = current_function_decl;
2966 lhsaddr = build4 (TARGET_EXPR, TREE_TYPE (lhsaddr), var, lhsaddr,
2967 NULL_TREE, NULL_TREE);
2970 rhs = gfc_evaluate_now (rse.expr, &block);
2972 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2973 == GFC_OMP_ATOMIC_WRITE)
2974 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2975 x = rhs;
2976 else
2978 x = convert (TREE_TYPE (rhs),
2979 build_fold_indirect_ref_loc (input_location, lhsaddr));
2980 if (var_on_left)
2981 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
2982 else
2983 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
2986 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
2987 && TREE_CODE (type) != COMPLEX_TYPE)
2988 x = fold_build1_loc (input_location, REALPART_EXPR,
2989 TREE_TYPE (TREE_TYPE (rhs)), x);
2991 gfc_add_block_to_block (&block, &lse.pre);
2992 gfc_add_block_to_block (&block, &rse.pre);
2994 if (aop == OMP_ATOMIC)
2996 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
2997 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2998 gfc_add_expr_to_block (&block, x);
3000 else
3002 if (aop == OMP_ATOMIC_CAPTURE_NEW)
3004 code = code->next;
3005 expr2 = code->expr2;
3006 if (expr2->expr_type == EXPR_FUNCTION
3007 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
3008 expr2 = expr2->value.function.actual->expr;
3010 gcc_assert (expr2->expr_type == EXPR_VARIABLE);
3011 gfc_conv_expr (&vse, code->expr1);
3012 gfc_add_block_to_block (&block, &vse.pre);
3014 gfc_init_se (&lse, NULL);
3015 gfc_conv_expr (&lse, expr2);
3016 gfc_add_block_to_block (&block, &lse.pre);
3018 x = build2 (aop, type, lhsaddr, convert (type, x));
3019 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
3020 x = convert (TREE_TYPE (vse.expr), x);
3021 gfc_add_modify (&block, vse.expr, x);
3024 return gfc_finish_block (&block);
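/* Editorial note (added): !$omp barrier expands to a call to the
   GOMP_barrier builtin.  */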
3027 static tree
3028 gfc_trans_omp_barrier (void)
3030 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
3031 return build_call_expr_loc (input_location, decl, 0);
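/* Editorial note (added): !$omp cancel.  The construct being cancelled
   is encoded in a mask (1 parallel, 2 do, 4 sections, 8 taskgroup) and
   passed to GOMP_cancel together with the if () expression converted
   to the builtin's boolean argument type (true when the clause is
   absent).  The same mask encoding is used by !$omp cancellation point
   below.  */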
3034 static tree
3035 gfc_trans_omp_cancel (gfc_code *code)
3037 int mask = 0;
3038 tree ifc = boolean_true_node;
3039 stmtblock_t block;
3040 switch (code->ext.omp_clauses->cancel)
3042 case OMP_CANCEL_PARALLEL: mask = 1; break;
3043 case OMP_CANCEL_DO: mask = 2; break;
3044 case OMP_CANCEL_SECTIONS: mask = 4; break;
3045 case OMP_CANCEL_TASKGROUP: mask = 8; break;
3046 default: gcc_unreachable ();
3048 gfc_start_block (&block);
3049 if (code->ext.omp_clauses->if_expr)
3051 gfc_se se;
3052 tree if_var;
3054 gfc_init_se (&se, NULL);
3055 gfc_conv_expr (&se, code->ext.omp_clauses->if_expr);
3056 gfc_add_block_to_block (&block, &se.pre);
3057 if_var = gfc_evaluate_now (se.expr, &block);
3058 gfc_add_block_to_block (&block, &se.post);
3059 tree type = TREE_TYPE (if_var);
3060 ifc = fold_build2_loc (input_location, NE_EXPR,
3061 boolean_type_node, if_var,
3062 build_zero_cst (type));
3064 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
3065 tree c_bool_type = TREE_TYPE (TREE_TYPE (decl));
3066 ifc = fold_convert (c_bool_type, ifc);
3067 gfc_add_expr_to_block (&block,
3068 build_call_expr_loc (input_location, decl, 2,
3069 build_int_cst (integer_type_node,
3070 mask), ifc));
3071 return gfc_finish_block (&block);
3074 static tree
3075 gfc_trans_omp_cancellation_point (gfc_code *code)
3077 int mask = 0;
3078 switch (code->ext.omp_clauses->cancel)
3080 case OMP_CANCEL_PARALLEL: mask = 1; break;
3081 case OMP_CANCEL_DO: mask = 2; break;
3082 case OMP_CANCEL_SECTIONS: mask = 4; break;
3083 case OMP_CANCEL_TASKGROUP: mask = 8; break;
3084 default: gcc_unreachable ();
3086 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
3087 return build_call_expr_loc (input_location, decl, 1,
3088 build_int_cst (integer_type_node, mask));
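/* Editorial note (added): !$omp critical becomes an OMP_CRITICAL node;
   a named critical keeps its name as an identifier, so criticals with
   different names do not exclude each other.  */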
3091 static tree
3092 gfc_trans_omp_critical (gfc_code *code)
3094 tree name = NULL_TREE, stmt;
3095 if (code->ext.omp_name != NULL)
3096 name = get_identifier (code->ext.omp_name);
3097 stmt = gfc_trans_code (code->block->next);
3098 return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
3101 typedef struct dovar_init_d {
3102 tree var;
3103 tree init;
3104 } dovar_init;
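/* Editorial note (added): translate a (possibly collapsed) loop nest
   for !$omp do, !$omp simd, !$omp distribute or !$acc loop into an
   OMP_FOR-family node, filling the init/cond/incr vectors for each
   collapsed level.  A unit-stride DO with a VAR_DECL iterator is
   emitted directly; any other step is rewritten to iterate over a
   synthesized COUNT variable (see the comment in the body).  The
   dovar_init pairs above record the "dovar = from + count * step"
   assignments that must be re-emitted at the top of the loop body.  */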
3107 static tree
3108 gfc_trans_omp_do (gfc_code *code, gfc_exec_op op, stmtblock_t *pblock,
3109 gfc_omp_clauses *do_clauses, tree par_clauses)
3111 gfc_se se;
3112 tree dovar, stmt, from, to, step, type, init, cond, incr;
3113 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
3114 stmtblock_t block;
3115 stmtblock_t body;
3116 gfc_omp_clauses *clauses = code->ext.omp_clauses;
3117 int i, collapse = clauses->collapse;
3118 vec<dovar_init> inits = vNULL;
3119 dovar_init *di;
3120 unsigned ix;
3122 if (collapse <= 0)
3123 collapse = 1;
3125 code = code->block->next;
3126 gcc_assert (code->op == EXEC_DO);
3128 init = make_tree_vec (collapse);
3129 cond = make_tree_vec (collapse);
3130 incr = make_tree_vec (collapse);
3132 if (pblock == NULL)
3134 gfc_start_block (&block);
3135 pblock = &block;
3138 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
3140 for (i = 0; i < collapse; i++)
3142 int simple = 0;
3143 int dovar_found = 0;
3144 tree dovar_decl;
3146 if (clauses)
3148 gfc_omp_namelist *n = NULL;
3149 if (op != EXEC_OMP_DISTRIBUTE)
3150 for (n = clauses->lists[(op == EXEC_OMP_SIMD && collapse == 1)
3151 ? OMP_LIST_LINEAR : OMP_LIST_LASTPRIVATE];
3152 n != NULL; n = n->next)
3153 if (code->ext.iterator->var->symtree->n.sym == n->sym)
3154 break;
3155 if (n != NULL)
3156 dovar_found = 1;
3157 else if (n == NULL && op != EXEC_OMP_SIMD)
3158 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
3159 if (code->ext.iterator->var->symtree->n.sym == n->sym)
3160 break;
3161 if (n != NULL)
3162 dovar_found++;
3165 /* Evaluate all the expressions in the iterator. */
3166 gfc_init_se (&se, NULL);
3167 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
3168 gfc_add_block_to_block (pblock, &se.pre);
3169 dovar = se.expr;
3170 type = TREE_TYPE (dovar);
3171 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
3173 gfc_init_se (&se, NULL);
3174 gfc_conv_expr_val (&se, code->ext.iterator->start);
3175 gfc_add_block_to_block (pblock, &se.pre);
3176 from = gfc_evaluate_now (se.expr, pblock);
3178 gfc_init_se (&se, NULL);
3179 gfc_conv_expr_val (&se, code->ext.iterator->end);
3180 gfc_add_block_to_block (pblock, &se.pre);
3181 to = gfc_evaluate_now (se.expr, pblock);
3183 gfc_init_se (&se, NULL);
3184 gfc_conv_expr_val (&se, code->ext.iterator->step);
3185 gfc_add_block_to_block (pblock, &se.pre);
3186 step = gfc_evaluate_now (se.expr, pblock);
3187 dovar_decl = dovar;
3189 /* Special case simple loops. */
3190 if (TREE_CODE (dovar) == VAR_DECL)
3192 if (integer_onep (step))
3193 simple = 1;
3194 else if (tree_int_cst_equal (step, integer_minus_one_node))
3195 simple = -1;
3197 else
3198 dovar_decl
3199 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym,
3200 false);
3202 /* Loop body. */
3203 if (simple)
3205 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
3206 /* The condition should not be folded. */
3207 TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
3208 ? LE_EXPR : GE_EXPR,
3209 boolean_type_node, dovar, to);
3210 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
3211 type, dovar, step);
3212 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
3213 MODIFY_EXPR,
3214 type, dovar,
3215 TREE_VEC_ELT (incr, i));
3217 else
3219 /* STEP is not 1 or -1. Use:
3220 for (count = 0; count < (to + step - from) / step; count++)
3222 dovar = from + count * step;
3223 body;
3224 cycle_label:;
3225 } */
3226 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
3227 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
3228 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
3229 step);
3230 tmp = gfc_evaluate_now (tmp, pblock);
3231 count = gfc_create_var (type, "count");
3232 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
3233 build_int_cst (type, 0));
3234 /* The condition should not be folded. */
3235 TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
3236 boolean_type_node,
3237 count, tmp);
3238 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
3239 type, count,
3240 build_int_cst (type, 1));
3241 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
3242 MODIFY_EXPR, type, count,
3243 TREE_VEC_ELT (incr, i));
3245 /* Initialize DOVAR. */
3246 tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
3247 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
3248 dovar_init e = {dovar, tmp};
3249 inits.safe_push (e);
3252 if (!dovar_found)
3254 if (op == EXEC_OMP_SIMD)
3256 if (collapse == 1)
3258 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
3259 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
3261 else
3262 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
3263 if (!simple)
3264 dovar_found = 2;
3266 else
3267 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
3268 OMP_CLAUSE_DECL (tmp) = dovar_decl;
3269 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
3271 if (dovar_found == 2)
3273 tree c = NULL;
3275 tmp = NULL;
3276 if (!simple)
3278 /* If dovar is lastprivate, but different counter is used,
3279 dovar += step needs to be added to
3280 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3281 will have the value on entry of the last loop, rather
3282 than value after iterator increment. */
3283 tmp = gfc_evaluate_now (step, pblock);
3284 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
3285 tmp);
3286 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
3287 dovar, tmp);
3288 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3289 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3290 && OMP_CLAUSE_DECL (c) == dovar_decl)
3292 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
3293 break;
3295 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3296 && OMP_CLAUSE_DECL (c) == dovar_decl)
3298 OMP_CLAUSE_LINEAR_STMT (c) = tmp;
3299 break;
3302 if (c == NULL && op == EXEC_OMP_DO && par_clauses != NULL)
3304 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3305 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3306 && OMP_CLAUSE_DECL (c) == dovar_decl)
3308 tree l = build_omp_clause (input_location,
3309 OMP_CLAUSE_LASTPRIVATE);
3310 OMP_CLAUSE_DECL (l) = dovar_decl;
3311 OMP_CLAUSE_CHAIN (l) = omp_clauses;
3312 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
3313 omp_clauses = l;
3314 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
3315 break;
3318 gcc_assert (simple || c != NULL);
3320 if (!simple)
3322 if (op != EXEC_OMP_SIMD)
3323 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
3324 else if (collapse == 1)
3326 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
3327 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
3328 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp) = 1;
3329 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp) = 1;
3331 else
3332 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
3333 OMP_CLAUSE_DECL (tmp) = count;
3334 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
3337 if (i + 1 < collapse)
3338 code = code->block->next;
3341 if (pblock != &block)
3343 pushlevel ();
3344 gfc_start_block (&block);
3347 gfc_start_block (&body);
3349 FOR_EACH_VEC_ELT (inits, ix, di)
3350 gfc_add_modify (&body, di->var, di->init);
3351 inits.release ();
3353 /* Cycle statement is implemented with a goto. Exit statement must not be
3354 present for this loop. */
3355 cycle_label = gfc_build_label_decl (NULL_TREE);
3357 /* Put these labels where they can be found later. */
3359 code->cycle_label = cycle_label;
3360 code->exit_label = NULL_TREE;
3362 /* Main loop body. */
3363 tmp = gfc_trans_omp_code (code->block->next, true);
3364 gfc_add_expr_to_block (&body, tmp);
3366 /* Label for cycle statements (if needed). */
3367 if (TREE_USED (cycle_label))
3369 tmp = build1_v (LABEL_EXPR, cycle_label);
3370 gfc_add_expr_to_block (&body, tmp);
3373 /* End of loop body. */
3374 switch (op)
3376 case EXEC_OMP_SIMD: stmt = make_node (OMP_SIMD); break;
3377 case EXEC_OMP_DO: stmt = make_node (OMP_FOR); break;
3378 case EXEC_OMP_DISTRIBUTE: stmt = make_node (OMP_DISTRIBUTE); break;
3379 case EXEC_OACC_LOOP: stmt = make_node (OACC_LOOP); break;
3380 default: gcc_unreachable ();
3383 TREE_TYPE (stmt) = void_type_node;
3384 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
3385 OMP_FOR_CLAUSES (stmt) = omp_clauses;
3386 OMP_FOR_INIT (stmt) = init;
3387 OMP_FOR_COND (stmt) = cond;
3388 OMP_FOR_INCR (stmt) = incr;
3389 gfc_add_expr_to_block (&block, stmt);
3391 return gfc_finish_block (&block);
3394 /* parallel loop and kernels loop. */
3395 static tree
3396 gfc_trans_oacc_combined_directive (gfc_code *code)
3398 stmtblock_t block, *pblock = NULL;
3399 gfc_omp_clauses construct_clauses, loop_clauses;
3400 tree stmt, oacc_clauses = NULL_TREE;
3401 enum tree_code construct_code;
3403 switch (code->op)
3405 case EXEC_OACC_PARALLEL_LOOP:
3406 construct_code = OACC_PARALLEL;
3407 break;
3408 case EXEC_OACC_KERNELS_LOOP:
3409 construct_code = OACC_KERNELS;
3410 break;
3411 default:
3412 gcc_unreachable ();
3415 gfc_start_block (&block);
3417 memset (&loop_clauses, 0, sizeof (loop_clauses));
3418 if (code->ext.omp_clauses != NULL)
3420 memcpy (&construct_clauses, code->ext.omp_clauses,
3421 sizeof (construct_clauses));
3422 loop_clauses.collapse = construct_clauses.collapse;
3423 loop_clauses.gang = construct_clauses.gang;
3424 loop_clauses.vector = construct_clauses.vector;
3425 loop_clauses.worker = construct_clauses.worker;
3426 loop_clauses.seq = construct_clauses.seq;
3427 loop_clauses.independent = construct_clauses.independent;
3428 construct_clauses.collapse = 0;
3429 construct_clauses.gang = false;
3430 construct_clauses.vector = false;
3431 construct_clauses.worker = false;
3432 construct_clauses.seq = false;
3433 construct_clauses.independent = false;
3434 oacc_clauses = gfc_trans_omp_clauses (&block, &construct_clauses,
3435 code->loc);
3437 if (!loop_clauses.seq)
3438 pblock = &block;
3439 else
3440 pushlevel ();
3441 stmt = gfc_trans_omp_do (code, code->op, pblock, &loop_clauses, NULL);
3442 if (TREE_CODE (stmt) != BIND_EXPR)
3443 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3444 else
3445 poplevel (0, 0);
3446 stmt = build2_loc (input_location, construct_code, void_type_node, stmt,
3447 oacc_clauses);
3448 if (code->op == EXEC_OACC_KERNELS_LOOP)
3449 OACC_KERNELS_COMBINED (stmt) = 1;
3450 else
3451 OACC_PARALLEL_COMBINED (stmt) = 1;
3452 gfc_add_expr_to_block (&block, stmt);
3453 return gfc_finish_block (&block);
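/* Editorial note (added): !$omp flush is implemented as a call to
   __sync_synchronize, i.e. a full memory barrier; an optional flush
   list does not affect what is generated here.  */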
3456 static tree
3457 gfc_trans_omp_flush (void)
3459 tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
3460 return build_call_expr_loc (input_location, decl, 0);
3463 static tree
3464 gfc_trans_omp_master (gfc_code *code)
3466 tree stmt = gfc_trans_code (code->block->next);
3467 if (IS_EMPTY_STMT (stmt))
3468 return stmt;
3469 return build1_v (OMP_MASTER, stmt);
3472 static tree
3473 gfc_trans_omp_ordered (gfc_code *code)
3475 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
3478 static tree
3479 gfc_trans_omp_parallel (gfc_code *code)
3481 stmtblock_t block;
3482 tree stmt, omp_clauses;
3484 gfc_start_block (&block);
3485 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3486 code->loc);
3487 stmt = gfc_trans_omp_code (code->block->next, true);
3488 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3489 omp_clauses);
3490 gfc_add_expr_to_block (&block, stmt);
3491 return gfc_finish_block (&block);
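/* Editorial note (added): combined constructs (e.g. !$omp target teams
   distribute parallel do simd) are translated by splitting their
   clause list among the constituent constructs.  The enums below name
   the pieces and the bitmask describing which pieces a given combined
   construct contains.  */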
3494 enum
3496 GFC_OMP_SPLIT_SIMD,
3497 GFC_OMP_SPLIT_DO,
3498 GFC_OMP_SPLIT_PARALLEL,
3499 GFC_OMP_SPLIT_DISTRIBUTE,
3500 GFC_OMP_SPLIT_TEAMS,
3501 GFC_OMP_SPLIT_TARGET,
3502 GFC_OMP_SPLIT_NUM
3505 enum
3507 GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
3508 GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
3509 GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
3510 GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
3511 GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
3512 GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
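/* Editorial note (added): distribute the clauses of the combined
   construct CODE into CLAUSESA, one gfc_omp_clauses per constituent
   construct.  Construct-specific clauses (device, num_teams,
   num_threads, schedule, safelen, ...) go to their own construct,
   collapse is duplicated where needed, and data-sharing clauses are
   placed (and sometimes duplicated) according to the rules spelled out
   in the comments below.  */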
3515 static void
3516 gfc_split_omp_clauses (gfc_code *code,
3517 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM])
3519 int mask = 0, innermost = 0;
3520 memset (clausesa, 0, GFC_OMP_SPLIT_NUM * sizeof (gfc_omp_clauses));
3521 switch (code->op)
3523 case EXEC_OMP_DISTRIBUTE:
3524 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3525 break;
3526 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
3527 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3528 innermost = GFC_OMP_SPLIT_DO;
3529 break;
3530 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
3531 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL
3532 | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3533 innermost = GFC_OMP_SPLIT_SIMD;
3534 break;
3535 case EXEC_OMP_DISTRIBUTE_SIMD:
3536 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3537 innermost = GFC_OMP_SPLIT_SIMD;
3538 break;
3539 case EXEC_OMP_DO:
3540 innermost = GFC_OMP_SPLIT_DO;
3541 break;
3542 case EXEC_OMP_DO_SIMD:
3543 mask = GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3544 innermost = GFC_OMP_SPLIT_SIMD;
3545 break;
3546 case EXEC_OMP_PARALLEL:
3547 innermost = GFC_OMP_SPLIT_PARALLEL;
3548 break;
3549 case EXEC_OMP_PARALLEL_DO:
3550 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3551 innermost = GFC_OMP_SPLIT_DO;
3552 break;
3553 case EXEC_OMP_PARALLEL_DO_SIMD:
3554 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3555 innermost = GFC_OMP_SPLIT_SIMD;
3556 break;
3557 case EXEC_OMP_SIMD:
3558 innermost = GFC_OMP_SPLIT_SIMD;
3559 break;
3560 case EXEC_OMP_TARGET:
3561 innermost = GFC_OMP_SPLIT_TARGET;
3562 break;
3563 case EXEC_OMP_TARGET_TEAMS:
3564 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS;
3565 innermost = GFC_OMP_SPLIT_TEAMS;
3566 break;
3567 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3568 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3569 | GFC_OMP_MASK_DISTRIBUTE;
3570 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3571 break;
3572 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
3573 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3574 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3575 innermost = GFC_OMP_SPLIT_DO;
3576 break;
3577 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3578 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3579 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3580 innermost = GFC_OMP_SPLIT_SIMD;
3581 break;
3582 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
3583 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3584 | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3585 innermost = GFC_OMP_SPLIT_SIMD;
3586 break;
3587 case EXEC_OMP_TEAMS:
3588 innermost = GFC_OMP_SPLIT_TEAMS;
3589 break;
3590 case EXEC_OMP_TEAMS_DISTRIBUTE:
3591 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE;
3592 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3593 break;
3594 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
3595 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3596 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3597 innermost = GFC_OMP_SPLIT_DO;
3598 break;
3599 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3600 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3601 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3602 innermost = GFC_OMP_SPLIT_SIMD;
3603 break;
3604 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
3605 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3606 innermost = GFC_OMP_SPLIT_SIMD;
3607 break;
3608 default:
3609 gcc_unreachable ();
3611 if (mask == 0)
3613 clausesa[innermost] = *code->ext.omp_clauses;
3614 return;
3616 if (code->ext.omp_clauses != NULL)
3618 if (mask & GFC_OMP_MASK_TARGET)
3620 /* First the clauses that are unique to some constructs. */
3621 clausesa[GFC_OMP_SPLIT_TARGET].lists[OMP_LIST_MAP]
3622 = code->ext.omp_clauses->lists[OMP_LIST_MAP];
3623 clausesa[GFC_OMP_SPLIT_TARGET].device
3624 = code->ext.omp_clauses->device;
3626 if (mask & GFC_OMP_MASK_TEAMS)
3628 /* First the clauses that are unique to some constructs. */
3629 clausesa[GFC_OMP_SPLIT_TEAMS].num_teams
3630 = code->ext.omp_clauses->num_teams;
3631 clausesa[GFC_OMP_SPLIT_TEAMS].thread_limit
3632 = code->ext.omp_clauses->thread_limit;
3633 /* Shared and default clauses are allowed on parallel and teams. */
3634 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_SHARED]
3635 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3636 clausesa[GFC_OMP_SPLIT_TEAMS].default_sharing
3637 = code->ext.omp_clauses->default_sharing;
3639 if (mask & GFC_OMP_MASK_DISTRIBUTE)
3641 /* First the clauses that are unique to some constructs. */
3642 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_sched_kind
3643 = code->ext.omp_clauses->dist_sched_kind;
3644 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_chunk_size
3645 = code->ext.omp_clauses->dist_chunk_size;
3646 /* Duplicate collapse. */
3647 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].collapse
3648 = code->ext.omp_clauses->collapse;
3650 if (mask & GFC_OMP_MASK_PARALLEL)
3652 /* First the clauses that are unique to some constructs. */
3653 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_COPYIN]
3654 = code->ext.omp_clauses->lists[OMP_LIST_COPYIN];
3655 clausesa[GFC_OMP_SPLIT_PARALLEL].num_threads
3656 = code->ext.omp_clauses->num_threads;
3657 clausesa[GFC_OMP_SPLIT_PARALLEL].proc_bind
3658 = code->ext.omp_clauses->proc_bind;
3659 /* Shared and default clauses are allowed on parallel and teams. */
3660 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_SHARED]
3661 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3662 clausesa[GFC_OMP_SPLIT_PARALLEL].default_sharing
3663 = code->ext.omp_clauses->default_sharing;
3665 if (mask & GFC_OMP_MASK_DO)
3667 /* First the clauses that are unique to some constructs. */
3668 clausesa[GFC_OMP_SPLIT_DO].ordered
3669 = code->ext.omp_clauses->ordered;
3670 clausesa[GFC_OMP_SPLIT_DO].sched_kind
3671 = code->ext.omp_clauses->sched_kind;
3672 clausesa[GFC_OMP_SPLIT_DO].chunk_size
3673 = code->ext.omp_clauses->chunk_size;
3674 clausesa[GFC_OMP_SPLIT_DO].nowait
3675 = code->ext.omp_clauses->nowait;
3676 /* Duplicate collapse. */
3677 clausesa[GFC_OMP_SPLIT_DO].collapse
3678 = code->ext.omp_clauses->collapse;
3680 if (mask & GFC_OMP_MASK_SIMD)
3682 clausesa[GFC_OMP_SPLIT_SIMD].safelen_expr
3683 = code->ext.omp_clauses->safelen_expr;
3684 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LINEAR]
3685 = code->ext.omp_clauses->lists[OMP_LIST_LINEAR];
3686 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_ALIGNED]
3687 = code->ext.omp_clauses->lists[OMP_LIST_ALIGNED];
3688 /* Duplicate collapse. */
3689 clausesa[GFC_OMP_SPLIT_SIMD].collapse
3690 = code->ext.omp_clauses->collapse;
3692 /* Private clause is supported on all constructs but target,
3693 it is enough to put it on the innermost one. For
3694 !$ omp do put it on parallel though,
3695 as that's what we did for OpenMP 3.1. */
3696 clausesa[innermost == GFC_OMP_SPLIT_DO
3697 ? (int) GFC_OMP_SPLIT_PARALLEL
3698 : innermost].lists[OMP_LIST_PRIVATE]
3699 = code->ext.omp_clauses->lists[OMP_LIST_PRIVATE];
3700 /* Firstprivate clause is supported on all constructs but
3701 target and simd. Put it on the outermost of those and
3702 duplicate on parallel. */
3703 if (mask & GFC_OMP_MASK_TEAMS)
3704 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_FIRSTPRIVATE]
3705 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3706 else if (mask & GFC_OMP_MASK_DISTRIBUTE)
3707 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].lists[OMP_LIST_FIRSTPRIVATE]
3708 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3709 if (mask & GFC_OMP_MASK_PARALLEL)
3710 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_FIRSTPRIVATE]
3711 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3712 else if (mask & GFC_OMP_MASK_DO)
3713 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_FIRSTPRIVATE]
3714 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3715 /* Lastprivate is allowed on do and simd. In
3716 parallel do{, simd} we actually want to put it on
3717 parallel rather than do. */
3718 if (mask & GFC_OMP_MASK_PARALLEL)
3719 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_LASTPRIVATE]
3720 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3721 else if (mask & GFC_OMP_MASK_DO)
3722 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_LASTPRIVATE]
3723 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3724 if (mask & GFC_OMP_MASK_SIMD)
3725 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LASTPRIVATE]
3726 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3727 /* Reduction is allowed on simd, do, parallel and teams.
3728 Duplicate it on all of them, but omit on do if
3729 parallel is present. */
3730 if (mask & GFC_OMP_MASK_TEAMS)
3731 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_REDUCTION]
3732 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3733 if (mask & GFC_OMP_MASK_PARALLEL)
3734 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_REDUCTION]
3735 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3736 else if (mask & GFC_OMP_MASK_DO)
3737 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_REDUCTION]
3738 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3739 if (mask & GFC_OMP_MASK_SIMD)
3740 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_REDUCTION]
3741 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3742 /* FIXME: This is currently being discussed. */
3743 if (mask & GFC_OMP_MASK_PARALLEL)
3744 clausesa[GFC_OMP_SPLIT_PARALLEL].if_expr
3745 = code->ext.omp_clauses->if_expr;
3746 else
3747 clausesa[GFC_OMP_SPLIT_TARGET].if_expr
3748 = code->ext.omp_clauses->if_expr;
3750 if ((mask & (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3751 == (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3752 clausesa[GFC_OMP_SPLIT_DO].nowait = true;
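/* A rough worked example of the splitting above: assuming a combined
   construct such as
     !$omp target teams distribute parallel do simd
   with clauses collapse(2) private(t) firstprivate(a) lastprivate(b)
   reduction(+:s) schedule(static,4) safelen(8) if(n > 0), the clauses
   end up approximately as
     GFC_OMP_SPLIT_TARGET:     (if() would only land here without parallel)
     GFC_OMP_SPLIT_TEAMS:      firstprivate(a) reduction(+:s)
     GFC_OMP_SPLIT_DISTRIBUTE: collapse(2)
     GFC_OMP_SPLIT_PARALLEL:   if(n > 0) firstprivate(a) lastprivate(b)
                               reduction(+:s)
     GFC_OMP_SPLIT_DO:         schedule(static,4) collapse(2) nowait
     GFC_OMP_SPLIT_SIMD:       safelen(8) collapse(2) private(t)
                               lastprivate(b) reduction(+:s)  */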
3755 static tree
3756 gfc_trans_omp_do_simd (gfc_code *code, stmtblock_t *pblock,
3757 gfc_omp_clauses *clausesa, tree omp_clauses)
3759 stmtblock_t block;
3760 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3761 tree stmt, body, omp_do_clauses = NULL_TREE;
3763 if (pblock == NULL)
3764 gfc_start_block (&block);
3765 else
3766 gfc_init_block (&block);
3768 if (clausesa == NULL)
3770 clausesa = clausesa_buf;
3771 gfc_split_omp_clauses (code, clausesa);
3773 if (flag_openmp)
3774 omp_do_clauses
3775 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DO], code->loc);
3776 body = gfc_trans_omp_do (code, EXEC_OMP_SIMD, pblock ? pblock : &block,
3777 &clausesa[GFC_OMP_SPLIT_SIMD], omp_clauses);
3778 if (pblock == NULL)
3780 if (TREE_CODE (body) != BIND_EXPR)
3781 body = build3_v (BIND_EXPR, NULL, body, poplevel (1, 0));
3782 else
3783 poplevel (0, 0);
3785 else if (TREE_CODE (body) != BIND_EXPR)
3786 body = build3_v (BIND_EXPR, NULL, body, NULL_TREE);
3787 if (flag_openmp)
3789 stmt = make_node (OMP_FOR);
3790 TREE_TYPE (stmt) = void_type_node;
3791 OMP_FOR_BODY (stmt) = body;
3792 OMP_FOR_CLAUSES (stmt) = omp_do_clauses;
3794 else
3795 stmt = body;
3796 gfc_add_expr_to_block (&block, stmt);
3797 return gfc_finish_block (&block);
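/* Sketch of what gfc_trans_omp_do_simd emits: under -fopenmp the
   worksharing clauses are kept and the result is roughly
     OMP_FOR (clauses from GFC_OMP_SPLIT_DO)
       BIND_EXPR
         <simd loop translated by gfc_trans_omp_do with
          the GFC_OMP_SPLIT_SIMD clauses>
   whereas with only -fopenmp-simd (flag_openmp false) the OMP_FOR
   wrapper and its clauses are dropped and just the inner simd loop
   survives.  The parallel, distribute and target wrappers below apply
   the same flag_openmp gating.  */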
3800 static tree
3801 gfc_trans_omp_parallel_do (gfc_code *code, stmtblock_t *pblock,
3802 gfc_omp_clauses *clausesa)
3804 stmtblock_t block, *new_pblock = pblock;
3805 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3806 tree stmt, omp_clauses = NULL_TREE;
3808 if (pblock == NULL)
3809 gfc_start_block (&block);
3810 else
3811 gfc_init_block (&block);
3813 if (clausesa == NULL)
3815 clausesa = clausesa_buf;
3816 gfc_split_omp_clauses (code, clausesa);
3818 omp_clauses
3819 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3820 code->loc);
3821 if (pblock == NULL)
3823 if (!clausesa[GFC_OMP_SPLIT_DO].ordered
3824 && clausesa[GFC_OMP_SPLIT_DO].sched_kind != OMP_SCHED_STATIC)
3825 new_pblock = &block;
3826 else
3827 pushlevel ();
3829 stmt = gfc_trans_omp_do (code, EXEC_OMP_DO, new_pblock,
3830 &clausesa[GFC_OMP_SPLIT_DO], omp_clauses);
3831 if (pblock == NULL)
3833 if (TREE_CODE (stmt) != BIND_EXPR)
3834 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3835 else
3836 poplevel (0, 0);
3838 else if (TREE_CODE (stmt) != BIND_EXPR)
3839 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3840 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3841 omp_clauses);
3842 OMP_PARALLEL_COMBINED (stmt) = 1;
3843 gfc_add_expr_to_block (&block, stmt);
3844 return gfc_finish_block (&block);
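/* A detail worth noting above: when gfc_trans_omp_parallel_do is called
   at the top level (pblock == NULL) and the loop is neither ordered nor
   schedule(static), new_pblock points at the enclosing block, so the
   loop bound and DO clause setup that gfc_trans_omp_do emits lands ahead
   of the OMP_PARALLEL; otherwise a fresh binding level is pushed and
   that setup stays inside the BIND_EXPR forming the parallel body.
   Either way the result is roughly an OMP_PARALLEL, marked
   OMP_PARALLEL_COMBINED, wrapping the OMP_FOR.  */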
3847 static tree
3848 gfc_trans_omp_parallel_do_simd (gfc_code *code, stmtblock_t *pblock,
3849 gfc_omp_clauses *clausesa)
3851 stmtblock_t block;
3852 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3853 tree stmt, omp_clauses = NULL_TREE;
3855 if (pblock == NULL)
3856 gfc_start_block (&block);
3857 else
3858 gfc_init_block (&block);
3860 if (clausesa == NULL)
3862 clausesa = clausesa_buf;
3863 gfc_split_omp_clauses (code, clausesa);
3865 if (flag_openmp)
3866 omp_clauses
3867 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3868 code->loc);
3869 if (pblock == NULL)
3870 pushlevel ();
3871 stmt = gfc_trans_omp_do_simd (code, pblock, clausesa, omp_clauses);
3872 if (pblock == NULL)
3874 if (TREE_CODE (stmt) != BIND_EXPR)
3875 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3876 else
3877 poplevel (0, 0);
3879 else if (TREE_CODE (stmt) != BIND_EXPR)
3880 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3881 if (flag_openmp)
3883 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3884 omp_clauses);
3885 OMP_PARALLEL_COMBINED (stmt) = 1;
3887 gfc_add_expr_to_block (&block, stmt);
3888 return gfc_finish_block (&block);
3891 static tree
3892 gfc_trans_omp_parallel_sections (gfc_code *code)
3894 stmtblock_t block;
3895 gfc_omp_clauses section_clauses;
3896 tree stmt, omp_clauses;
3898 memset (&section_clauses, 0, sizeof (section_clauses));
3899 section_clauses.nowait = true;
3901 gfc_start_block (&block);
3902 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3903 code->loc);
3904 pushlevel ();
3905 stmt = gfc_trans_omp_sections (code, &section_clauses);
3906 if (TREE_CODE (stmt) != BIND_EXPR)
3907 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3908 else
3909 poplevel (0, 0);
3910 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3911 omp_clauses);
3912 OMP_PARALLEL_COMBINED (stmt) = 1;
3913 gfc_add_expr_to_block (&block, stmt);
3914 return gfc_finish_block (&block);
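/* Note that the inner SECTIONS construct is built from a zeroed clause
   set with only nowait set: in the combined "!$omp parallel sections"
   all user clauses go on the parallel, and the implicit barrier at the
   end of the parallel region already provides the synchronization, so
   no extra barrier is wanted after the OMP_SECTIONS.
   gfc_trans_omp_parallel_workshare below plays the same trick for
   "!$omp parallel workshare".  */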
3917 static tree
3918 gfc_trans_omp_parallel_workshare (gfc_code *code)
3920 stmtblock_t block;
3921 gfc_omp_clauses workshare_clauses;
3922 tree stmt, omp_clauses;
3924 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
3925 workshare_clauses.nowait = true;
3927 gfc_start_block (&block);
3928 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3929 code->loc);
3930 pushlevel ();
3931 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
3932 if (TREE_CODE (stmt) != BIND_EXPR)
3933 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3934 else
3935 poplevel (0, 0);
3936 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3937 omp_clauses);
3938 OMP_PARALLEL_COMBINED (stmt) = 1;
3939 gfc_add_expr_to_block (&block, stmt);
3940 return gfc_finish_block (&block);
3943 static tree
3944 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
3946 stmtblock_t block, body;
3947 tree omp_clauses, stmt;
3948 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
3950 gfc_start_block (&block);
3952 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
3954 gfc_init_block (&body);
3955 for (code = code->block; code; code = code->block)
3957 /* Last section is special because of lastprivate, so even if it
3958 is empty, chain it in. */
3959 stmt = gfc_trans_omp_code (code->next,
3960 has_lastprivate && code->block == NULL);
3961 if (! IS_EMPTY_STMT (stmt))
3963 stmt = build1_v (OMP_SECTION, stmt);
3964 gfc_add_expr_to_block (&body, stmt);
3967 stmt = gfc_finish_block (&body);
3969 stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
3970 omp_clauses);
3971 gfc_add_expr_to_block (&block, stmt);
3973 return gfc_finish_block (&block);
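/* Roughly, a directive like
     !$omp sections lastprivate(x)
     !$omp section
       x = 1
     !$omp section
       x = 2
     !$omp end sections
   becomes
     OMP_SECTIONS (lastprivate(x))
       OMP_SECTION  x = 1
       OMP_SECTION  x = 2
   with the last section chained in even when its body is empty, so the
   lastprivate copy-back has a section to attach to.  */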
3976 static tree
3977 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
3979 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
3980 tree stmt = gfc_trans_omp_code (code->block->next, true);
3981 stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
3982 omp_clauses);
3983 return stmt;
3986 static tree
3987 gfc_trans_omp_task (gfc_code *code)
3989 stmtblock_t block;
3990 tree stmt, omp_clauses;
3992 gfc_start_block (&block);
3993 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3994 code->loc);
3995 stmt = gfc_trans_omp_code (code->block->next, true);
3996 stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
3997 omp_clauses);
3998 gfc_add_expr_to_block (&block, stmt);
3999 return gfc_finish_block (&block);
4002 static tree
4003 gfc_trans_omp_taskgroup (gfc_code *code)
4005 tree stmt = gfc_trans_code (code->block->next);
4006 return build1_loc (input_location, OMP_TASKGROUP, void_type_node, stmt);
4009 static tree
4010 gfc_trans_omp_taskwait (void)
4012 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
4013 return build_call_expr_loc (input_location, decl, 0);
4016 static tree
4017 gfc_trans_omp_taskyield (void)
4019 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
4020 return build_call_expr_loc (input_location, decl, 0);
4023 static tree
4024 gfc_trans_omp_distribute (gfc_code *code, gfc_omp_clauses *clausesa)
4026 stmtblock_t block;
4027 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
4028 tree stmt, omp_clauses = NULL_TREE;
4030 gfc_start_block (&block);
4031 if (clausesa == NULL)
4033 clausesa = clausesa_buf;
4034 gfc_split_omp_clauses (code, clausesa);
4036 if (flag_openmp)
4037 omp_clauses
4038 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
4039 code->loc);
4040 switch (code->op)
4042 case EXEC_OMP_DISTRIBUTE:
4043 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
4044 case EXEC_OMP_TEAMS_DISTRIBUTE:
4045 /* This is handled in gfc_trans_omp_do. */
4046 gcc_unreachable ();
4047 break;
4048 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
4049 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
4050 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
4051 stmt = gfc_trans_omp_parallel_do (code, &block, clausesa);
4052 if (TREE_CODE (stmt) != BIND_EXPR)
4053 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
4054 else
4055 poplevel (0, 0);
4056 break;
4057 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
4058 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4059 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4060 stmt = gfc_trans_omp_parallel_do_simd (code, &block, clausesa);
4061 if (TREE_CODE (stmt) != BIND_EXPR)
4062 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
4063 else
4064 poplevel (0, 0);
4065 break;
4066 case EXEC_OMP_DISTRIBUTE_SIMD:
4067 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
4068 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
4069 stmt = gfc_trans_omp_do (code, EXEC_OMP_SIMD, &block,
4070 &clausesa[GFC_OMP_SPLIT_SIMD], NULL_TREE);
4071 if (TREE_CODE (stmt) != BIND_EXPR)
4072 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
4073 else
4074 poplevel (0, 0);
4075 break;
4076 default:
4077 gcc_unreachable ();
4079 if (flag_openmp)
4081 tree distribute = make_node (OMP_DISTRIBUTE);
4082 TREE_TYPE (distribute) = void_type_node;
4083 OMP_FOR_BODY (distribute) = stmt;
4084 OMP_FOR_CLAUSES (distribute) = omp_clauses;
4085 stmt = distribute;
4087 gfc_add_expr_to_block (&block, stmt);
4088 return gfc_finish_block (&block);
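/* The distribute level itself is only materialized under -fopenmp; with
   just -fopenmp-simd only whatever the nested translation produced (at
   most the simd loop) is kept.  OMP_DISTRIBUTE belongs to the OMP_FOR
   family of tree codes, which is why OMP_FOR_BODY and OMP_FOR_CLAUSES
   are used to fill it in above.  */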
4091 static tree
4092 gfc_trans_omp_teams (gfc_code *code, gfc_omp_clauses *clausesa)
4094 stmtblock_t block;
4095 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
4096 tree stmt, omp_clauses = NULL_TREE;
4098 gfc_start_block (&block);
4099 if (clausesa == NULL)
4101 clausesa = clausesa_buf;
4102 gfc_split_omp_clauses (code, clausesa);
4104 if (flag_openmp)
4105 omp_clauses
4106 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TEAMS],
4107 code->loc);
4108 switch (code->op)
4110 case EXEC_OMP_TARGET_TEAMS:
4111 case EXEC_OMP_TEAMS:
4112 stmt = gfc_trans_omp_code (code->block->next, true);
4113 break;
4114 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
4115 case EXEC_OMP_TEAMS_DISTRIBUTE:
4116 stmt = gfc_trans_omp_do (code, EXEC_OMP_DISTRIBUTE, NULL,
4117 &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
4118 NULL);
4119 break;
4120 default:
4121 stmt = gfc_trans_omp_distribute (code, clausesa);
4122 break;
4124 stmt = build2_loc (input_location, OMP_TEAMS, void_type_node, stmt,
4125 omp_clauses);
4126 gfc_add_expr_to_block (&block, stmt);
4127 return gfc_finish_block (&block);
4130 static tree
4131 gfc_trans_omp_target (gfc_code *code)
4133 stmtblock_t block;
4134 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
4135 tree stmt, omp_clauses = NULL_TREE;
4137 gfc_start_block (&block);
4138 gfc_split_omp_clauses (code, clausesa);
4139 if (flag_openmp)
4140 omp_clauses
4141 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TARGET],
4142 code->loc);
4143 if (code->op == EXEC_OMP_TARGET)
4144 stmt = gfc_trans_omp_code (code->block->next, true);
4145 else
4146 stmt = gfc_trans_omp_teams (code, clausesa);
4147 if (TREE_CODE (stmt) != BIND_EXPR)
4148 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
4149 if (flag_openmp)
4150 stmt = build2_loc (input_location, OMP_TARGET, void_type_node, stmt,
4151 omp_clauses);
4152 gfc_add_expr_to_block (&block, stmt);
4153 return gfc_finish_block (&block);
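/* Putting the layers together, a fully combined
     !$omp target teams distribute parallel do simd
   is translated into roughly the nest
     OMP_TARGET
       OMP_TEAMS
         OMP_DISTRIBUTE
           OMP_PARALLEL        (with OMP_PARALLEL_COMBINED set)
             OMP_FOR
               <simd loop>
   where each level carries the clauses from its GFC_OMP_SPLIT_* entry
   as filled in by gfc_split_omp_clauses.  */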
4156 static tree
4157 gfc_trans_omp_target_data (gfc_code *code)
4159 stmtblock_t block;
4160 tree stmt, omp_clauses;
4162 gfc_start_block (&block);
4163 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
4164 code->loc);
4165 stmt = gfc_trans_omp_code (code->block->next, true);
4166 stmt = build2_loc (input_location, OMP_TARGET_DATA, void_type_node, stmt,
4167 omp_clauses);
4168 gfc_add_expr_to_block (&block, stmt);
4169 return gfc_finish_block (&block);
4172 static tree
4173 gfc_trans_omp_target_update (gfc_code *code)
4175 stmtblock_t block;
4176 tree stmt, omp_clauses;
4178 gfc_start_block (&block);
4179 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
4180 code->loc);
4181 stmt = build1_loc (input_location, OMP_TARGET_UPDATE, void_type_node,
4182 omp_clauses);
4183 gfc_add_expr_to_block (&block, stmt);
4184 return gfc_finish_block (&block);
4187 static tree
4188 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
4190 tree res, tmp, stmt;
4191 stmtblock_t block, *pblock = NULL;
4192 stmtblock_t singleblock;
4193 int saved_ompws_flags;
4194 bool singleblock_in_progress = false;
4195 /* True if previous gfc_code in workshare construct is not workshared. */
4196 bool prev_singleunit;
4198 code = code->block->next;
4200 pushlevel ();
4202 gfc_start_block (&block);
4203 pblock = &block;
4205 ompws_flags = OMPWS_WORKSHARE_FLAG;
4206 prev_singleunit = false;
4208 /* Translate statements one by one to trees until we reach
4209 the end of the workshare construct. Adjacent gfc_codes that
4210 are a single unit of work are clustered and encapsulated in a
4211 single OMP_SINGLE construct. */
4212 for (; code; code = code->next)
4214 if (code->here != 0)
4216 res = gfc_trans_label_here (code);
4217 gfc_add_expr_to_block (pblock, res);
4220 /* No dependence analysis, use for clauses with wait.
4221 If this is the last gfc_code, use default omp_clauses. */
4222 if (code->next == NULL && clauses->nowait)
4223 ompws_flags |= OMPWS_NOWAIT;
4225 /* By default, every gfc_code is a single unit of work. */
4226 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
4227 ompws_flags &= ~OMPWS_SCALARIZER_WS;
4229 switch (code->op)
4231 case EXEC_NOP:
4232 res = NULL_TREE;
4233 break;
4235 case EXEC_ASSIGN:
4236 res = gfc_trans_assign (code);
4237 break;
4239 case EXEC_POINTER_ASSIGN:
4240 res = gfc_trans_pointer_assign (code);
4241 break;
4243 case EXEC_INIT_ASSIGN:
4244 res = gfc_trans_init_assign (code);
4245 break;
4247 case EXEC_FORALL:
4248 res = gfc_trans_forall (code);
4249 break;
4251 case EXEC_WHERE:
4252 res = gfc_trans_where (code);
4253 break;
4255 case EXEC_OMP_ATOMIC:
4256 res = gfc_trans_omp_directive (code);
4257 break;
4259 case EXEC_OMP_PARALLEL:
4260 case EXEC_OMP_PARALLEL_DO:
4261 case EXEC_OMP_PARALLEL_SECTIONS:
4262 case EXEC_OMP_PARALLEL_WORKSHARE:
4263 case EXEC_OMP_CRITICAL:
4264 saved_ompws_flags = ompws_flags;
4265 ompws_flags = 0;
4266 res = gfc_trans_omp_directive (code);
4267 ompws_flags = saved_ompws_flags;
4268 break;
4270 default:
4271 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
4274 gfc_set_backend_locus (&code->loc);
4276 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
4278 if (prev_singleunit)
4280 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
4281 /* Add current gfc_code to single block. */
4282 gfc_add_expr_to_block (&singleblock, res);
4283 else
4285 /* Finish single block and add it to pblock. */
4286 tmp = gfc_finish_block (&singleblock);
4287 tmp = build2_loc (input_location, OMP_SINGLE,
4288 void_type_node, tmp, NULL_TREE);
4289 gfc_add_expr_to_block (pblock, tmp);
4290 /* Add current gfc_code to pblock. */
4291 gfc_add_expr_to_block (pblock, res);
4292 singleblock_in_progress = false;
4295 else
4297 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
4299 /* Start single block. */
4300 gfc_init_block (&singleblock);
4301 gfc_add_expr_to_block (&singleblock, res);
4302 singleblock_in_progress = true;
4304 else
4305 /* Add the new statement to the block. */
4306 gfc_add_expr_to_block (pblock, res);
4308 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
4312 /* Finish remaining SINGLE block, if we were in the middle of one. */
4313 if (singleblock_in_progress)
4315 /* Finish single block and add it to pblock. */
4316 tmp = gfc_finish_block (&singleblock);
4317 tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
4318 clauses->nowait
4319 ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
4320 : NULL_TREE);
4321 gfc_add_expr_to_block (pblock, tmp);
4324 stmt = gfc_finish_block (pblock);
4325 if (TREE_CODE (stmt) != BIND_EXPR)
4327 if (!IS_EMPTY_STMT (stmt))
4329 tree bindblock = poplevel (1, 0);
4330 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
4332 else
4333 poplevel (0, 0);
4335 else
4336 poplevel (0, 0);
4338 if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
4339 stmt = gfc_trans_omp_barrier ();
4341 ompws_flags = 0;
4342 return stmt;
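/* A rough example of the clustering above: inside
     !$omp workshare
       a(:) = b(:) + 1.0
       s = 0.0
       t = s + 1.0
       c(:) = 2.0 * a(:)
     !$omp end workshare
   the two scalar assignments are adjacent single units of work and are
   wrapped together in one OMP_SINGLE, while the array assignments are
   emitted directly when the scalarizer clears OMPWS_CURR_SINGLEUNIT and
   divides their loops among the threads.  If everything collapses to an
   empty statement and there is no NOWAIT, a bare barrier is emitted so
   the construct still synchronizes.  */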
4345 tree
4346 gfc_trans_oacc_declare (stmtblock_t *block, gfc_namespace *ns)
4348 tree oacc_clauses;
4349 oacc_clauses = gfc_trans_omp_clauses (block, ns->oacc_declare_clauses,
4350 ns->oacc_declare_clauses->loc);
4351 return build1_loc (ns->oacc_declare_clauses->loc.lb->location,
4352 OACC_DECLARE, void_type_node, oacc_clauses);
4355 tree
4356 gfc_trans_oacc_directive (gfc_code *code)
4358 switch (code->op)
4360 case EXEC_OACC_PARALLEL_LOOP:
4361 case EXEC_OACC_KERNELS_LOOP:
4362 return gfc_trans_oacc_combined_directive (code);
4363 case EXEC_OACC_PARALLEL:
4364 case EXEC_OACC_KERNELS:
4365 case EXEC_OACC_DATA:
4366 case EXEC_OACC_HOST_DATA:
4367 return gfc_trans_oacc_construct (code);
4368 case EXEC_OACC_LOOP:
4369 return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
4370 NULL);
4371 case EXEC_OACC_UPDATE:
4372 case EXEC_OACC_CACHE:
4373 case EXEC_OACC_ENTER_DATA:
4374 case EXEC_OACC_EXIT_DATA:
4375 return gfc_trans_oacc_executable_directive (code);
4376 case EXEC_OACC_WAIT:
4377 return gfc_trans_oacc_wait_directive (code);
4378 default:
4379 gcc_unreachable ();
4383 tree
4384 gfc_trans_omp_directive (gfc_code *code)
4386 switch (code->op)
4388 case EXEC_OMP_ATOMIC:
4389 return gfc_trans_omp_atomic (code);
4390 case EXEC_OMP_BARRIER:
4391 return gfc_trans_omp_barrier ();
4392 case EXEC_OMP_CANCEL:
4393 return gfc_trans_omp_cancel (code);
4394 case EXEC_OMP_CANCELLATION_POINT:
4395 return gfc_trans_omp_cancellation_point (code);
4396 case EXEC_OMP_CRITICAL:
4397 return gfc_trans_omp_critical (code);
4398 case EXEC_OMP_DISTRIBUTE:
4399 case EXEC_OMP_DO:
4400 case EXEC_OMP_SIMD:
4401 return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
4402 NULL);
4403 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
4404 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
4405 case EXEC_OMP_DISTRIBUTE_SIMD:
4406 return gfc_trans_omp_distribute (code, NULL);
4407 case EXEC_OMP_DO_SIMD:
4408 return gfc_trans_omp_do_simd (code, NULL, NULL, NULL_TREE);
4409 case EXEC_OMP_FLUSH:
4410 return gfc_trans_omp_flush ();
4411 case EXEC_OMP_MASTER:
4412 return gfc_trans_omp_master (code);
4413 case EXEC_OMP_ORDERED:
4414 return gfc_trans_omp_ordered (code);
4415 case EXEC_OMP_PARALLEL:
4416 return gfc_trans_omp_parallel (code);
4417 case EXEC_OMP_PARALLEL_DO:
4418 return gfc_trans_omp_parallel_do (code, NULL, NULL);
4419 case EXEC_OMP_PARALLEL_DO_SIMD:
4420 return gfc_trans_omp_parallel_do_simd (code, NULL, NULL);
4421 case EXEC_OMP_PARALLEL_SECTIONS:
4422 return gfc_trans_omp_parallel_sections (code);
4423 case EXEC_OMP_PARALLEL_WORKSHARE:
4424 return gfc_trans_omp_parallel_workshare (code);
4425 case EXEC_OMP_SECTIONS:
4426 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
4427 case EXEC_OMP_SINGLE:
4428 return gfc_trans_omp_single (code, code->ext.omp_clauses);
4429 case EXEC_OMP_TARGET:
4430 case EXEC_OMP_TARGET_TEAMS:
4431 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
4432 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
4433 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4434 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
4435 return gfc_trans_omp_target (code);
4436 case EXEC_OMP_TARGET_DATA:
4437 return gfc_trans_omp_target_data (code);
4438 case EXEC_OMP_TARGET_UPDATE:
4439 return gfc_trans_omp_target_update (code);
4440 case EXEC_OMP_TASK:
4441 return gfc_trans_omp_task (code);
4442 case EXEC_OMP_TASKGROUP:
4443 return gfc_trans_omp_taskgroup (code);
4444 case EXEC_OMP_TASKWAIT:
4445 return gfc_trans_omp_taskwait ();
4446 case EXEC_OMP_TASKYIELD:
4447 return gfc_trans_omp_taskyield ();
4448 case EXEC_OMP_TEAMS:
4449 case EXEC_OMP_TEAMS_DISTRIBUTE:
4450 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
4451 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4452 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
4453 return gfc_trans_omp_teams (code, NULL);
4454 case EXEC_OMP_WORKSHARE:
4455 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
4456 default:
4457 gcc_unreachable ();
4461 void
4462 gfc_trans_omp_declare_simd (gfc_namespace *ns)
4464 if (ns->entries)
4465 return;
4467 gfc_omp_declare_simd *ods;
4468 for (ods = ns->omp_declare_simd; ods; ods = ods->next)
4470 tree c = gfc_trans_omp_clauses (NULL, ods->clauses, ods->where, true);
4471 tree fndecl = ns->proc_name->backend_decl;
4472 if (c != NULL_TREE)
4473 c = tree_cons (NULL_TREE, c, NULL_TREE);
4474 c = build_tree_list (get_identifier ("omp declare simd"), c);
4475 TREE_CHAIN (c) = DECL_ATTRIBUTES (fndecl);
4476 DECL_ATTRIBUTES (fndecl) = c;
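/* Each !$omp declare simd directive therefore ends up as one
   "omp declare simd" attribute on the procedure's FUNCTION_DECL.  For
   instance, assuming
     !$omp declare simd (f) simdlen(8) uniform(a)
   the loop above prepends to DECL_ATTRIBUTES (f) a tree list whose
   purpose identifier is "omp declare simd" and whose value holds the
   translated simdlen/uniform clause chain (or NULL_TREE when the
   directive has no clauses); later middle-end passes use these
   attributes to create the actual simd clones.  */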