gcc/fortran/trans-openmp.c  [official-gcc.git]
1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "gimple-expr.h"
27 #include "gimplify.h" /* For create_tmp_var_raw. */
28 #include "stringpool.h"
29 #include "gfortran.h"
30 #include "diagnostic-core.h" /* For internal_error. */
31 #include "trans.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
36 #include "arith.h"
37 #include "omp-low.h"
39 int ompws_flags;
41 /* True if OpenMP should privatize what this DECL points to rather
42 than the DECL itself. */
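/* Illustrative note (added, not from the original sources): gfortran passes
   non-VALUE dummy arguments by reference, so for

       subroutine sub (x)
         integer :: x
       !$omp parallel private(x)

   X is a PARM_DECL of REFERENCE_TYPE and the entity to privatize is what X
   points to, not the reference itself.  */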
44 bool
45 gfc_omp_privatize_by_reference (const_tree decl)
47 tree type = TREE_TYPE (decl);
49 if (TREE_CODE (type) == REFERENCE_TYPE
50 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
51 return true;
53 if (TREE_CODE (type) == POINTER_TYPE)
55 /* Array POINTER/ALLOCATABLE have aggregate types; all user variables
56 that have POINTER_TYPE type and aren't scalar pointers, scalar
57 allocatables, Cray pointees or C pointers are supposed to be
58 privatized by reference. */
59 if (GFC_DECL_GET_SCALAR_POINTER (decl)
60 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
61 || GFC_DECL_CRAY_POINTEE (decl)
62 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
63 return false;
65 if (!DECL_ARTIFICIAL (decl)
66 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
67 return true;
69 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
70 by the frontend. */
71 if (DECL_LANG_SPECIFIC (decl)
72 && GFC_DECL_SAVED_DESCRIPTOR (decl))
73 return true;
76 return false;
79 /* True if OpenMP sharing attribute of DECL is predetermined. */
81 enum omp_clause_default_kind
82 gfc_omp_predetermined_sharing (tree decl)
84 /* Associate names preserve the association established during ASSOCIATE.
85 As they are implemented either as pointers to the selector or array
86 descriptor and shouldn't really change in the ASSOCIATE region,
87 this decl can be either shared or firstprivate. If it is a pointer,
88 use firstprivate, as it is cheaper that way, otherwise make it shared. */
89 if (GFC_DECL_ASSOCIATE_VAR_P (decl))
91 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
92 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
93 else
94 return OMP_CLAUSE_DEFAULT_SHARED;
97 if (DECL_ARTIFICIAL (decl)
98 && ! GFC_DECL_RESULT (decl)
99 && ! (DECL_LANG_SPECIFIC (decl)
100 && GFC_DECL_SAVED_DESCRIPTOR (decl)))
101 return OMP_CLAUSE_DEFAULT_SHARED;
103 /* Cray pointees shouldn't be listed in any clauses and should be
104 gimplified to dereference of the corresponding Cray pointer.
105 Make them all private, so that they are emitted in the debug
106 information. */
107 if (GFC_DECL_CRAY_POINTEE (decl))
108 return OMP_CLAUSE_DEFAULT_PRIVATE;
110 /* Assumed-size arrays are predetermined shared. */
111 if (TREE_CODE (decl) == PARM_DECL
112 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
113 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
114 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
115 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
116 == NULL)
117 return OMP_CLAUSE_DEFAULT_SHARED;
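/* Illustration (added): an assumed-size dummy such as

       subroutine s (a)
         integer :: a(*)
       !$omp parallel default(none) shared(a)

   has no known extent in its last dimension, so a private copy could not be
   allocated; it is therefore predetermined shared.  */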
119 /* Dummy procedures aren't considered variables by OpenMP, thus are
120 disallowed in OpenMP clauses. They are represented as PARM_DECLs
121 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
122 to avoid complaining about their uses with default(none). */
123 if (TREE_CODE (decl) == PARM_DECL
124 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
125 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
126 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
128 /* COMMON and EQUIVALENCE decls are shared. They
129 are only referenced through DECL_VALUE_EXPR of the variables
130 contained in them. If those are privatized, they will not be
131 gimplified to the COMMON or EQUIVALENCE decls. */
132 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
133 return OMP_CLAUSE_DEFAULT_SHARED;
135 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
136 return OMP_CLAUSE_DEFAULT_SHARED;
138 /* These are either array or derived-type PARAMETERs, or vtables.
139 In the former case, the OpenMP standard doesn't consider them to be
140 variables at all (they can't be redefined), but they can nevertheless appear
141 in parallel/task regions, so for default(none) purposes treat them as shared.
142 For vtables the same handling is likely desirable. */
143 if (TREE_CODE (decl) == VAR_DECL
144 && TREE_READONLY (decl)
145 && TREE_STATIC (decl))
146 return OMP_CLAUSE_DEFAULT_SHARED;
148 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
151 /* Return decl that should be used when reporting DEFAULT(NONE)
152 diagnostics. */
154 tree
155 gfc_omp_report_decl (tree decl)
157 if (DECL_ARTIFICIAL (decl)
158 && DECL_LANG_SPECIFIC (decl)
159 && GFC_DECL_SAVED_DESCRIPTOR (decl))
160 return GFC_DECL_SAVED_DESCRIPTOR (decl);
162 return decl;
165 /* Return true if TYPE has any allocatable components. */
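/* Illustrative example (added): for a derived type such as

       type t
         integer, allocatable :: a(:)
       end type t

   gfc_has_alloc_comps returns true for any decl whose (element) type is T,
   because privatizing such a decl also requires constructing and
   destructing the allocatable component A.  */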
167 static bool
168 gfc_has_alloc_comps (tree type, tree decl)
170 tree field, ftype;
172 if (POINTER_TYPE_P (type))
174 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
175 type = TREE_TYPE (type);
176 else if (GFC_DECL_GET_SCALAR_POINTER (decl))
177 return false;
180 while (GFC_DESCRIPTOR_TYPE_P (type) || GFC_ARRAY_TYPE_P (type))
181 type = gfc_get_element_type (type);
183 if (TREE_CODE (type) != RECORD_TYPE)
184 return false;
186 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
188 ftype = TREE_TYPE (field);
189 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
190 return true;
191 if (GFC_DESCRIPTOR_TYPE_P (ftype)
192 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
193 return true;
194 if (gfc_has_alloc_comps (ftype, field))
195 return true;
197 return false;
200 /* Return true if DECL in private clause needs
201 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
202 bool
203 gfc_omp_private_outer_ref (tree decl)
205 tree type = TREE_TYPE (decl);
207 if (GFC_DESCRIPTOR_TYPE_P (type)
208 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
209 return true;
211 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
212 return true;
214 if (gfc_omp_privatize_by_reference (decl))
215 type = TREE_TYPE (type);
217 if (gfc_has_alloc_comps (type, decl))
218 return true;
220 return false;
223 /* Callback for gfc_omp_unshare_expr. */
225 static tree
226 gfc_omp_unshare_expr_r (tree *tp, int *walk_subtrees, void *)
228 tree t = *tp;
229 enum tree_code code = TREE_CODE (t);
231 /* Stop at types, decls, constants like copy_tree_r. */
232 if (TREE_CODE_CLASS (code) == tcc_type
233 || TREE_CODE_CLASS (code) == tcc_declaration
234 || TREE_CODE_CLASS (code) == tcc_constant
235 || code == BLOCK)
236 *walk_subtrees = 0;
237 else if (handled_component_p (t)
238 || TREE_CODE (t) == MEM_REF)
240 *tp = unshare_expr (t);
241 *walk_subtrees = 0;
244 return NULL_TREE;
247 /* Unshare in EXPR anything that the FE (which normally doesn't
248 care much about tree sharing, because during gimplification
249 everything is unshared) might have left shared in a way that
250 could cause problems at omp-low.c time. */
252 static tree
253 gfc_omp_unshare_expr (tree expr)
255 walk_tree (&expr, gfc_omp_unshare_expr_r, NULL, NULL);
256 return expr;
259 enum walk_alloc_comps
260 {
261 WALK_ALLOC_COMPS_DTOR,
262 WALK_ALLOC_COMPS_DEFAULT_CTOR,
263 WALK_ALLOC_COMPS_COPY_CTOR
264 };
266 /* Handle allocatable components in OpenMP clauses. */
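/* Added explanatory sketch: for a derived type like

       type t
         integer, allocatable :: a(:)
         type(u) :: nested          ! U itself has allocatable components
       end type t

   gfc_walk_alloc_comps below walks every component (recursing through
   NESTED and over array elements where needed) and, depending on KIND,
   emits roughly
     DTOR:          deallocate DECL%A if it is allocated
     DEFAULT_CTOR:  allocate DEST%A to the shape of DECL%A, without copying
     COPY_CTOR:     allocate DEST%A and copy DECL%A into it.  */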
268 static tree
269 gfc_walk_alloc_comps (tree decl, tree dest, tree var,
270 enum walk_alloc_comps kind)
272 stmtblock_t block, tmpblock;
273 tree type = TREE_TYPE (decl), then_b, tem, field;
274 gfc_init_block (&block);
276 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
278 if (GFC_DESCRIPTOR_TYPE_P (type))
280 gfc_init_block (&tmpblock);
281 tem = gfc_full_array_size (&tmpblock, decl,
282 GFC_TYPE_ARRAY_RANK (type));
283 then_b = gfc_finish_block (&tmpblock);
284 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (then_b));
285 tem = gfc_omp_unshare_expr (tem);
286 tem = fold_build2_loc (input_location, MINUS_EXPR,
287 gfc_array_index_type, tem,
288 gfc_index_one_node);
290 else
292 if (!TYPE_DOMAIN (type)
293 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
294 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
295 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
297 tem = fold_build2 (EXACT_DIV_EXPR, sizetype,
298 TYPE_SIZE_UNIT (type),
299 TYPE_SIZE_UNIT (TREE_TYPE (type)));
300 tem = size_binop (MINUS_EXPR, tem, size_one_node);
302 else
303 tem = array_type_nelts (type);
304 tem = fold_convert (gfc_array_index_type, tem);
307 tree nelems = gfc_evaluate_now (tem, &block);
308 tree index = gfc_create_var (gfc_array_index_type, "S");
310 gfc_init_block (&tmpblock);
311 tem = gfc_conv_array_data (decl);
312 tree declvar = build_fold_indirect_ref_loc (input_location, tem);
313 tree declvref = gfc_build_array_ref (declvar, index, NULL);
314 tree destvar, destvref = NULL_TREE;
315 if (dest)
317 tem = gfc_conv_array_data (dest);
318 destvar = build_fold_indirect_ref_loc (input_location, tem);
319 destvref = gfc_build_array_ref (destvar, index, NULL);
321 gfc_add_expr_to_block (&tmpblock,
322 gfc_walk_alloc_comps (declvref, destvref,
323 var, kind));
325 gfc_loopinfo loop;
326 gfc_init_loopinfo (&loop);
327 loop.dimen = 1;
328 loop.from[0] = gfc_index_zero_node;
329 loop.loopvar[0] = index;
330 loop.to[0] = nelems;
331 gfc_trans_scalarizing_loops (&loop, &tmpblock);
332 gfc_add_block_to_block (&block, &loop.pre);
333 return gfc_finish_block (&block);
335 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var))
337 decl = build_fold_indirect_ref_loc (input_location, decl);
338 if (dest)
339 dest = build_fold_indirect_ref_loc (input_location, dest);
340 type = TREE_TYPE (decl);
343 gcc_assert (TREE_CODE (type) == RECORD_TYPE);
344 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
346 tree ftype = TREE_TYPE (field);
347 tree declf, destf = NULL_TREE;
348 bool has_alloc_comps = gfc_has_alloc_comps (ftype, field);
349 if ((!GFC_DESCRIPTOR_TYPE_P (ftype)
350 || GFC_TYPE_ARRAY_AKIND (ftype) != GFC_ARRAY_ALLOCATABLE)
351 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
352 && !has_alloc_comps)
353 continue;
354 declf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
355 decl, field, NULL_TREE);
356 if (dest)
357 destf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
358 dest, field, NULL_TREE);
360 tem = NULL_TREE;
361 switch (kind)
363 case WALK_ALLOC_COMPS_DTOR:
364 break;
365 case WALK_ALLOC_COMPS_DEFAULT_CTOR:
366 if (GFC_DESCRIPTOR_TYPE_P (ftype)
367 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
369 gfc_add_modify (&block, unshare_expr (destf),
370 unshare_expr (declf));
371 tem = gfc_duplicate_allocatable_nocopy
372 (destf, declf, ftype,
373 GFC_TYPE_ARRAY_RANK (ftype));
375 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
376 tem = gfc_duplicate_allocatable_nocopy (destf, declf, ftype, 0);
377 break;
378 case WALK_ALLOC_COMPS_COPY_CTOR:
379 if (GFC_DESCRIPTOR_TYPE_P (ftype)
380 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
381 tem = gfc_duplicate_allocatable (destf, declf, ftype,
382 GFC_TYPE_ARRAY_RANK (ftype));
383 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
384 tem = gfc_duplicate_allocatable (destf, declf, ftype, 0);
385 break;
387 if (tem)
388 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
389 if (has_alloc_comps)
391 gfc_init_block (&tmpblock);
392 gfc_add_expr_to_block (&tmpblock,
393 gfc_walk_alloc_comps (declf, destf,
394 field, kind));
395 then_b = gfc_finish_block (&tmpblock);
396 if (GFC_DESCRIPTOR_TYPE_P (ftype)
397 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
398 tem = gfc_conv_descriptor_data_get (unshare_expr (declf));
399 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
400 tem = unshare_expr (declf);
401 else
402 tem = NULL_TREE;
403 if (tem)
405 tem = fold_convert (pvoid_type_node, tem);
406 tem = fold_build2_loc (input_location, NE_EXPR,
407 boolean_type_node, tem,
408 null_pointer_node);
409 then_b = build3_loc (input_location, COND_EXPR, void_type_node,
410 tem, then_b,
411 build_empty_stmt (input_location));
413 gfc_add_expr_to_block (&block, then_b);
415 if (kind == WALK_ALLOC_COMPS_DTOR)
417 if (GFC_DESCRIPTOR_TYPE_P (ftype)
418 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
420 tem = gfc_trans_dealloc_allocated (unshare_expr (declf),
421 false, NULL);
422 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
424 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
426 tem = gfc_call_free (unshare_expr (declf));
427 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
432 return gfc_finish_block (&block);
435 /* Return code to initialize DECL with its default constructor, or
436 NULL if there's nothing to do. */
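/* Illustration (added): for

       integer, allocatable :: a(:)
     !$omp parallel private(a)

   the private copy must mirror the allocation status of the outer variable:
   unallocated if the outer A is unallocated, otherwise allocated to the same
   size (without copying the contents).  The conditional code built below
   implements exactly that; for REDUCTION the private copy is allocated
   unconditionally, since reductions require an allocated allocatable.  */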
438 tree
439 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
441 tree type = TREE_TYPE (decl), size, ptr, cond, then_b, else_b;
442 stmtblock_t block, cond_block;
444 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
445 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE
446 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR
447 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION);
449 if ((! GFC_DESCRIPTOR_TYPE_P (type)
450 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
451 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
453 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
455 gcc_assert (outer);
456 gfc_start_block (&block);
457 tree tem = gfc_walk_alloc_comps (outer, decl,
458 OMP_CLAUSE_DECL (clause),
459 WALK_ALLOC_COMPS_DEFAULT_CTOR);
460 gfc_add_expr_to_block (&block, tem);
461 return gfc_finish_block (&block);
463 return NULL_TREE;
466 gcc_assert (outer != NULL_TREE);
468 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
469 "not currently allocated" allocation status if the outer variable is
470 "not currently allocated"; otherwise they should be allocated. */
471 gfc_start_block (&block);
473 gfc_init_block (&cond_block);
475 if (GFC_DESCRIPTOR_TYPE_P (type))
477 gfc_add_modify (&cond_block, decl, outer);
478 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
479 size = gfc_conv_descriptor_ubound_get (decl, rank);
480 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
481 size,
482 gfc_conv_descriptor_lbound_get (decl, rank));
483 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
484 size, gfc_index_one_node);
485 if (GFC_TYPE_ARRAY_RANK (type) > 1)
486 size = fold_build2_loc (input_location, MULT_EXPR,
487 gfc_array_index_type, size,
488 gfc_conv_descriptor_stride_get (decl, rank));
489 tree esize = fold_convert (gfc_array_index_type,
490 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
491 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
492 size, esize);
493 size = unshare_expr (size);
494 size = gfc_evaluate_now (fold_convert (size_type_node, size),
495 &cond_block);
497 else
498 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
499 ptr = gfc_create_var (pvoid_type_node, NULL);
500 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
501 if (GFC_DESCRIPTOR_TYPE_P (type))
502 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl), ptr);
503 else
504 gfc_add_modify (&cond_block, unshare_expr (decl),
505 fold_convert (TREE_TYPE (decl), ptr));
506 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
508 tree tem = gfc_walk_alloc_comps (outer, decl,
509 OMP_CLAUSE_DECL (clause),
510 WALK_ALLOC_COMPS_DEFAULT_CTOR);
511 gfc_add_expr_to_block (&cond_block, tem);
513 then_b = gfc_finish_block (&cond_block);
515 /* Reduction clause requires allocated ALLOCATABLE. */
516 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_REDUCTION)
518 gfc_init_block (&cond_block);
519 if (GFC_DESCRIPTOR_TYPE_P (type))
520 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl),
521 null_pointer_node);
522 else
523 gfc_add_modify (&cond_block, unshare_expr (decl),
524 build_zero_cst (TREE_TYPE (decl)));
525 else_b = gfc_finish_block (&cond_block);
527 tree tem = fold_convert (pvoid_type_node,
528 GFC_DESCRIPTOR_TYPE_P (type)
529 ? gfc_conv_descriptor_data_get (outer) : outer);
530 tem = unshare_expr (tem);
531 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
532 tem, null_pointer_node);
533 gfc_add_expr_to_block (&block,
534 build3_loc (input_location, COND_EXPR,
535 void_type_node, cond, then_b,
536 else_b));
538 else
539 gfc_add_expr_to_block (&block, then_b);
541 return gfc_finish_block (&block);
544 /* Build and return code for a copy constructor from SRC to DEST. */
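/* Illustration (added): for

       integer, allocatable :: a(:)
     !$omp parallel firstprivate(a)

   the code built below allocates the private descriptor to SRC's size and
   memcpy's the payload when SRC is allocated, and nullifies the private
   copy when SRC is unallocated.  */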
546 tree
547 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
549 tree type = TREE_TYPE (dest), ptr, size, call;
550 tree cond, then_b, else_b;
551 stmtblock_t block, cond_block;
553 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE
554 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
556 if ((! GFC_DESCRIPTOR_TYPE_P (type)
557 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
558 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
560 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
562 gfc_start_block (&block);
563 gfc_add_modify (&block, dest, src);
564 tree tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
565 WALK_ALLOC_COMPS_COPY_CTOR);
566 gfc_add_expr_to_block (&block, tem);
567 return gfc_finish_block (&block);
569 else
570 return build2_v (MODIFY_EXPR, dest, src);
573 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
574 and copied from SRC. */
575 gfc_start_block (&block);
577 gfc_init_block (&cond_block);
579 gfc_add_modify (&cond_block, dest, src);
580 if (GFC_DESCRIPTOR_TYPE_P (type))
582 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
583 size = gfc_conv_descriptor_ubound_get (dest, rank);
584 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
585 size,
586 gfc_conv_descriptor_lbound_get (dest, rank));
587 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
588 size, gfc_index_one_node);
589 if (GFC_TYPE_ARRAY_RANK (type) > 1)
590 size = fold_build2_loc (input_location, MULT_EXPR,
591 gfc_array_index_type, size,
592 gfc_conv_descriptor_stride_get (dest, rank));
593 tree esize = fold_convert (gfc_array_index_type,
594 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
595 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
596 size, esize);
597 size = unshare_expr (size);
598 size = gfc_evaluate_now (fold_convert (size_type_node, size),
599 &cond_block);
601 else
602 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
603 ptr = gfc_create_var (pvoid_type_node, NULL);
604 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
605 if (GFC_DESCRIPTOR_TYPE_P (type))
606 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest), ptr);
607 else
608 gfc_add_modify (&cond_block, unshare_expr (dest),
609 fold_convert (TREE_TYPE (dest), ptr));
611 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
612 ? gfc_conv_descriptor_data_get (src) : src;
613 srcptr = unshare_expr (srcptr);
614 srcptr = fold_convert (pvoid_type_node, srcptr);
615 call = build_call_expr_loc (input_location,
616 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
617 srcptr, size);
618 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
619 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
621 tree tem = gfc_walk_alloc_comps (src, dest,
622 OMP_CLAUSE_DECL (clause),
623 WALK_ALLOC_COMPS_COPY_CTOR);
624 gfc_add_expr_to_block (&cond_block, tem);
626 then_b = gfc_finish_block (&cond_block);
628 gfc_init_block (&cond_block);
629 if (GFC_DESCRIPTOR_TYPE_P (type))
630 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest),
631 null_pointer_node);
632 else
633 gfc_add_modify (&cond_block, unshare_expr (dest),
634 build_zero_cst (TREE_TYPE (dest)));
635 else_b = gfc_finish_block (&cond_block);
637 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
638 unshare_expr (srcptr), null_pointer_node);
639 gfc_add_expr_to_block (&block,
640 build3_loc (input_location, COND_EXPR,
641 void_type_node, cond, then_b, else_b));
643 return gfc_finish_block (&block);
646 /* Similarly, except use an intrinsic or pointer assignment operator
647 instead. */
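/* Added note: this is what e.g. LASTPRIVATE, COPYIN and COPYPRIVATE use.
   Unlike the copy constructor, DEST may already be allocated, so the code
   below first destroys any allocatable components of DEST, then
   (re)allocates the payload only when DEST is unallocated or its bounds
   differ from SRC's, and finally copies SRC over.  */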
649 tree
650 gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
652 tree type = TREE_TYPE (dest), ptr, size, call, nonalloc;
653 tree cond, then_b, else_b;
654 stmtblock_t block, cond_block, cond_block2, inner_block;
656 if ((! GFC_DESCRIPTOR_TYPE_P (type)
657 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
658 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
660 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
662 gfc_start_block (&block);
663 /* First dealloc any allocatable components in DEST. */
664 tree tem = gfc_walk_alloc_comps (dest, NULL_TREE,
665 OMP_CLAUSE_DECL (clause),
666 WALK_ALLOC_COMPS_DTOR);
667 gfc_add_expr_to_block (&block, tem);
668 /* Then copy over toplevel data. */
669 gfc_add_modify (&block, dest, src);
670 /* Finally allocate any allocatable components and copy. */
671 tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
672 WALK_ALLOC_COMPS_COPY_CTOR);
673 gfc_add_expr_to_block (&block, tem);
674 return gfc_finish_block (&block);
676 else
677 return build2_v (MODIFY_EXPR, dest, src);
680 gfc_start_block (&block);
682 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
684 then_b = gfc_walk_alloc_comps (dest, NULL_TREE, OMP_CLAUSE_DECL (clause),
685 WALK_ALLOC_COMPS_DTOR);
686 tree tem = fold_convert (pvoid_type_node,
687 GFC_DESCRIPTOR_TYPE_P (type)
688 ? gfc_conv_descriptor_data_get (dest) : dest);
689 tem = unshare_expr (tem);
690 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
691 tem, null_pointer_node);
692 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
693 then_b, build_empty_stmt (input_location));
694 gfc_add_expr_to_block (&block, tem);
697 gfc_init_block (&cond_block);
699 if (GFC_DESCRIPTOR_TYPE_P (type))
701 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
702 size = gfc_conv_descriptor_ubound_get (src, rank);
703 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
704 size,
705 gfc_conv_descriptor_lbound_get (src, rank));
706 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
707 size, gfc_index_one_node);
708 if (GFC_TYPE_ARRAY_RANK (type) > 1)
709 size = fold_build2_loc (input_location, MULT_EXPR,
710 gfc_array_index_type, size,
711 gfc_conv_descriptor_stride_get (src, rank));
712 tree esize = fold_convert (gfc_array_index_type,
713 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
714 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
715 size, esize);
716 size = unshare_expr (size);
717 size = gfc_evaluate_now (fold_convert (size_type_node, size),
718 &cond_block);
720 else
721 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
722 ptr = gfc_create_var (pvoid_type_node, NULL);
724 tree destptr = GFC_DESCRIPTOR_TYPE_P (type)
725 ? gfc_conv_descriptor_data_get (dest) : dest;
726 destptr = unshare_expr (destptr);
727 destptr = fold_convert (pvoid_type_node, destptr);
728 gfc_add_modify (&cond_block, ptr, destptr);
730 nonalloc = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
731 destptr, null_pointer_node);
732 cond = nonalloc;
733 if (GFC_DESCRIPTOR_TYPE_P (type))
735 int i;
736 for (i = 0; i < GFC_TYPE_ARRAY_RANK (type); i++)
738 tree rank = gfc_rank_cst[i];
739 tree tem = gfc_conv_descriptor_ubound_get (src, rank);
740 tem = fold_build2_loc (input_location, MINUS_EXPR,
741 gfc_array_index_type, tem,
742 gfc_conv_descriptor_lbound_get (src, rank));
743 tem = fold_build2_loc (input_location, PLUS_EXPR,
744 gfc_array_index_type, tem,
745 gfc_conv_descriptor_lbound_get (dest, rank));
746 tem = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
747 tem, gfc_conv_descriptor_ubound_get (dest,
748 rank));
749 cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
750 boolean_type_node, cond, tem);
754 gfc_init_block (&cond_block2);
756 if (GFC_DESCRIPTOR_TYPE_P (type))
758 gfc_init_block (&inner_block);
759 gfc_allocate_using_malloc (&inner_block, ptr, size, NULL_TREE);
760 then_b = gfc_finish_block (&inner_block);
762 gfc_init_block (&inner_block);
763 gfc_add_modify (&inner_block, ptr,
764 gfc_call_realloc (&inner_block, ptr, size));
765 else_b = gfc_finish_block (&inner_block);
767 gfc_add_expr_to_block (&cond_block2,
768 build3_loc (input_location, COND_EXPR,
769 void_type_node,
770 unshare_expr (nonalloc),
771 then_b, else_b));
772 gfc_add_modify (&cond_block2, dest, src);
773 gfc_conv_descriptor_data_set (&cond_block2, unshare_expr (dest), ptr);
775 else
777 gfc_allocate_using_malloc (&cond_block2, ptr, size, NULL_TREE);
778 gfc_add_modify (&cond_block2, unshare_expr (dest),
779 fold_convert (type, ptr));
781 then_b = gfc_finish_block (&cond_block2);
782 else_b = build_empty_stmt (input_location);
784 gfc_add_expr_to_block (&cond_block,
785 build3_loc (input_location, COND_EXPR,
786 void_type_node, unshare_expr (cond),
787 then_b, else_b));
789 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
790 ? gfc_conv_descriptor_data_get (src) : src;
791 srcptr = unshare_expr (srcptr);
792 srcptr = fold_convert (pvoid_type_node, srcptr);
793 call = build_call_expr_loc (input_location,
794 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
795 srcptr, size);
796 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
797 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
799 tree tem = gfc_walk_alloc_comps (src, dest,
800 OMP_CLAUSE_DECL (clause),
801 WALK_ALLOC_COMPS_COPY_CTOR);
802 gfc_add_expr_to_block (&cond_block, tem);
804 then_b = gfc_finish_block (&cond_block);
806 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_COPYIN)
808 gfc_init_block (&cond_block);
809 if (GFC_DESCRIPTOR_TYPE_P (type))
810 gfc_add_expr_to_block (&cond_block,
811 gfc_trans_dealloc_allocated (unshare_expr (dest),
812 false, NULL));
813 else
815 destptr = gfc_evaluate_now (destptr, &cond_block);
816 gfc_add_expr_to_block (&cond_block, gfc_call_free (destptr));
817 gfc_add_modify (&cond_block, unshare_expr (dest),
818 build_zero_cst (TREE_TYPE (dest)));
820 else_b = gfc_finish_block (&cond_block);
822 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
823 unshare_expr (srcptr), null_pointer_node);
824 gfc_add_expr_to_block (&block,
825 build3_loc (input_location, COND_EXPR,
826 void_type_node, cond,
827 then_b, else_b));
829 else
830 gfc_add_expr_to_block (&block, then_b);
832 return gfc_finish_block (&block);
835 static void
836 gfc_omp_linear_clause_add_loop (stmtblock_t *block, tree dest, tree src,
837 tree add, tree nelems)
839 stmtblock_t tmpblock;
840 tree desta, srca, index = gfc_create_var (gfc_array_index_type, "S");
841 nelems = gfc_evaluate_now (nelems, block);
843 gfc_init_block (&tmpblock);
844 if (TREE_CODE (TREE_TYPE (dest)) == ARRAY_TYPE)
846 desta = gfc_build_array_ref (dest, index, NULL);
847 srca = gfc_build_array_ref (src, index, NULL);
849 else
851 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest)));
852 tree idx = fold_build2 (MULT_EXPR, sizetype,
853 fold_convert (sizetype, index),
854 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest))));
855 desta = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
856 TREE_TYPE (dest), dest,
857 idx));
858 srca = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
859 TREE_TYPE (src), src,
860 idx));
862 gfc_add_modify (&tmpblock, desta,
863 fold_build2 (PLUS_EXPR, TREE_TYPE (desta),
864 srca, add));
866 gfc_loopinfo loop;
867 gfc_init_loopinfo (&loop);
868 loop.dimen = 1;
869 loop.from[0] = gfc_index_zero_node;
870 loop.loopvar[0] = index;
871 loop.to[0] = nelems;
872 gfc_trans_scalarizing_loops (&loop, &tmpblock);
873 gfc_add_block_to_block (block, &loop.pre);
876 /* Build and return code for a constructor of DEST that initializes
877 it to SRC plus ADD (ADD is scalar integer). */
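/* Added note: when the variable in a LINEAR clause is an array or an
   allocatable (the OMP_CLAUSE_LINEAR_ARRAY case), each privatized copy is
   allocated like SRC where necessary and then initialized element-wise to
   SRC + ADD, ADD being the already-evaluated linear offset for the given
   iteration.  */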
879 tree
880 gfc_omp_clause_linear_ctor (tree clause, tree dest, tree src, tree add)
882 tree type = TREE_TYPE (dest), ptr, size, nelems = NULL_TREE;
883 stmtblock_t block;
885 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
887 gfc_start_block (&block);
888 add = gfc_evaluate_now (add, &block);
890 if ((! GFC_DESCRIPTOR_TYPE_P (type)
891 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
892 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
894 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
895 if (!TYPE_DOMAIN (type)
896 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
897 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
898 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
900 nelems = fold_build2 (EXACT_DIV_EXPR, sizetype,
901 TYPE_SIZE_UNIT (type),
902 TYPE_SIZE_UNIT (TREE_TYPE (type)));
903 nelems = size_binop (MINUS_EXPR, nelems, size_one_node);
905 else
906 nelems = array_type_nelts (type);
907 nelems = fold_convert (gfc_array_index_type, nelems);
909 gfc_omp_linear_clause_add_loop (&block, dest, src, add, nelems);
910 return gfc_finish_block (&block);
913 /* Allocatable arrays in LINEAR clauses need to be allocated
914 and copied from SRC. */
915 gfc_add_modify (&block, dest, src);
916 if (GFC_DESCRIPTOR_TYPE_P (type))
918 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
919 size = gfc_conv_descriptor_ubound_get (dest, rank);
920 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
921 size,
922 gfc_conv_descriptor_lbound_get (dest, rank));
923 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
924 size, gfc_index_one_node);
925 if (GFC_TYPE_ARRAY_RANK (type) > 1)
926 size = fold_build2_loc (input_location, MULT_EXPR,
927 gfc_array_index_type, size,
928 gfc_conv_descriptor_stride_get (dest, rank));
929 tree esize = fold_convert (gfc_array_index_type,
930 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
931 nelems = gfc_evaluate_now (unshare_expr (size), &block);
932 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
933 nelems, unshare_expr (esize));
934 size = gfc_evaluate_now (fold_convert (size_type_node, size),
935 &block);
936 nelems = fold_build2_loc (input_location, MINUS_EXPR,
937 gfc_array_index_type, nelems,
938 gfc_index_one_node);
940 else
941 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
942 ptr = gfc_create_var (pvoid_type_node, NULL);
943 gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
944 if (GFC_DESCRIPTOR_TYPE_P (type))
946 gfc_conv_descriptor_data_set (&block, unshare_expr (dest), ptr);
947 tree etype = gfc_get_element_type (type);
948 ptr = fold_convert (build_pointer_type (etype), ptr);
949 tree srcptr = gfc_conv_descriptor_data_get (unshare_expr (src));
950 srcptr = fold_convert (build_pointer_type (etype), srcptr);
951 gfc_omp_linear_clause_add_loop (&block, ptr, srcptr, add, nelems);
953 else
955 gfc_add_modify (&block, unshare_expr (dest),
956 fold_convert (TREE_TYPE (dest), ptr));
957 ptr = fold_convert (TREE_TYPE (dest), ptr);
958 tree dstm = build_fold_indirect_ref (ptr);
959 tree srcm = build_fold_indirect_ref (unshare_expr (src));
960 gfc_add_modify (&block, dstm,
961 fold_build2 (PLUS_EXPR, TREE_TYPE (add), srcm, add));
963 return gfc_finish_block (&block);
966 /* Build and return code destructing DECL. Return NULL if nothing
967 to be done. */
969 tree
970 gfc_omp_clause_dtor (tree clause, tree decl)
972 tree type = TREE_TYPE (decl), tem;
974 if ((! GFC_DESCRIPTOR_TYPE_P (type)
975 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
976 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
978 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
979 return gfc_walk_alloc_comps (decl, NULL_TREE,
980 OMP_CLAUSE_DECL (clause),
981 WALK_ALLOC_COMPS_DTOR);
982 return NULL_TREE;
985 if (GFC_DESCRIPTOR_TYPE_P (type))
986 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
987 to be deallocated if they were allocated. */
988 tem = gfc_trans_dealloc_allocated (decl, false, NULL);
989 else
990 tem = gfc_call_free (decl);
991 tem = gfc_omp_unshare_expr (tem);
993 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
995 stmtblock_t block;
996 tree then_b;
998 gfc_init_block (&block);
999 gfc_add_expr_to_block (&block,
1000 gfc_walk_alloc_comps (decl, NULL_TREE,
1001 OMP_CLAUSE_DECL (clause),
1002 WALK_ALLOC_COMPS_DTOR));
1003 gfc_add_expr_to_block (&block, tem);
1004 then_b = gfc_finish_block (&block);
1006 tem = fold_convert (pvoid_type_node,
1007 GFC_DESCRIPTOR_TYPE_P (type)
1008 ? gfc_conv_descriptor_data_get (decl) : decl);
1009 tem = unshare_expr (tem);
1010 tree cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1011 tem, null_pointer_node);
1012 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
1013 then_b, build_empty_stmt (input_location));
1015 return tem;
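/* Added note on the MAP lowering below: when an allocatable or pointer
   array with a descriptor is mapped (e.g. "!$omp target map(tofrom: a)"),
   the single MAP clause for A is rewritten into a MAP of the actual data,
   an OMP_CLAUSE_MAP_TO_PSET clause for the descriptor itself, and
   OMP_CLAUSE_MAP_POINTER clauses for the data pointer and, where needed,
   the enclosing reference, so the runtime can rebuild a valid descriptor
   on the target device.  */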
1019 void
1020 gfc_omp_finish_clause (tree c, gimple_seq *pre_p)
1022 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
1023 return;
1025 tree decl = OMP_CLAUSE_DECL (c);
1026 tree c2 = NULL_TREE, c3 = NULL_TREE, c4 = NULL_TREE;
1027 if (POINTER_TYPE_P (TREE_TYPE (decl)))
1029 if (!gfc_omp_privatize_by_reference (decl)
1030 && !GFC_DECL_GET_SCALAR_POINTER (decl)
1031 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
1032 && !GFC_DECL_CRAY_POINTEE (decl)
1033 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
1034 return;
1035 tree orig_decl = decl;
1036 c4 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1037 OMP_CLAUSE_MAP_KIND (c4) = OMP_CLAUSE_MAP_POINTER;
1038 OMP_CLAUSE_DECL (c4) = decl;
1039 OMP_CLAUSE_SIZE (c4) = size_int (0);
1040 decl = build_fold_indirect_ref (decl);
1041 OMP_CLAUSE_DECL (c) = decl;
1042 OMP_CLAUSE_SIZE (c) = NULL_TREE;
1043 if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
1044 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
1045 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
1047 c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1048 OMP_CLAUSE_MAP_KIND (c3) = OMP_CLAUSE_MAP_POINTER;
1049 OMP_CLAUSE_DECL (c3) = unshare_expr (decl);
1050 OMP_CLAUSE_SIZE (c3) = size_int (0);
1051 decl = build_fold_indirect_ref (decl);
1052 OMP_CLAUSE_DECL (c) = decl;
1055 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1057 stmtblock_t block;
1058 gfc_start_block (&block);
1059 tree type = TREE_TYPE (decl);
1060 tree ptr = gfc_conv_descriptor_data_get (decl);
1061 ptr = fold_convert (build_pointer_type (char_type_node), ptr);
1062 ptr = build_fold_indirect_ref (ptr);
1063 OMP_CLAUSE_DECL (c) = ptr;
1064 c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1065 OMP_CLAUSE_MAP_KIND (c2) = OMP_CLAUSE_MAP_TO_PSET;
1066 OMP_CLAUSE_DECL (c2) = decl;
1067 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (type);
1068 c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1069 OMP_CLAUSE_MAP_KIND (c3) = OMP_CLAUSE_MAP_POINTER;
1070 OMP_CLAUSE_DECL (c3) = gfc_conv_descriptor_data_get (decl);
1071 OMP_CLAUSE_SIZE (c3) = size_int (0);
1072 tree size = create_tmp_var (gfc_array_index_type);
1073 tree elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
1074 elemsz = fold_convert (gfc_array_index_type, elemsz);
1075 if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
1076 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT)
1078 stmtblock_t cond_block;
1079 tree tem, then_b, else_b, zero, cond;
1081 gfc_init_block (&cond_block);
1082 tem = gfc_full_array_size (&cond_block, decl,
1083 GFC_TYPE_ARRAY_RANK (type));
1084 gfc_add_modify (&cond_block, size, tem);
1085 gfc_add_modify (&cond_block, size,
1086 fold_build2 (MULT_EXPR, gfc_array_index_type,
1087 size, elemsz));
1088 then_b = gfc_finish_block (&cond_block);
1089 gfc_init_block (&cond_block);
1090 zero = build_int_cst (gfc_array_index_type, 0);
1091 gfc_add_modify (&cond_block, size, zero);
1092 else_b = gfc_finish_block (&cond_block);
1093 tem = gfc_conv_descriptor_data_get (decl);
1094 tem = fold_convert (pvoid_type_node, tem);
1095 cond = fold_build2_loc (input_location, NE_EXPR,
1096 boolean_type_node, tem, null_pointer_node);
1097 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
1098 void_type_node, cond,
1099 then_b, else_b));
1101 else
1103 gfc_add_modify (&block, size,
1104 gfc_full_array_size (&block, decl,
1105 GFC_TYPE_ARRAY_RANK (type)));
1106 gfc_add_modify (&block, size,
1107 fold_build2 (MULT_EXPR, gfc_array_index_type,
1108 size, elemsz));
1110 OMP_CLAUSE_SIZE (c) = size;
1111 tree stmt = gfc_finish_block (&block);
1112 gimplify_and_add (stmt, pre_p);
1114 tree last = c;
1115 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
1116 OMP_CLAUSE_SIZE (c)
1117 = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
1118 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
1119 if (c2)
1121 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (last);
1122 OMP_CLAUSE_CHAIN (last) = c2;
1123 last = c2;
1125 if (c3)
1127 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (last);
1128 OMP_CLAUSE_CHAIN (last) = c3;
1129 last = c3;
1131 if (c4)
1133 OMP_CLAUSE_CHAIN (c4) = OMP_CLAUSE_CHAIN (last);
1134 OMP_CLAUSE_CHAIN (last) = c4;
1135 last = c4;
1140 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1141 disregarded in OpenMP construct, because it is going to be
1142 remapped during OpenMP lowering. SHARED is true if DECL
1143 is going to be shared, false if it is going to be privatized. */
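/* Illustration (added): for

       common /c/ i, j
     !$omp parallel private(i) shared(j)

   I must be privatized on its own, so its DECL_VALUE_EXPR (a COMPONENT_REF
   into the COMMON block variable) has to be disregarded; J, being shared,
   can simply be accessed through the one shared COMMON block.  */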
1145 bool
1146 gfc_omp_disregard_value_expr (tree decl, bool shared)
1148 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1149 && DECL_HAS_VALUE_EXPR_P (decl))
1151 tree value = DECL_VALUE_EXPR (decl);
1153 if (TREE_CODE (value) == COMPONENT_REF
1154 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1155 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1157 /* If variable in COMMON or EQUIVALENCE is privatized, return
1158 true, as just that variable is supposed to be privatized,
1159 not the whole COMMON or whole EQUIVALENCE.
1160 For shared variables in COMMON or EQUIVALENCE, let them be
1161 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1162 from the same COMMON or EQUIVALENCE just one sharing of the
1163 whole COMMON or EQUIVALENCE is enough. */
1164 return ! shared;
1168 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
1169 return ! shared;
1171 return false;
1174 /* Return true if DECL that is shared iff SHARED is true should
1175 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1176 flag set. */
1178 bool
1179 gfc_omp_private_debug_clause (tree decl, bool shared)
1181 if (GFC_DECL_CRAY_POINTEE (decl))
1182 return true;
1184 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1185 && DECL_HAS_VALUE_EXPR_P (decl))
1187 tree value = DECL_VALUE_EXPR (decl);
1189 if (TREE_CODE (value) == COMPONENT_REF
1190 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1191 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1192 return shared;
1195 return false;
1198 /* Register language specific type size variables as potentially OpenMP
1199 firstprivate variables. */
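/* Added note: for array types carrying hidden shape information, e.g. an
   automatic array

       subroutine s (n)
         real :: a(n)

   the lbound/ubound/stride/size/offset trees hanging off A's type are
   variables themselves and must be firstprivatized, so that a privatized A
   keeps its shape inside the OpenMP region.  */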
1201 void
1202 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
1204 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
1206 int r;
1208 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
1209 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
1211 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
1212 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
1213 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
1215 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
1216 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
1221 static inline tree
1222 gfc_trans_add_clause (tree node, tree tail)
1224 OMP_CLAUSE_CHAIN (node) = tail;
1225 return node;
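/* Added note: for "!$omp declare simd" clauses, gfc_trans_omp_variable
   returns the position of the dummy argument in the (possibly extended)
   argument list rather than a DECL, because simd clone clauses identify
   arguments by index; hidden arguments such as the entry master selector
   or a character result length shift that index, which is what the
   declare_simd path below accounts for.  */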
1228 static tree
1229 gfc_trans_omp_variable (gfc_symbol *sym, bool declare_simd)
1231 if (declare_simd)
1233 int cnt = 0;
1234 gfc_symbol *proc_sym;
1235 gfc_formal_arglist *f;
1237 gcc_assert (sym->attr.dummy);
1238 proc_sym = sym->ns->proc_name;
1239 if (proc_sym->attr.entry_master)
1240 ++cnt;
1241 if (gfc_return_by_reference (proc_sym))
1243 ++cnt;
1244 if (proc_sym->ts.type == BT_CHARACTER)
1245 ++cnt;
1247 for (f = gfc_sym_get_dummy_args (proc_sym); f; f = f->next)
1248 if (f->sym == sym)
1249 break;
1250 else if (f->sym)
1251 ++cnt;
1252 gcc_assert (f);
1253 return build_int_cst (integer_type_node, cnt);
1256 tree t = gfc_get_symbol_decl (sym);
1257 tree parent_decl;
1258 int parent_flag;
1259 bool return_value;
1260 bool alternate_entry;
1261 bool entry_master;
1263 return_value = sym->attr.function && sym->result == sym;
1264 alternate_entry = sym->attr.function && sym->attr.entry
1265 && sym->result == sym;
1266 entry_master = sym->attr.result
1267 && sym->ns->proc_name->attr.entry_master
1268 && !gfc_return_by_reference (sym->ns->proc_name);
1269 parent_decl = current_function_decl
1270 ? DECL_CONTEXT (current_function_decl) : NULL_TREE;
1272 if ((t == parent_decl && return_value)
1273 || (sym->ns && sym->ns->proc_name
1274 && sym->ns->proc_name->backend_decl == parent_decl
1275 && (alternate_entry || entry_master)))
1276 parent_flag = 1;
1277 else
1278 parent_flag = 0;
1280 /* Special case for assigning the return value of a function.
1281 Self recursive functions must have an explicit return value. */
1282 if (return_value && (t == current_function_decl || parent_flag))
1283 t = gfc_get_fake_result_decl (sym, parent_flag);
1285 /* Similarly for alternate entry points. */
1286 else if (alternate_entry
1287 && (sym->ns->proc_name->backend_decl == current_function_decl
1288 || parent_flag))
1290 gfc_entry_list *el = NULL;
1292 for (el = sym->ns->entries; el; el = el->next)
1293 if (sym == el->sym)
1295 t = gfc_get_fake_result_decl (sym, parent_flag);
1296 break;
1300 else if (entry_master
1301 && (sym->ns->proc_name->backend_decl == current_function_decl
1302 || parent_flag))
1303 t = gfc_get_fake_result_decl (sym, parent_flag);
1305 return t;
1308 static tree
1309 gfc_trans_omp_variable_list (enum omp_clause_code code,
1310 gfc_omp_namelist *namelist, tree list,
1311 bool declare_simd)
1313 for (; namelist != NULL; namelist = namelist->next)
1314 if (namelist->sym->attr.referenced || declare_simd)
1316 tree t = gfc_trans_omp_variable (namelist->sym, declare_simd);
1317 if (t != error_mark_node)
1319 tree node = build_omp_clause (input_location, code);
1320 OMP_CLAUSE_DECL (node) = t;
1321 list = gfc_trans_add_clause (node, list);
1324 return list;
1327 struct omp_udr_find_orig_data
1328 {
1329 gfc_omp_udr *omp_udr;
1330 bool omp_orig_seen;
1331 };
1333 static int
1334 omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
1335 void *data)
1337 struct omp_udr_find_orig_data *cd = (struct omp_udr_find_orig_data *) data;
1338 if ((*e)->expr_type == EXPR_VARIABLE
1339 && (*e)->symtree->n.sym == cd->omp_udr->omp_orig)
1340 cd->omp_orig_seen = true;
1342 return 0;
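/* Added explanatory sketch: for an array reduction or a user-defined
   reduction, e.g.

       integer :: a(n)
     !$omp parallel do reduction(+:a)

   the middle end cannot synthesize the initializer and combiner itself, so
   gfc_trans_omp_array_reduction_or_udr builds them here: fake symbols for
   the private copy and for the outer variable (the reduction placeholder)
   are created and resolved, and statements equivalent to

       a = 0                    ! OMP_CLAUSE_REDUCTION_INIT
       outer_a = outer_a + a    ! OMP_CLAUSE_REDUCTION_MERGE

   are attached to the clause.  */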
1345 static void
1346 gfc_trans_omp_array_reduction_or_udr (tree c, gfc_omp_namelist *n, locus where)
1348 gfc_symbol *sym = n->sym;
1349 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
1350 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
1351 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
1352 gfc_symbol omp_var_copy[4];
1353 gfc_expr *e1, *e2, *e3, *e4;
1354 gfc_ref *ref;
1355 tree decl, backend_decl, stmt, type, outer_decl;
1356 locus old_loc = gfc_current_locus;
1357 const char *iname;
1358 bool t;
1359 gfc_omp_udr *udr = n->udr ? n->udr->udr : NULL;
1361 decl = OMP_CLAUSE_DECL (c);
1362 gfc_current_locus = where;
1363 type = TREE_TYPE (decl);
1364 outer_decl = create_tmp_var_raw (type);
1365 if (TREE_CODE (decl) == PARM_DECL
1366 && TREE_CODE (type) == REFERENCE_TYPE
1367 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
1368 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
1370 decl = build_fold_indirect_ref (decl);
1371 type = TREE_TYPE (type);
1374 /* Create a fake symbol for init value. */
1375 memset (&init_val_sym, 0, sizeof (init_val_sym));
1376 init_val_sym.ns = sym->ns;
1377 init_val_sym.name = sym->name;
1378 init_val_sym.ts = sym->ts;
1379 init_val_sym.attr.referenced = 1;
1380 init_val_sym.declared_at = where;
1381 init_val_sym.attr.flavor = FL_VARIABLE;
1382 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1383 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
1384 else if (udr->initializer_ns)
1385 backend_decl = NULL;
1386 else
1387 switch (sym->ts.type)
1389 case BT_LOGICAL:
1390 case BT_INTEGER:
1391 case BT_REAL:
1392 case BT_COMPLEX:
1393 backend_decl = build_zero_cst (gfc_sym_type (&init_val_sym));
1394 break;
1395 default:
1396 backend_decl = NULL_TREE;
1397 break;
1399 init_val_sym.backend_decl = backend_decl;
1401 /* Create a fake symbol for the outer array reference. */
1402 outer_sym = *sym;
1403 if (sym->as)
1404 outer_sym.as = gfc_copy_array_spec (sym->as);
1405 outer_sym.attr.dummy = 0;
1406 outer_sym.attr.result = 0;
1407 outer_sym.attr.flavor = FL_VARIABLE;
1408 outer_sym.backend_decl = outer_decl;
1409 if (decl != OMP_CLAUSE_DECL (c))
1410 outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);
1412 /* Create fake symtrees for it. */
1413 symtree1 = gfc_new_symtree (&root1, sym->name);
1414 symtree1->n.sym = sym;
1415 gcc_assert (symtree1 == root1);
1417 symtree2 = gfc_new_symtree (&root2, sym->name);
1418 symtree2->n.sym = &init_val_sym;
1419 gcc_assert (symtree2 == root2);
1421 symtree3 = gfc_new_symtree (&root3, sym->name);
1422 symtree3->n.sym = &outer_sym;
1423 gcc_assert (symtree3 == root3);
1425 memset (omp_var_copy, 0, sizeof omp_var_copy);
1426 if (udr)
1428 omp_var_copy[0] = *udr->omp_out;
1429 omp_var_copy[1] = *udr->omp_in;
1430 *udr->omp_out = outer_sym;
1431 *udr->omp_in = *sym;
1432 if (udr->initializer_ns)
1434 omp_var_copy[2] = *udr->omp_priv;
1435 omp_var_copy[3] = *udr->omp_orig;
1436 *udr->omp_priv = *sym;
1437 *udr->omp_orig = outer_sym;
1441 /* Create expressions. */
1442 e1 = gfc_get_expr ();
1443 e1->expr_type = EXPR_VARIABLE;
1444 e1->where = where;
1445 e1->symtree = symtree1;
1446 e1->ts = sym->ts;
1447 if (sym->attr.dimension)
1449 e1->ref = ref = gfc_get_ref ();
1450 ref->type = REF_ARRAY;
1451 ref->u.ar.where = where;
1452 ref->u.ar.as = sym->as;
1453 ref->u.ar.type = AR_FULL;
1454 ref->u.ar.dimen = 0;
1456 t = gfc_resolve_expr (e1);
1457 gcc_assert (t);
1459 e2 = NULL;
1460 if (backend_decl != NULL_TREE)
1462 e2 = gfc_get_expr ();
1463 e2->expr_type = EXPR_VARIABLE;
1464 e2->where = where;
1465 e2->symtree = symtree2;
1466 e2->ts = sym->ts;
1467 t = gfc_resolve_expr (e2);
1468 gcc_assert (t);
1470 else if (udr->initializer_ns == NULL)
1472 gcc_assert (sym->ts.type == BT_DERIVED);
1473 e2 = gfc_default_initializer (&sym->ts);
1474 gcc_assert (e2);
1475 t = gfc_resolve_expr (e2);
1476 gcc_assert (t);
1478 else if (n->udr->initializer->op == EXEC_ASSIGN)
1480 e2 = gfc_copy_expr (n->udr->initializer->expr2);
1481 t = gfc_resolve_expr (e2);
1482 gcc_assert (t);
1484 if (udr && udr->initializer_ns)
1486 struct omp_udr_find_orig_data cd;
1487 cd.omp_udr = udr;
1488 cd.omp_orig_seen = false;
1489 gfc_code_walker (&n->udr->initializer,
1490 gfc_dummy_code_callback, omp_udr_find_orig, &cd);
1491 if (cd.omp_orig_seen)
1492 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
1495 e3 = gfc_copy_expr (e1);
1496 e3->symtree = symtree3;
1497 t = gfc_resolve_expr (e3);
1498 gcc_assert (t);
1500 iname = NULL;
1501 e4 = NULL;
1502 switch (OMP_CLAUSE_REDUCTION_CODE (c))
1504 case PLUS_EXPR:
1505 case MINUS_EXPR:
1506 e4 = gfc_add (e3, e1);
1507 break;
1508 case MULT_EXPR:
1509 e4 = gfc_multiply (e3, e1);
1510 break;
1511 case TRUTH_ANDIF_EXPR:
1512 e4 = gfc_and (e3, e1);
1513 break;
1514 case TRUTH_ORIF_EXPR:
1515 e4 = gfc_or (e3, e1);
1516 break;
1517 case EQ_EXPR:
1518 e4 = gfc_eqv (e3, e1);
1519 break;
1520 case NE_EXPR:
1521 e4 = gfc_neqv (e3, e1);
1522 break;
1523 case MIN_EXPR:
1524 iname = "min";
1525 break;
1526 case MAX_EXPR:
1527 iname = "max";
1528 break;
1529 case BIT_AND_EXPR:
1530 iname = "iand";
1531 break;
1532 case BIT_IOR_EXPR:
1533 iname = "ior";
1534 break;
1535 case BIT_XOR_EXPR:
1536 iname = "ieor";
1537 break;
1538 case ERROR_MARK:
1539 if (n->udr->combiner->op == EXEC_ASSIGN)
1541 gfc_free_expr (e3);
1542 e3 = gfc_copy_expr (n->udr->combiner->expr1);
1543 e4 = gfc_copy_expr (n->udr->combiner->expr2);
1544 t = gfc_resolve_expr (e3);
1545 gcc_assert (t);
1546 t = gfc_resolve_expr (e4);
1547 gcc_assert (t);
1549 break;
1550 default:
1551 gcc_unreachable ();
1553 if (iname != NULL)
1555 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
1556 intrinsic_sym.ns = sym->ns;
1557 intrinsic_sym.name = iname;
1558 intrinsic_sym.ts = sym->ts;
1559 intrinsic_sym.attr.referenced = 1;
1560 intrinsic_sym.attr.intrinsic = 1;
1561 intrinsic_sym.attr.function = 1;
1562 intrinsic_sym.result = &intrinsic_sym;
1563 intrinsic_sym.declared_at = where;
1565 symtree4 = gfc_new_symtree (&root4, iname);
1566 symtree4->n.sym = &intrinsic_sym;
1567 gcc_assert (symtree4 == root4);
1569 e4 = gfc_get_expr ();
1570 e4->expr_type = EXPR_FUNCTION;
1571 e4->where = where;
1572 e4->symtree = symtree4;
1573 e4->value.function.actual = gfc_get_actual_arglist ();
1574 e4->value.function.actual->expr = e3;
1575 e4->value.function.actual->next = gfc_get_actual_arglist ();
1576 e4->value.function.actual->next->expr = e1;
1578 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1580 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1581 e1 = gfc_copy_expr (e1);
1582 e3 = gfc_copy_expr (e3);
1583 t = gfc_resolve_expr (e4);
1584 gcc_assert (t);
1587 /* Create the init statement list. */
1588 pushlevel ();
1589 if (e2)
1590 stmt = gfc_trans_assignment (e1, e2, false, false);
1591 else
1592 stmt = gfc_trans_call (n->udr->initializer, false,
1593 NULL_TREE, NULL_TREE, false);
1594 if (TREE_CODE (stmt) != BIND_EXPR)
1595 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1596 else
1597 poplevel (0, 0);
1598 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
1600 /* Create the merge statement list. */
1601 pushlevel ();
1602 if (e4)
1603 stmt = gfc_trans_assignment (e3, e4, false, true);
1604 else
1605 stmt = gfc_trans_call (n->udr->combiner, false,
1606 NULL_TREE, NULL_TREE, false);
1607 if (TREE_CODE (stmt) != BIND_EXPR)
1608 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1609 else
1610 poplevel (0, 0);
1611 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
1613 /* And stick the placeholder VAR_DECL into the clause as well. */
1614 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;
1616 gfc_current_locus = old_loc;
1618 gfc_free_expr (e1);
1619 if (e2)
1620 gfc_free_expr (e2);
1621 gfc_free_expr (e3);
1622 if (e4)
1623 gfc_free_expr (e4);
1624 free (symtree1);
1625 free (symtree2);
1626 free (symtree3);
1627 free (symtree4);
1628 if (outer_sym.as)
1629 gfc_free_array_spec (outer_sym.as);
1631 if (udr)
1633 *udr->omp_out = omp_var_copy[0];
1634 *udr->omp_in = omp_var_copy[1];
1635 if (udr->initializer_ns)
1637 *udr->omp_priv = omp_var_copy[2];
1638 *udr->omp_orig = omp_var_copy[3];
1643 static tree
1644 gfc_trans_omp_reduction_list (gfc_omp_namelist *namelist, tree list,
1645 locus where)
1647 for (; namelist != NULL; namelist = namelist->next)
1648 if (namelist->sym->attr.referenced)
1650 tree t = gfc_trans_omp_variable (namelist->sym, false);
1651 if (t != error_mark_node)
1653 tree node = build_omp_clause (where.lb->location,
1654 OMP_CLAUSE_REDUCTION);
1655 OMP_CLAUSE_DECL (node) = t;
1656 switch (namelist->u.reduction_op)
1658 case OMP_REDUCTION_PLUS:
1659 OMP_CLAUSE_REDUCTION_CODE (node) = PLUS_EXPR;
1660 break;
1661 case OMP_REDUCTION_MINUS:
1662 OMP_CLAUSE_REDUCTION_CODE (node) = MINUS_EXPR;
1663 break;
1664 case OMP_REDUCTION_TIMES:
1665 OMP_CLAUSE_REDUCTION_CODE (node) = MULT_EXPR;
1666 break;
1667 case OMP_REDUCTION_AND:
1668 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ANDIF_EXPR;
1669 break;
1670 case OMP_REDUCTION_OR:
1671 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ORIF_EXPR;
1672 break;
1673 case OMP_REDUCTION_EQV:
1674 OMP_CLAUSE_REDUCTION_CODE (node) = EQ_EXPR;
1675 break;
1676 case OMP_REDUCTION_NEQV:
1677 OMP_CLAUSE_REDUCTION_CODE (node) = NE_EXPR;
1678 break;
1679 case OMP_REDUCTION_MAX:
1680 OMP_CLAUSE_REDUCTION_CODE (node) = MAX_EXPR;
1681 break;
1682 case OMP_REDUCTION_MIN:
1683 OMP_CLAUSE_REDUCTION_CODE (node) = MIN_EXPR;
1684 break;
1685 case OMP_REDUCTION_IAND:
1686 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_AND_EXPR;
1687 break;
1688 case OMP_REDUCTION_IOR:
1689 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_IOR_EXPR;
1690 break;
1691 case OMP_REDUCTION_IEOR:
1692 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_XOR_EXPR;
1693 break;
1694 case OMP_REDUCTION_USER:
1695 OMP_CLAUSE_REDUCTION_CODE (node) = ERROR_MARK;
1696 break;
1697 default:
1698 gcc_unreachable ();
1700 if (namelist->sym->attr.dimension
1701 || namelist->u.reduction_op == OMP_REDUCTION_USER
1702 || namelist->sym->attr.allocatable)
1703 gfc_trans_omp_array_reduction_or_udr (node, namelist, where);
1704 list = gfc_trans_add_clause (node, list);
1707 return list;
1710 static tree
1711 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
1712 locus where, bool declare_simd = false)
1714 tree omp_clauses = NULL_TREE, chunk_size, c;
1715 int list;
1716 enum omp_clause_code clause_code;
1717 gfc_se se;
1719 if (clauses == NULL)
1720 return NULL_TREE;
1722 for (list = 0; list < OMP_LIST_NUM; list++)
1724 gfc_omp_namelist *n = clauses->lists[list];
1726 if (n == NULL)
1727 continue;
1728 switch (list)
1730 case OMP_LIST_REDUCTION:
1731 omp_clauses = gfc_trans_omp_reduction_list (n, omp_clauses, where);
1732 break;
1733 case OMP_LIST_PRIVATE:
1734 clause_code = OMP_CLAUSE_PRIVATE;
1735 goto add_clause;
1736 case OMP_LIST_SHARED:
1737 clause_code = OMP_CLAUSE_SHARED;
1738 goto add_clause;
1739 case OMP_LIST_FIRSTPRIVATE:
1740 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
1741 goto add_clause;
1742 case OMP_LIST_LASTPRIVATE:
1743 clause_code = OMP_CLAUSE_LASTPRIVATE;
1744 goto add_clause;
1745 case OMP_LIST_COPYIN:
1746 clause_code = OMP_CLAUSE_COPYIN;
1747 goto add_clause;
1748 case OMP_LIST_COPYPRIVATE:
1749 clause_code = OMP_CLAUSE_COPYPRIVATE;
1750 goto add_clause;
1751 case OMP_LIST_UNIFORM:
1752 clause_code = OMP_CLAUSE_UNIFORM;
1753 /* FALLTHROUGH */
1754 add_clause:
1755 omp_clauses
1756 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses,
1757 declare_simd);
1758 break;
1759 case OMP_LIST_ALIGNED:
1760 for (; n != NULL; n = n->next)
1761 if (n->sym->attr.referenced || declare_simd)
1763 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1764 if (t != error_mark_node)
1766 tree node = build_omp_clause (input_location,
1767 OMP_CLAUSE_ALIGNED);
1768 OMP_CLAUSE_DECL (node) = t;
1769 if (n->expr)
1771 tree alignment_var;
1773 if (block == NULL)
1774 alignment_var = gfc_conv_constant_to_tree (n->expr);
1775 else
1777 gfc_init_se (&se, NULL);
1778 gfc_conv_expr (&se, n->expr);
1779 gfc_add_block_to_block (block, &se.pre);
1780 alignment_var = gfc_evaluate_now (se.expr, block);
1781 gfc_add_block_to_block (block, &se.post);
1783 OMP_CLAUSE_ALIGNED_ALIGNMENT (node) = alignment_var;
1785 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1788 break;
1789 case OMP_LIST_LINEAR:
1791 gfc_expr *last_step_expr = NULL;
1792 tree last_step = NULL_TREE;
1794 for (; n != NULL; n = n->next)
1796 if (n->expr)
1798 last_step_expr = n->expr;
1799 last_step = NULL_TREE;
1801 if (n->sym->attr.referenced || declare_simd)
1803 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1804 if (t != error_mark_node)
1806 tree node = build_omp_clause (input_location,
1807 OMP_CLAUSE_LINEAR);
1808 OMP_CLAUSE_DECL (node) = t;
1809 if (last_step_expr && last_step == NULL_TREE)
1811 if (block == NULL)
1812 last_step
1813 = gfc_conv_constant_to_tree (last_step_expr);
1814 else
1816 gfc_init_se (&se, NULL);
1817 gfc_conv_expr (&se, last_step_expr);
1818 gfc_add_block_to_block (block, &se.pre);
1819 last_step = gfc_evaluate_now (se.expr, block);
1820 gfc_add_block_to_block (block, &se.post);
1823 OMP_CLAUSE_LINEAR_STEP (node)
1824 = fold_convert (gfc_typenode_for_spec (&n->sym->ts),
1825 last_step);
1826 if (n->sym->attr.dimension || n->sym->attr.allocatable)
1827 OMP_CLAUSE_LINEAR_ARRAY (node) = 1;
1828 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1833 break;
1834 case OMP_LIST_DEPEND:
1835 for (; n != NULL; n = n->next)
1837 if (!n->sym->attr.referenced)
1838 continue;
1840 tree node = build_omp_clause (input_location, OMP_CLAUSE_DEPEND);
1841 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1843 tree decl = gfc_get_symbol_decl (n->sym);
1844 if (gfc_omp_privatize_by_reference (decl))
1845 decl = build_fold_indirect_ref (decl);
1846 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1848 decl = gfc_conv_descriptor_data_get (decl);
1849 decl = fold_convert (build_pointer_type (char_type_node),
1850 decl);
1851 decl = build_fold_indirect_ref (decl);
1853 else if (DECL_P (decl))
1854 TREE_ADDRESSABLE (decl) = 1;
1855 OMP_CLAUSE_DECL (node) = decl;
1857 else
1859 tree ptr;
1860 gfc_init_se (&se, NULL);
1861 if (n->expr->ref->u.ar.type == AR_ELEMENT)
1863 gfc_conv_expr_reference (&se, n->expr);
1864 ptr = se.expr;
1866 else
1868 gfc_conv_expr_descriptor (&se, n->expr);
1869 ptr = gfc_conv_array_data (se.expr);
1871 gfc_add_block_to_block (block, &se.pre);
1872 gfc_add_block_to_block (block, &se.post);
1873 ptr = fold_convert (build_pointer_type (char_type_node),
1874 ptr);
1875 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
1877 switch (n->u.depend_op)
1879 case OMP_DEPEND_IN:
1880 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_IN;
1881 break;
1882 case OMP_DEPEND_OUT:
1883 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_OUT;
1884 break;
1885 case OMP_DEPEND_INOUT:
1886 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_INOUT;
1887 break;
1888 default:
1889 gcc_unreachable ();
1891 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1893 break;
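/* For illustration: given
       !$omp task depend(in: a)        ! whole array
       !$omp task depend(out: b(5))    ! single element
   the first form takes the n->expr == NULL / AR_FULL path above and
   depends on the (dereferenced) data pointer of A, while the second is
   converted with gfc_conv_expr_reference so the clause names just that
   element; OMP_CLAUSE_DEPEND_KIND is then IN, OUT or INOUT as written.  */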
1894 case OMP_LIST_MAP:
1895 for (; n != NULL; n = n->next)
1897 if (!n->sym->attr.referenced)
1898 continue;
1900 tree node = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1901 tree node2 = NULL_TREE;
1902 tree node3 = NULL_TREE;
1903 tree node4 = NULL_TREE;
1904 tree decl = gfc_get_symbol_decl (n->sym);
1905 if (DECL_P (decl))
1906 TREE_ADDRESSABLE (decl) = 1;
1907 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1909 if (POINTER_TYPE_P (TREE_TYPE (decl))
1910 && (gfc_omp_privatize_by_reference (decl)
1911 || GFC_DECL_GET_SCALAR_POINTER (decl)
1912 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
1913 || GFC_DECL_CRAY_POINTEE (decl)
1914 || GFC_DESCRIPTOR_TYPE_P
1915 (TREE_TYPE (TREE_TYPE (decl)))))
1917 tree orig_decl = decl;
1918 node4 = build_omp_clause (input_location,
1919 OMP_CLAUSE_MAP);
1920 OMP_CLAUSE_MAP_KIND (node4) = OMP_CLAUSE_MAP_POINTER;
1921 OMP_CLAUSE_DECL (node4) = decl;
1922 OMP_CLAUSE_SIZE (node4) = size_int (0);
1923 decl = build_fold_indirect_ref (decl);
1924 if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
1925 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
1926 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
1928 node3 = build_omp_clause (input_location,
1929 OMP_CLAUSE_MAP);
1930 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
1931 OMP_CLAUSE_DECL (node3) = decl;
1932 OMP_CLAUSE_SIZE (node3) = size_int (0);
1933 decl = build_fold_indirect_ref (decl);
1936 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1938 tree type = TREE_TYPE (decl);
1939 tree ptr = gfc_conv_descriptor_data_get (decl);
1940 ptr = fold_convert (build_pointer_type (char_type_node),
1941 ptr);
1942 ptr = build_fold_indirect_ref (ptr);
1943 OMP_CLAUSE_DECL (node) = ptr;
1944 node2 = build_omp_clause (input_location,
1945 OMP_CLAUSE_MAP);
1946 OMP_CLAUSE_MAP_KIND (node2) = OMP_CLAUSE_MAP_TO_PSET;
1947 OMP_CLAUSE_DECL (node2) = decl;
1948 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
1949 node3 = build_omp_clause (input_location,
1950 OMP_CLAUSE_MAP);
1951 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
1952 OMP_CLAUSE_DECL (node3)
1953 = gfc_conv_descriptor_data_get (decl);
1954 OMP_CLAUSE_SIZE (node3) = size_int (0);
1955 if (n->sym->attr.pointer)
1957 stmtblock_t cond_block;
1958 tree size
1959 = gfc_create_var (gfc_array_index_type, NULL);
1960 tree tem, then_b, else_b, zero, cond;
1962 gfc_init_block (&cond_block);
1963 tem
1964 = gfc_full_array_size (&cond_block, decl,
1965 GFC_TYPE_ARRAY_RANK (type));
1966 gfc_add_modify (&cond_block, size, tem);
1967 then_b = gfc_finish_block (&cond_block);
1968 gfc_init_block (&cond_block);
1969 zero = build_int_cst (gfc_array_index_type, 0);
1970 gfc_add_modify (&cond_block, size, zero);
1971 else_b = gfc_finish_block (&cond_block);
1972 tem = gfc_conv_descriptor_data_get (decl);
1973 tem = fold_convert (pvoid_type_node, tem);
1974 cond = fold_build2_loc (input_location, NE_EXPR,
1975 boolean_type_node,
1976 tem, null_pointer_node);
1977 gfc_add_expr_to_block (block,
1978 build3_loc (input_location,
1979 COND_EXPR,
1980 void_type_node,
1981 cond, then_b,
1982 else_b));
1983 OMP_CLAUSE_SIZE (node) = size;
1985 else
1986 OMP_CLAUSE_SIZE (node)
1987 = gfc_full_array_size (block, decl,
1988 GFC_TYPE_ARRAY_RANK (type));
1989 tree elemsz
1990 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
1991 elemsz = fold_convert (gfc_array_index_type, elemsz);
1992 OMP_CLAUSE_SIZE (node)
1993 = fold_build2 (MULT_EXPR, gfc_array_index_type,
1994 OMP_CLAUSE_SIZE (node), elemsz);
1996 else
1997 OMP_CLAUSE_DECL (node) = decl;
1999 else
2001 tree ptr, ptr2;
2002 gfc_init_se (&se, NULL);
2003 if (n->expr->ref->u.ar.type == AR_ELEMENT)
2005 gfc_conv_expr_reference (&se, n->expr);
2006 gfc_add_block_to_block (block, &se.pre);
2007 ptr = se.expr;
2008 OMP_CLAUSE_SIZE (node)
2009 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
2011 else
2013 gfc_conv_expr_descriptor (&se, n->expr);
2014 ptr = gfc_conv_array_data (se.expr);
2015 tree type = TREE_TYPE (se.expr);
2016 gfc_add_block_to_block (block, &se.pre);
2017 OMP_CLAUSE_SIZE (node)
2018 = gfc_full_array_size (block, se.expr,
2019 GFC_TYPE_ARRAY_RANK (type));
2020 tree elemsz
2021 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2022 elemsz = fold_convert (gfc_array_index_type, elemsz);
2023 OMP_CLAUSE_SIZE (node)
2024 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2025 OMP_CLAUSE_SIZE (node), elemsz);
2027 gfc_add_block_to_block (block, &se.post);
2028 ptr = fold_convert (build_pointer_type (char_type_node),
2029 ptr);
2030 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
2032 if (POINTER_TYPE_P (TREE_TYPE (decl))
2033 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
2035 node4 = build_omp_clause (input_location,
2036 OMP_CLAUSE_MAP);
2037 OMP_CLAUSE_MAP_KIND (node4) = OMP_CLAUSE_MAP_POINTER;
2038 OMP_CLAUSE_DECL (node4) = decl;
2039 OMP_CLAUSE_SIZE (node4) = size_int (0);
2040 decl = build_fold_indirect_ref (decl);
2042 ptr = fold_convert (sizetype, ptr);
2043 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2045 tree type = TREE_TYPE (decl);
2046 ptr2 = gfc_conv_descriptor_data_get (decl);
2047 node2 = build_omp_clause (input_location,
2048 OMP_CLAUSE_MAP);
2049 OMP_CLAUSE_MAP_KIND (node2) = OMP_CLAUSE_MAP_TO_PSET;
2050 OMP_CLAUSE_DECL (node2) = decl;
2051 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
2052 node3 = build_omp_clause (input_location,
2053 OMP_CLAUSE_MAP);
2054 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
2055 OMP_CLAUSE_DECL (node3)
2056 = gfc_conv_descriptor_data_get (decl);
2058 else
2060 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
2061 ptr2 = build_fold_addr_expr (decl);
2062 else
2064 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl)));
2065 ptr2 = decl;
2067 node3 = build_omp_clause (input_location,
2068 OMP_CLAUSE_MAP);
2069 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
2070 OMP_CLAUSE_DECL (node3) = decl;
2072 ptr2 = fold_convert (sizetype, ptr2);
2073 OMP_CLAUSE_SIZE (node3)
2074 = fold_build2 (MINUS_EXPR, sizetype, ptr, ptr2);
2076 switch (n->u.map_op)
2078 case OMP_MAP_ALLOC:
2079 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_ALLOC;
2080 break;
2081 case OMP_MAP_TO:
2082 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_TO;
2083 break;
2084 case OMP_MAP_FROM:
2085 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_FROM;
2086 break;
2087 case OMP_MAP_TOFROM:
2088 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_TOFROM;
2089 break;
2090 default:
2091 gcc_unreachable ();
2093 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2094 if (node2)
2095 omp_clauses = gfc_trans_add_clause (node2, omp_clauses);
2096 if (node3)
2097 omp_clauses = gfc_trans_add_clause (node3, omp_clauses);
2098 if (node4)
2099 omp_clauses = gfc_trans_add_clause (node4, omp_clauses);
2101 break;
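/* For illustration: mapping an allocatable array, e.g.
       !$omp target map(tofrom: a)
   can produce up to four chained clauses from the code above: NODE maps
   the array data itself (extent times element size), NODE2 maps the
   descriptor with OMP_CLAUSE_MAP_TO_PSET, and NODE3/NODE4 are
   OMP_CLAUSE_MAP_POINTER clauses of size 0 for the descriptor's data
   pointer and for the reference to the descriptor, so only the pointers
   are translated on the device side.  */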
2102 case OMP_LIST_TO:
2103 case OMP_LIST_FROM:
2104 for (; n != NULL; n = n->next)
2106 if (!n->sym->attr.referenced)
2107 continue;
2109 tree node = build_omp_clause (input_location,
2110 list == OMP_LIST_TO
2111 ? OMP_CLAUSE_TO : OMP_CLAUSE_FROM);
2112 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
2114 tree decl = gfc_get_symbol_decl (n->sym);
2115 if (gfc_omp_privatize_by_reference (decl))
2116 decl = build_fold_indirect_ref (decl);
2117 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2119 tree type = TREE_TYPE (decl);
2120 tree ptr = gfc_conv_descriptor_data_get (decl);
2121 ptr = fold_convert (build_pointer_type (char_type_node),
2122 ptr);
2123 ptr = build_fold_indirect_ref (ptr);
2124 OMP_CLAUSE_DECL (node) = ptr;
2125 OMP_CLAUSE_SIZE (node)
2126 = gfc_full_array_size (block, decl,
2127 GFC_TYPE_ARRAY_RANK (type));
2128 tree elemsz
2129 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2130 elemsz = fold_convert (gfc_array_index_type, elemsz);
2131 OMP_CLAUSE_SIZE (node)
2132 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2133 OMP_CLAUSE_SIZE (node), elemsz);
2135 else
2136 OMP_CLAUSE_DECL (node) = decl;
2138 else
2140 tree ptr;
2141 gfc_init_se (&se, NULL);
2142 if (n->expr->ref->u.ar.type == AR_ELEMENT)
2144 gfc_conv_expr_reference (&se, n->expr);
2145 ptr = se.expr;
2146 gfc_add_block_to_block (block, &se.pre);
2147 OMP_CLAUSE_SIZE (node)
2148 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
2150 else
2152 gfc_conv_expr_descriptor (&se, n->expr);
2153 ptr = gfc_conv_array_data (se.expr);
2154 tree type = TREE_TYPE (se.expr);
2155 gfc_add_block_to_block (block, &se.pre);
2156 OMP_CLAUSE_SIZE (node)
2157 = gfc_full_array_size (block, se.expr,
2158 GFC_TYPE_ARRAY_RANK (type));
2159 tree elemsz
2160 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2161 elemsz = fold_convert (gfc_array_index_type, elemsz);
2162 OMP_CLAUSE_SIZE (node)
2163 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2164 OMP_CLAUSE_SIZE (node), elemsz);
2166 gfc_add_block_to_block (block, &se.post);
2167 ptr = fold_convert (build_pointer_type (char_type_node),
2168 ptr);
2169 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
2171 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2173 break;
2174 default:
2175 break;
2179 if (clauses->if_expr)
2181 tree if_var;
2183 gfc_init_se (&se, NULL);
2184 gfc_conv_expr (&se, clauses->if_expr);
2185 gfc_add_block_to_block (block, &se.pre);
2186 if_var = gfc_evaluate_now (se.expr, block);
2187 gfc_add_block_to_block (block, &se.post);
2189 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
2190 OMP_CLAUSE_IF_EXPR (c) = if_var;
2191 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2194 if (clauses->final_expr)
2196 tree final_var;
2198 gfc_init_se (&se, NULL);
2199 gfc_conv_expr (&se, clauses->final_expr);
2200 gfc_add_block_to_block (block, &se.pre);
2201 final_var = gfc_evaluate_now (se.expr, block);
2202 gfc_add_block_to_block (block, &se.post);
2204 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
2205 OMP_CLAUSE_FINAL_EXPR (c) = final_var;
2206 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2209 if (clauses->num_threads)
2211 tree num_threads;
2213 gfc_init_se (&se, NULL);
2214 gfc_conv_expr (&se, clauses->num_threads);
2215 gfc_add_block_to_block (block, &se.pre);
2216 num_threads = gfc_evaluate_now (se.expr, block);
2217 gfc_add_block_to_block (block, &se.post);
2219 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
2220 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
2221 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2224 chunk_size = NULL_TREE;
2225 if (clauses->chunk_size)
2227 gfc_init_se (&se, NULL);
2228 gfc_conv_expr (&se, clauses->chunk_size);
2229 gfc_add_block_to_block (block, &se.pre);
2230 chunk_size = gfc_evaluate_now (se.expr, block);
2231 gfc_add_block_to_block (block, &se.post);
2234 if (clauses->sched_kind != OMP_SCHED_NONE)
2236 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
2237 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2238 switch (clauses->sched_kind)
2240 case OMP_SCHED_STATIC:
2241 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
2242 break;
2243 case OMP_SCHED_DYNAMIC:
2244 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
2245 break;
2246 case OMP_SCHED_GUIDED:
2247 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
2248 break;
2249 case OMP_SCHED_RUNTIME:
2250 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
2251 break;
2252 case OMP_SCHED_AUTO:
2253 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
2254 break;
2255 default:
2256 gcc_unreachable ();
2258 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2261 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
2263 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
2264 switch (clauses->default_sharing)
2266 case OMP_DEFAULT_NONE:
2267 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
2268 break;
2269 case OMP_DEFAULT_SHARED:
2270 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
2271 break;
2272 case OMP_DEFAULT_PRIVATE:
2273 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
2274 break;
2275 case OMP_DEFAULT_FIRSTPRIVATE:
2276 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2277 break;
2278 default:
2279 gcc_unreachable ();
2281 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2284 if (clauses->nowait)
2286 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
2287 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2290 if (clauses->ordered)
2292 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
2293 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2296 if (clauses->untied)
2298 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
2299 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2302 if (clauses->mergeable)
2304 c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
2305 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2308 if (clauses->collapse)
2310 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
2311 OMP_CLAUSE_COLLAPSE_EXPR (c)
2312 = build_int_cst (integer_type_node, clauses->collapse);
2313 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2316 if (clauses->inbranch)
2318 c = build_omp_clause (where.lb->location, OMP_CLAUSE_INBRANCH);
2319 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2322 if (clauses->notinbranch)
2324 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOTINBRANCH);
2325 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2328 switch (clauses->cancel)
2330 case OMP_CANCEL_UNKNOWN:
2331 break;
2332 case OMP_CANCEL_PARALLEL:
2333 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PARALLEL);
2334 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2335 break;
2336 case OMP_CANCEL_SECTIONS:
2337 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SECTIONS);
2338 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2339 break;
2340 case OMP_CANCEL_DO:
2341 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FOR);
2342 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2343 break;
2344 case OMP_CANCEL_TASKGROUP:
2345 c = build_omp_clause (where.lb->location, OMP_CLAUSE_TASKGROUP);
2346 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2347 break;
2350 if (clauses->proc_bind != OMP_PROC_BIND_UNKNOWN)
2352 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PROC_BIND);
2353 switch (clauses->proc_bind)
2355 case OMP_PROC_BIND_MASTER:
2356 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_MASTER;
2357 break;
2358 case OMP_PROC_BIND_SPREAD:
2359 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_SPREAD;
2360 break;
2361 case OMP_PROC_BIND_CLOSE:
2362 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_CLOSE;
2363 break;
2364 default:
2365 gcc_unreachable ();
2367 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2370 if (clauses->safelen_expr)
2372 tree safelen_var;
2374 gfc_init_se (&se, NULL);
2375 gfc_conv_expr (&se, clauses->safelen_expr);
2376 gfc_add_block_to_block (block, &se.pre);
2377 safelen_var = gfc_evaluate_now (se.expr, block);
2378 gfc_add_block_to_block (block, &se.post);
2380 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SAFELEN);
2381 OMP_CLAUSE_SAFELEN_EXPR (c) = safelen_var;
2382 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2385 if (clauses->simdlen_expr)
2387 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SIMDLEN);
2388 OMP_CLAUSE_SIMDLEN_EXPR (c)
2389 = gfc_conv_constant_to_tree (clauses->simdlen_expr);
2390 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2393 if (clauses->num_teams)
2395 tree num_teams;
2397 gfc_init_se (&se, NULL);
2398 gfc_conv_expr (&se, clauses->num_teams);
2399 gfc_add_block_to_block (block, &se.pre);
2400 num_teams = gfc_evaluate_now (se.expr, block);
2401 gfc_add_block_to_block (block, &se.post);
2403 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_TEAMS);
2404 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
2405 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2408 if (clauses->device)
2410 tree device;
2412 gfc_init_se (&se, NULL);
2413 gfc_conv_expr (&se, clauses->device);
2414 gfc_add_block_to_block (block, &se.pre);
2415 device = gfc_evaluate_now (se.expr, block);
2416 gfc_add_block_to_block (block, &se.post);
2418 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEVICE);
2419 OMP_CLAUSE_DEVICE_ID (c) = device;
2420 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2423 if (clauses->thread_limit)
2425 tree thread_limit;
2427 gfc_init_se (&se, NULL);
2428 gfc_conv_expr (&se, clauses->thread_limit);
2429 gfc_add_block_to_block (block, &se.pre);
2430 thread_limit = gfc_evaluate_now (se.expr, block);
2431 gfc_add_block_to_block (block, &se.post);
2433 c = build_omp_clause (where.lb->location, OMP_CLAUSE_THREAD_LIMIT);
2434 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
2435 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2438 chunk_size = NULL_TREE;
2439 if (clauses->dist_chunk_size)
2441 gfc_init_se (&se, NULL);
2442 gfc_conv_expr (&se, clauses->dist_chunk_size);
2443 gfc_add_block_to_block (block, &se.pre);
2444 chunk_size = gfc_evaluate_now (se.expr, block);
2445 gfc_add_block_to_block (block, &se.post);
2448 if (clauses->dist_sched_kind != OMP_SCHED_NONE)
2450 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DIST_SCHEDULE);
2451 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2452 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2455 return nreverse (omp_clauses);
2458 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2460 static tree
2461 gfc_trans_omp_code (gfc_code *code, bool force_empty)
2463 tree stmt;
2465 pushlevel ();
2466 stmt = gfc_trans_code (code);
2467 if (TREE_CODE (stmt) != BIND_EXPR)
2469 if (!IS_EMPTY_STMT (stmt) || force_empty)
2471 tree block = poplevel (1, 0);
2472 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
2474 else
2475 poplevel (0, 0);
2477 else
2478 poplevel (0, 0);
2479 return stmt;
2483 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
2484 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
2486 static tree
2487 gfc_trans_omp_atomic (gfc_code *code)
2489 gfc_code *atomic_code = code;
2490 gfc_se lse;
2491 gfc_se rse;
2492 gfc_se vse;
2493 gfc_expr *expr2, *e;
2494 gfc_symbol *var;
2495 stmtblock_t block;
2496 tree lhsaddr, type, rhs, x;
2497 enum tree_code op = ERROR_MARK;
2498 enum tree_code aop = OMP_ATOMIC;
2499 bool var_on_left = false;
2500 bool seq_cst = (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SEQ_CST) != 0;
2502 code = code->block->next;
2503 gcc_assert (code->op == EXEC_ASSIGN);
2504 var = code->expr1->symtree->n.sym;
2506 gfc_init_se (&lse, NULL);
2507 gfc_init_se (&rse, NULL);
2508 gfc_init_se (&vse, NULL);
2509 gfc_start_block (&block);
2511 expr2 = code->expr2;
2512 if (expr2->expr_type == EXPR_FUNCTION
2513 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2514 expr2 = expr2->value.function.actual->expr;
2516 switch (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2518 case GFC_OMP_ATOMIC_READ:
2519 gfc_conv_expr (&vse, code->expr1);
2520 gfc_add_block_to_block (&block, &vse.pre);
2522 gfc_conv_expr (&lse, expr2);
2523 gfc_add_block_to_block (&block, &lse.pre);
2524 type = TREE_TYPE (lse.expr);
2525 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2527 x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
2528 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2529 x = convert (TREE_TYPE (vse.expr), x);
2530 gfc_add_modify (&block, vse.expr, x);
2532 gfc_add_block_to_block (&block, &lse.pre);
2533 gfc_add_block_to_block (&block, &rse.pre);
2535 return gfc_finish_block (&block);
2536 case GFC_OMP_ATOMIC_CAPTURE:
2537 aop = OMP_ATOMIC_CAPTURE_NEW;
2538 if (expr2->expr_type == EXPR_VARIABLE)
2540 aop = OMP_ATOMIC_CAPTURE_OLD;
2541 gfc_conv_expr (&vse, code->expr1);
2542 gfc_add_block_to_block (&block, &vse.pre);
2544 gfc_conv_expr (&lse, expr2);
2545 gfc_add_block_to_block (&block, &lse.pre);
2546 gfc_init_se (&lse, NULL);
2547 code = code->next;
2548 var = code->expr1->symtree->n.sym;
2549 expr2 = code->expr2;
2550 if (expr2->expr_type == EXPR_FUNCTION
2551 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2552 expr2 = expr2->value.function.actual->expr;
2554 break;
2555 default:
2556 break;
2559 gfc_conv_expr (&lse, code->expr1);
2560 gfc_add_block_to_block (&block, &lse.pre);
2561 type = TREE_TYPE (lse.expr);
2562 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2564 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2565 == GFC_OMP_ATOMIC_WRITE)
2566 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2568 gfc_conv_expr (&rse, expr2);
2569 gfc_add_block_to_block (&block, &rse.pre);
2571 else if (expr2->expr_type == EXPR_OP)
2573 gfc_expr *e;
2574 switch (expr2->value.op.op)
2576 case INTRINSIC_PLUS:
2577 op = PLUS_EXPR;
2578 break;
2579 case INTRINSIC_TIMES:
2580 op = MULT_EXPR;
2581 break;
2582 case INTRINSIC_MINUS:
2583 op = MINUS_EXPR;
2584 break;
2585 case INTRINSIC_DIVIDE:
2586 if (expr2->ts.type == BT_INTEGER)
2587 op = TRUNC_DIV_EXPR;
2588 else
2589 op = RDIV_EXPR;
2590 break;
2591 case INTRINSIC_AND:
2592 op = TRUTH_ANDIF_EXPR;
2593 break;
2594 case INTRINSIC_OR:
2595 op = TRUTH_ORIF_EXPR;
2596 break;
2597 case INTRINSIC_EQV:
2598 op = EQ_EXPR;
2599 break;
2600 case INTRINSIC_NEQV:
2601 op = NE_EXPR;
2602 break;
2603 default:
2604 gcc_unreachable ();
2606 e = expr2->value.op.op1;
2607 if (e->expr_type == EXPR_FUNCTION
2608 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2609 e = e->value.function.actual->expr;
2610 if (e->expr_type == EXPR_VARIABLE
2611 && e->symtree != NULL
2612 && e->symtree->n.sym == var)
2614 expr2 = expr2->value.op.op2;
2615 var_on_left = true;
2617 else
2619 e = expr2->value.op.op2;
2620 if (e->expr_type == EXPR_FUNCTION
2621 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2622 e = e->value.function.actual->expr;
2623 gcc_assert (e->expr_type == EXPR_VARIABLE
2624 && e->symtree != NULL
2625 && e->symtree->n.sym == var);
2626 expr2 = expr2->value.op.op1;
2627 var_on_left = false;
2629 gfc_conv_expr (&rse, expr2);
2630 gfc_add_block_to_block (&block, &rse.pre);
2632 else
2634 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
2635 switch (expr2->value.function.isym->id)
2637 case GFC_ISYM_MIN:
2638 op = MIN_EXPR;
2639 break;
2640 case GFC_ISYM_MAX:
2641 op = MAX_EXPR;
2642 break;
2643 case GFC_ISYM_IAND:
2644 op = BIT_AND_EXPR;
2645 break;
2646 case GFC_ISYM_IOR:
2647 op = BIT_IOR_EXPR;
2648 break;
2649 case GFC_ISYM_IEOR:
2650 op = BIT_XOR_EXPR;
2651 break;
2652 default:
2653 gcc_unreachable ();
2655 e = expr2->value.function.actual->expr;
2656 gcc_assert (e->expr_type == EXPR_VARIABLE
2657 && e->symtree != NULL
2658 && e->symtree->n.sym == var);
2660 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
2661 gfc_add_block_to_block (&block, &rse.pre);
2662 if (expr2->value.function.actual->next->next != NULL)
2664 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
2665 gfc_actual_arglist *arg;
2667 gfc_add_modify (&block, accum, rse.expr);
2668 for (arg = expr2->value.function.actual->next->next; arg;
2669 arg = arg->next)
2671 gfc_init_block (&rse.pre);
2672 gfc_conv_expr (&rse, arg->expr);
2673 gfc_add_block_to_block (&block, &rse.pre);
2674 x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
2675 accum, rse.expr);
2676 gfc_add_modify (&block, accum, x);
2679 rse.expr = accum;
2682 expr2 = expr2->value.function.actual->next->expr;
2685 lhsaddr = save_expr (lhsaddr);
2686 if (TREE_CODE (lhsaddr) != SAVE_EXPR
2687 && (TREE_CODE (lhsaddr) != ADDR_EXPR
2688 || TREE_CODE (TREE_OPERAND (lhsaddr, 0)) != VAR_DECL))
2690 /* Make sure the LHS is simple enough so that goa_lhs_expr_p can recognize
2691 it even after unsharing the function body. */
2692 tree var = create_tmp_var_raw (TREE_TYPE (lhsaddr));
2693 DECL_CONTEXT (var) = current_function_decl;
2694 lhsaddr = build4 (TARGET_EXPR, TREE_TYPE (lhsaddr), var, lhsaddr,
2695 NULL_TREE, NULL_TREE);
2698 rhs = gfc_evaluate_now (rse.expr, &block);
2700 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2701 == GFC_OMP_ATOMIC_WRITE)
2702 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2703 x = rhs;
2704 else
2706 x = convert (TREE_TYPE (rhs),
2707 build_fold_indirect_ref_loc (input_location, lhsaddr));
2708 if (var_on_left)
2709 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
2710 else
2711 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
2714 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
2715 && TREE_CODE (type) != COMPLEX_TYPE)
2716 x = fold_build1_loc (input_location, REALPART_EXPR,
2717 TREE_TYPE (TREE_TYPE (rhs)), x);
2719 gfc_add_block_to_block (&block, &lse.pre);
2720 gfc_add_block_to_block (&block, &rse.pre);
2722 if (aop == OMP_ATOMIC)
2724 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
2725 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2726 gfc_add_expr_to_block (&block, x);
2728 else
2730 if (aop == OMP_ATOMIC_CAPTURE_NEW)
2732 code = code->next;
2733 expr2 = code->expr2;
2734 if (expr2->expr_type == EXPR_FUNCTION
2735 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2736 expr2 = expr2->value.function.actual->expr;
2738 gcc_assert (expr2->expr_type == EXPR_VARIABLE);
2739 gfc_conv_expr (&vse, code->expr1);
2740 gfc_add_block_to_block (&block, &vse.pre);
2742 gfc_init_se (&lse, NULL);
2743 gfc_conv_expr (&lse, expr2);
2744 gfc_add_block_to_block (&block, &lse.pre);
2746 x = build2 (aop, type, lhsaddr, convert (type, x));
2747 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2748 x = convert (TREE_TYPE (vse.expr), x);
2749 gfc_add_modify (&block, vse.expr, x);
2752 return gfc_finish_block (&block);
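/* For illustration, the Fortran forms handled above map roughly as:
       !$omp atomic read       v = x                -> OMP_ATOMIC_READ
       !$omp atomic write      x = expr             -> OMP_ATOMIC store
       !$omp atomic (update)   x = x + expr         -> OMP_ATOMIC read-modify-write
       !$omp atomic capture    v = x;  x = x + expr -> OMP_ATOMIC_CAPTURE_OLD
                               x = x + expr;  v = x -> OMP_ATOMIC_CAPTURE_NEW
   with VAR_ON_LEFT recording on which side of the (possibly
   non-commutative) operation the updated variable appears.  */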
2755 static tree
2756 gfc_trans_omp_barrier (void)
2758 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
2759 return build_call_expr_loc (input_location, decl, 0);
2762 static tree
2763 gfc_trans_omp_cancel (gfc_code *code)
2765 int mask = 0;
2766 tree ifc = boolean_true_node;
2767 stmtblock_t block;
2768 switch (code->ext.omp_clauses->cancel)
2770 case OMP_CANCEL_PARALLEL: mask = 1; break;
2771 case OMP_CANCEL_DO: mask = 2; break;
2772 case OMP_CANCEL_SECTIONS: mask = 4; break;
2773 case OMP_CANCEL_TASKGROUP: mask = 8; break;
2774 default: gcc_unreachable ();
2776 gfc_start_block (&block);
2777 if (code->ext.omp_clauses->if_expr)
2779 gfc_se se;
2780 tree if_var;
2782 gfc_init_se (&se, NULL);
2783 gfc_conv_expr (&se, code->ext.omp_clauses->if_expr);
2784 gfc_add_block_to_block (&block, &se.pre);
2785 if_var = gfc_evaluate_now (se.expr, &block);
2786 gfc_add_block_to_block (&block, &se.post);
2787 tree type = TREE_TYPE (if_var);
2788 ifc = fold_build2_loc (input_location, NE_EXPR,
2789 boolean_type_node, if_var,
2790 build_zero_cst (type));
2792 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
2793 tree c_bool_type = TREE_TYPE (TREE_TYPE (decl));
2794 ifc = fold_convert (c_bool_type, ifc);
2795 gfc_add_expr_to_block (&block,
2796 build_call_expr_loc (input_location, decl, 2,
2797 build_int_cst (integer_type_node,
2798 mask), ifc));
2799 return gfc_finish_block (&block);
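/* For illustration: the MASK values 1/2/4/8 chosen above are the
   construct-kind flags passed to the GOMP_cancel and
   GOMP_cancellation_point library calls, and for
       !$omp cancel do if (cond)
   the IF expression is evaluated into IFC and passed as the boolean
   second argument of GOMP_cancel, so cancellation is requested only
   when COND is true.  */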
2802 static tree
2803 gfc_trans_omp_cancellation_point (gfc_code *code)
2805 int mask = 0;
2806 switch (code->ext.omp_clauses->cancel)
2808 case OMP_CANCEL_PARALLEL: mask = 1; break;
2809 case OMP_CANCEL_DO: mask = 2; break;
2810 case OMP_CANCEL_SECTIONS: mask = 4; break;
2811 case OMP_CANCEL_TASKGROUP: mask = 8; break;
2812 default: gcc_unreachable ();
2814 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
2815 return build_call_expr_loc (input_location, decl, 1,
2816 build_int_cst (integer_type_node, mask));
2819 static tree
2820 gfc_trans_omp_critical (gfc_code *code)
2822 tree name = NULL_TREE, stmt;
2823 if (code->ext.omp_name != NULL)
2824 name = get_identifier (code->ext.omp_name);
2825 stmt = gfc_trans_code (code->block->next);
2826 return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
2829 typedef struct dovar_init_d {
2830 tree var;
2831 tree init;
2832 } dovar_init;
2835 static tree
2836 gfc_trans_omp_do (gfc_code *code, gfc_exec_op op, stmtblock_t *pblock,
2837 gfc_omp_clauses *do_clauses, tree par_clauses)
2839 gfc_se se;
2840 tree dovar, stmt, from, to, step, type, init, cond, incr;
2841 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
2842 stmtblock_t block;
2843 stmtblock_t body;
2844 gfc_omp_clauses *clauses = code->ext.omp_clauses;
2845 int i, collapse = clauses->collapse;
2846 vec<dovar_init> inits = vNULL;
2847 dovar_init *di;
2848 unsigned ix;
2850 if (collapse <= 0)
2851 collapse = 1;
2853 code = code->block->next;
2854 gcc_assert (code->op == EXEC_DO);
2856 init = make_tree_vec (collapse);
2857 cond = make_tree_vec (collapse);
2858 incr = make_tree_vec (collapse);
2860 if (pblock == NULL)
2862 gfc_start_block (&block);
2863 pblock = &block;
2866 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
2868 for (i = 0; i < collapse; i++)
2870 int simple = 0;
2871 int dovar_found = 0;
2872 tree dovar_decl;
2874 if (clauses)
2876 gfc_omp_namelist *n = NULL;
2877 if (op != EXEC_OMP_DISTRIBUTE)
2878 for (n = clauses->lists[(op == EXEC_OMP_SIMD && collapse == 1)
2879 ? OMP_LIST_LINEAR : OMP_LIST_LASTPRIVATE];
2880 n != NULL; n = n->next)
2881 if (code->ext.iterator->var->symtree->n.sym == n->sym)
2882 break;
2883 if (n != NULL)
2884 dovar_found = 1;
2885 else if (n == NULL && op != EXEC_OMP_SIMD)
2886 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
2887 if (code->ext.iterator->var->symtree->n.sym == n->sym)
2888 break;
2889 if (n != NULL)
2890 dovar_found++;
2893 /* Evaluate all the expressions in the iterator. */
2894 gfc_init_se (&se, NULL);
2895 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
2896 gfc_add_block_to_block (pblock, &se.pre);
2897 dovar = se.expr;
2898 type = TREE_TYPE (dovar);
2899 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
2901 gfc_init_se (&se, NULL);
2902 gfc_conv_expr_val (&se, code->ext.iterator->start);
2903 gfc_add_block_to_block (pblock, &se.pre);
2904 from = gfc_evaluate_now (se.expr, pblock);
2906 gfc_init_se (&se, NULL);
2907 gfc_conv_expr_val (&se, code->ext.iterator->end);
2908 gfc_add_block_to_block (pblock, &se.pre);
2909 to = gfc_evaluate_now (se.expr, pblock);
2911 gfc_init_se (&se, NULL);
2912 gfc_conv_expr_val (&se, code->ext.iterator->step);
2913 gfc_add_block_to_block (pblock, &se.pre);
2914 step = gfc_evaluate_now (se.expr, pblock);
2915 dovar_decl = dovar;
2917 /* Special case simple loops. */
2918 if (TREE_CODE (dovar) == VAR_DECL)
2920 if (integer_onep (step))
2921 simple = 1;
2922 else if (tree_int_cst_equal (step, integer_minus_one_node))
2923 simple = -1;
2925 else
2926 dovar_decl
2927 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym,
2928 false);
2930 /* Loop body. */
2931 if (simple)
2933 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
2934 /* The condition should not be folded. */
2935 TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
2936 ? LE_EXPR : GE_EXPR,
2937 boolean_type_node, dovar, to);
2938 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
2939 type, dovar, step);
2940 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
2941 MODIFY_EXPR,
2942 type, dovar,
2943 TREE_VEC_ELT (incr, i));
2945 else
2947 /* STEP is not 1 or -1. Use:
2948 for (count = 0; count < (to + step - from) / step; count++)
2950 dovar = from + count * step;
2951 body;
2952 cycle_label:;
2953 } */
2954 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
2955 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
2956 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
2957 step);
2958 tmp = gfc_evaluate_now (tmp, pblock);
2959 count = gfc_create_var (type, "count");
2960 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
2961 build_int_cst (type, 0));
2962 /* The condition should not be folded. */
2963 TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
2964 boolean_type_node,
2965 count, tmp);
2966 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
2967 type, count,
2968 build_int_cst (type, 1));
2969 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
2970 MODIFY_EXPR, type, count,
2971 TREE_VEC_ELT (incr, i));
2973 /* Initialize DOVAR. */
2974 tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
2975 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
2976 dovar_init e = {dovar, tmp};
2977 inits.safe_push (e);
2980 if (!dovar_found)
2982 if (op == EXEC_OMP_SIMD)
2984 if (collapse == 1)
2986 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
2987 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
2989 else
2990 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
2991 if (!simple)
2992 dovar_found = 2;
2994 else
2995 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
2996 OMP_CLAUSE_DECL (tmp) = dovar_decl;
2997 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
2999 if (dovar_found == 2)
3001 tree c = NULL;
3003 tmp = NULL;
3004 if (!simple)
3006 /* If dovar is lastprivate, but a different counter is used,
3007 dovar += step needs to be added to
3008 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3009 will have the value on entry to the last iteration, rather
3010 than the value after the iterator increment. */
3011 tmp = gfc_evaluate_now (step, pblock);
3012 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
3013 tmp);
3014 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
3015 dovar, tmp);
3016 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3017 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3018 && OMP_CLAUSE_DECL (c) == dovar_decl)
3020 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
3021 break;
3023 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3024 && OMP_CLAUSE_DECL (c) == dovar_decl)
3026 OMP_CLAUSE_LINEAR_STMT (c) = tmp;
3027 break;
3030 if (c == NULL && op == EXEC_OMP_DO && par_clauses != NULL)
3032 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3033 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3034 && OMP_CLAUSE_DECL (c) == dovar_decl)
3036 tree l = build_omp_clause (input_location,
3037 OMP_CLAUSE_LASTPRIVATE);
3038 OMP_CLAUSE_DECL (l) = dovar_decl;
3039 OMP_CLAUSE_CHAIN (l) = omp_clauses;
3040 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
3041 omp_clauses = l;
3042 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
3043 break;
3046 gcc_assert (simple || c != NULL);
3048 if (!simple)
3050 if (op != EXEC_OMP_SIMD)
3051 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
3052 else if (collapse == 1)
3054 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
3055 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
3056 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp) = 1;
3057 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp) = 1;
3059 else
3060 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
3061 OMP_CLAUSE_DECL (tmp) = count;
3062 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
3065 if (i + 1 < collapse)
3066 code = code->block->next;
3069 if (pblock != &block)
3071 pushlevel ();
3072 gfc_start_block (&block);
3075 gfc_start_block (&body);
3077 FOR_EACH_VEC_ELT (inits, ix, di)
3078 gfc_add_modify (&body, di->var, di->init);
3079 inits.release ();
3081 /* Cycle statement is implemented with a goto. Exit statement must not be
3082 present for this loop. */
3083 cycle_label = gfc_build_label_decl (NULL_TREE);
3085 /* Put these labels where they can be found later. */
3087 code->cycle_label = cycle_label;
3088 code->exit_label = NULL_TREE;
3090 /* Main loop body. */
3091 tmp = gfc_trans_omp_code (code->block->next, true);
3092 gfc_add_expr_to_block (&body, tmp);
3094 /* Label for cycle statements (if needed). */
3095 if (TREE_USED (cycle_label))
3097 tmp = build1_v (LABEL_EXPR, cycle_label);
3098 gfc_add_expr_to_block (&body, tmp);
3101 /* End of loop body. */
3102 switch (op)
3104 case EXEC_OMP_SIMD: stmt = make_node (OMP_SIMD); break;
3105 case EXEC_OMP_DO: stmt = make_node (OMP_FOR); break;
3106 case EXEC_OMP_DISTRIBUTE: stmt = make_node (OMP_DISTRIBUTE); break;
3107 default: gcc_unreachable ();
3110 TREE_TYPE (stmt) = void_type_node;
3111 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
3112 OMP_FOR_CLAUSES (stmt) = omp_clauses;
3113 OMP_FOR_INIT (stmt) = init;
3114 OMP_FOR_COND (stmt) = cond;
3115 OMP_FOR_INCR (stmt) = incr;
3116 gfc_add_expr_to_block (&block, stmt);
3118 return gfc_finish_block (&block);
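/* For illustration: a non-unit stride worksharing loop such as
       !$omp do
       do i = 1, n, 4
         ...
       end do
   is rewritten by the code above into the canonical form the middle end
   expects: a synthetic COUNT variable runs from 0 to (to + step - from)
   / step, I is assigned FROM + COUNT * STEP at the top of the body, and
   I is added to a data-sharing clause (private, or linear/lastprivate
   for simd) when the user did not list it explicitly.  */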
3121 static tree
3122 gfc_trans_omp_flush (void)
3124 tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
3125 return build_call_expr_loc (input_location, decl, 0);
3128 static tree
3129 gfc_trans_omp_master (gfc_code *code)
3131 tree stmt = gfc_trans_code (code->block->next);
3132 if (IS_EMPTY_STMT (stmt))
3133 return stmt;
3134 return build1_v (OMP_MASTER, stmt);
3137 static tree
3138 gfc_trans_omp_ordered (gfc_code *code)
3140 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
3143 static tree
3144 gfc_trans_omp_parallel (gfc_code *code)
3146 stmtblock_t block;
3147 tree stmt, omp_clauses;
3149 gfc_start_block (&block);
3150 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3151 code->loc);
3152 stmt = gfc_trans_omp_code (code->block->next, true);
3153 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3154 omp_clauses);
3155 gfc_add_expr_to_block (&block, stmt);
3156 return gfc_finish_block (&block);
3159 enum
3161 GFC_OMP_SPLIT_SIMD,
3162 GFC_OMP_SPLIT_DO,
3163 GFC_OMP_SPLIT_PARALLEL,
3164 GFC_OMP_SPLIT_DISTRIBUTE,
3165 GFC_OMP_SPLIT_TEAMS,
3166 GFC_OMP_SPLIT_TARGET,
3167 GFC_OMP_SPLIT_NUM
3170 enum
3172 GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
3173 GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
3174 GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
3175 GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
3176 GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
3177 GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
3180 static void
3181 gfc_split_omp_clauses (gfc_code *code,
3182 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM])
3184 int mask = 0, innermost = 0;
3185 memset (clausesa, 0, GFC_OMP_SPLIT_NUM * sizeof (gfc_omp_clauses));
3186 switch (code->op)
3188 case EXEC_OMP_DISTRIBUTE:
3189 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3190 break;
3191 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
3192 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3193 innermost = GFC_OMP_SPLIT_DO;
3194 break;
3195 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
3196 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL
3197 | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3198 innermost = GFC_OMP_SPLIT_SIMD;
3199 break;
3200 case EXEC_OMP_DISTRIBUTE_SIMD:
3201 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3202 innermost = GFC_OMP_SPLIT_SIMD;
3203 break;
3204 case EXEC_OMP_DO:
3205 innermost = GFC_OMP_SPLIT_DO;
3206 break;
3207 case EXEC_OMP_DO_SIMD:
3208 mask = GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3209 innermost = GFC_OMP_SPLIT_SIMD;
3210 break;
3211 case EXEC_OMP_PARALLEL:
3212 innermost = GFC_OMP_SPLIT_PARALLEL;
3213 break;
3214 case EXEC_OMP_PARALLEL_DO:
3215 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3216 innermost = GFC_OMP_SPLIT_DO;
3217 break;
3218 case EXEC_OMP_PARALLEL_DO_SIMD:
3219 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3220 innermost = GFC_OMP_SPLIT_SIMD;
3221 break;
3222 case EXEC_OMP_SIMD:
3223 innermost = GFC_OMP_SPLIT_SIMD;
3224 break;
3225 case EXEC_OMP_TARGET:
3226 innermost = GFC_OMP_SPLIT_TARGET;
3227 break;
3228 case EXEC_OMP_TARGET_TEAMS:
3229 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS;
3230 innermost = GFC_OMP_SPLIT_TEAMS;
3231 break;
3232 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3233 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3234 | GFC_OMP_MASK_DISTRIBUTE;
3235 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3236 break;
3237 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
3238 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3239 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3240 innermost = GFC_OMP_SPLIT_DO;
3241 break;
3242 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3243 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3244 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3245 innermost = GFC_OMP_SPLIT_SIMD;
3246 break;
3247 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
3248 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3249 | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3250 innermost = GFC_OMP_SPLIT_SIMD;
3251 break;
3252 case EXEC_OMP_TEAMS:
3253 innermost = GFC_OMP_SPLIT_TEAMS;
3254 break;
3255 case EXEC_OMP_TEAMS_DISTRIBUTE:
3256 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE;
3257 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3258 break;
3259 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
3260 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3261 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3262 innermost = GFC_OMP_SPLIT_DO;
3263 break;
3264 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3265 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3266 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3267 innermost = GFC_OMP_SPLIT_SIMD;
3268 break;
3269 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
3270 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3271 innermost = GFC_OMP_SPLIT_SIMD;
3272 break;
3273 default:
3274 gcc_unreachable ();
3276 if (mask == 0)
3278 clausesa[innermost] = *code->ext.omp_clauses;
3279 return;
3281 if (code->ext.omp_clauses != NULL)
3283 if (mask & GFC_OMP_MASK_TARGET)
3285 /* First the clauses that are unique to some constructs. */
3286 clausesa[GFC_OMP_SPLIT_TARGET].lists[OMP_LIST_MAP]
3287 = code->ext.omp_clauses->lists[OMP_LIST_MAP];
3288 clausesa[GFC_OMP_SPLIT_TARGET].device
3289 = code->ext.omp_clauses->device;
3291 if (mask & GFC_OMP_MASK_TEAMS)
3293 /* First the clauses that are unique to some constructs. */
3294 clausesa[GFC_OMP_SPLIT_TEAMS].num_teams
3295 = code->ext.omp_clauses->num_teams;
3296 clausesa[GFC_OMP_SPLIT_TEAMS].thread_limit
3297 = code->ext.omp_clauses->thread_limit;
3298 /* Shared and default clauses are allowed on parallel and teams. */
3299 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_SHARED]
3300 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3301 clausesa[GFC_OMP_SPLIT_TEAMS].default_sharing
3302 = code->ext.omp_clauses->default_sharing;
3304 if (mask & GFC_OMP_MASK_DISTRIBUTE)
3306 /* First the clauses that are unique to some constructs. */
3307 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_sched_kind
3308 = code->ext.omp_clauses->dist_sched_kind;
3309 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_chunk_size
3310 = code->ext.omp_clauses->dist_chunk_size;
3311 /* Duplicate collapse. */
3312 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].collapse
3313 = code->ext.omp_clauses->collapse;
3315 if (mask & GFC_OMP_MASK_PARALLEL)
3317 /* First the clauses that are unique to some constructs. */
3318 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_COPYIN]
3319 = code->ext.omp_clauses->lists[OMP_LIST_COPYIN];
3320 clausesa[GFC_OMP_SPLIT_PARALLEL].num_threads
3321 = code->ext.omp_clauses->num_threads;
3322 clausesa[GFC_OMP_SPLIT_PARALLEL].proc_bind
3323 = code->ext.omp_clauses->proc_bind;
3324 /* Shared and default clauses are allowed on parallel and teams. */
3325 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_SHARED]
3326 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3327 clausesa[GFC_OMP_SPLIT_PARALLEL].default_sharing
3328 = code->ext.omp_clauses->default_sharing;
3330 if (mask & GFC_OMP_MASK_DO)
3332 /* First the clauses that are unique to some constructs. */
3333 clausesa[GFC_OMP_SPLIT_DO].ordered
3334 = code->ext.omp_clauses->ordered;
3335 clausesa[GFC_OMP_SPLIT_DO].sched_kind
3336 = code->ext.omp_clauses->sched_kind;
3337 clausesa[GFC_OMP_SPLIT_DO].chunk_size
3338 = code->ext.omp_clauses->chunk_size;
3339 clausesa[GFC_OMP_SPLIT_DO].nowait
3340 = code->ext.omp_clauses->nowait;
3341 /* Duplicate collapse. */
3342 clausesa[GFC_OMP_SPLIT_DO].collapse
3343 = code->ext.omp_clauses->collapse;
3345 if (mask & GFC_OMP_MASK_SIMD)
3347 clausesa[GFC_OMP_SPLIT_SIMD].safelen_expr
3348 = code->ext.omp_clauses->safelen_expr;
3349 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LINEAR]
3350 = code->ext.omp_clauses->lists[OMP_LIST_LINEAR];
3351 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_ALIGNED]
3352 = code->ext.omp_clauses->lists[OMP_LIST_ALIGNED];
3353 /* Duplicate collapse. */
3354 clausesa[GFC_OMP_SPLIT_SIMD].collapse
3355 = code->ext.omp_clauses->collapse;
3357 /* The private clause is supported on all constructs but target,
3358 so it is enough to put it on the innermost one. For
3359 !$ omp parallel do put it on parallel though,
3360 as that's what we did for OpenMP 3.1. */
3361 clausesa[innermost == GFC_OMP_SPLIT_DO
3362 ? (int) GFC_OMP_SPLIT_PARALLEL
3363 : innermost].lists[OMP_LIST_PRIVATE]
3364 = code->ext.omp_clauses->lists[OMP_LIST_PRIVATE];
3365 /* Firstprivate clause is supported on all constructs but
3366 target and simd. Put it on the outermost of those and
3367 duplicate on parallel. */
3368 if (mask & GFC_OMP_MASK_TEAMS)
3369 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_FIRSTPRIVATE]
3370 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3371 else if (mask & GFC_OMP_MASK_DISTRIBUTE)
3372 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].lists[OMP_LIST_FIRSTPRIVATE]
3373 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3374 if (mask & GFC_OMP_MASK_PARALLEL)
3375 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_FIRSTPRIVATE]
3376 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3377 else if (mask & GFC_OMP_MASK_DO)
3378 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_FIRSTPRIVATE]
3379 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3380 /* Lastprivate is allowed on do and simd. In
3381 parallel do{, simd} we actually want to put it on
3382 parallel rather than do. */
3383 if (mask & GFC_OMP_MASK_PARALLEL)
3384 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_LASTPRIVATE]
3385 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3386 else if (mask & GFC_OMP_MASK_DO)
3387 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_LASTPRIVATE]
3388 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3389 if (mask & GFC_OMP_MASK_SIMD)
3390 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LASTPRIVATE]
3391 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3392 /* Reduction is allowed on simd, do, parallel and teams.
3393 Duplicate it on all of them, but omit on do if
3394 parallel is present. */
3395 if (mask & GFC_OMP_MASK_TEAMS)
3396 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_REDUCTION]
3397 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3398 if (mask & GFC_OMP_MASK_PARALLEL)
3399 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_REDUCTION]
3400 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3401 else if (mask & GFC_OMP_MASK_DO)
3402 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_REDUCTION]
3403 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3404 if (mask & GFC_OMP_MASK_SIMD)
3405 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_REDUCTION]
3406 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3407 /* FIXME: This is currently being discussed. */
3408 if (mask & GFC_OMP_MASK_PARALLEL)
3409 clausesa[GFC_OMP_SPLIT_PARALLEL].if_expr
3410 = code->ext.omp_clauses->if_expr;
3411 else
3412 clausesa[GFC_OMP_SPLIT_TARGET].if_expr
3413 = code->ext.omp_clauses->if_expr;
3415 if ((mask & (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3416 == (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3417 clausesa[GFC_OMP_SPLIT_DO].nowait = true;
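/* For illustration: for a combined directive such as
       !$omp target teams distribute parallel do simd
   the splitter above files map and device on the target leaf, num_teams
   and thread_limit on teams, dist_schedule on distribute, num_threads,
   proc_bind and copyin on parallel, schedule, ordered and nowait on do,
   and safelen, linear and aligned on simd, while collapse, private,
   firstprivate, lastprivate and reduction are duplicated or placed
   following the rules in the comments above.  */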
3420 static tree
3421 gfc_trans_omp_do_simd (gfc_code *code, stmtblock_t *pblock,
3422 gfc_omp_clauses *clausesa, tree omp_clauses)
3424 stmtblock_t block;
3425 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3426 tree stmt, body, omp_do_clauses = NULL_TREE;
3428 if (pblock == NULL)
3429 gfc_start_block (&block);
3430 else
3431 gfc_init_block (&block);
3433 if (clausesa == NULL)
3435 clausesa = clausesa_buf;
3436 gfc_split_omp_clauses (code, clausesa);
3438 if (flag_openmp)
3439 omp_do_clauses
3440 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DO], code->loc);
3441 body = gfc_trans_omp_do (code, EXEC_OMP_SIMD, pblock ? pblock : &block,
3442 &clausesa[GFC_OMP_SPLIT_SIMD], omp_clauses);
3443 if (pblock == NULL)
3445 if (TREE_CODE (body) != BIND_EXPR)
3446 body = build3_v (BIND_EXPR, NULL, body, poplevel (1, 0));
3447 else
3448 poplevel (0, 0);
3450 else if (TREE_CODE (body) != BIND_EXPR)
3451 body = build3_v (BIND_EXPR, NULL, body, NULL_TREE);
3452 if (flag_openmp)
3454 stmt = make_node (OMP_FOR);
3455 TREE_TYPE (stmt) = void_type_node;
3456 OMP_FOR_BODY (stmt) = body;
3457 OMP_FOR_CLAUSES (stmt) = omp_do_clauses;
3459 else
3460 stmt = body;
3461 gfc_add_expr_to_block (&block, stmt);
3462 return gfc_finish_block (&block);
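/* For illustration: for
       !$omp do simd
   the function above nests the OMP_SIMD loop built by gfc_trans_omp_do
   inside an OMP_FOR carrying the do-specific clauses; when only
   -fopenmp-simd is in effect (flag_openmp is false) the outer OMP_FOR
   and its clauses are omitted and just the simd loop remains.  */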
3465 static tree
3466 gfc_trans_omp_parallel_do (gfc_code *code, stmtblock_t *pblock,
3467 gfc_omp_clauses *clausesa)
3469 stmtblock_t block, *new_pblock = pblock;
3470 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3471 tree stmt, omp_clauses = NULL_TREE;
3473 if (pblock == NULL)
3474 gfc_start_block (&block);
3475 else
3476 gfc_init_block (&block);
3478 if (clausesa == NULL)
3480 clausesa = clausesa_buf;
3481 gfc_split_omp_clauses (code, clausesa);
3483 omp_clauses
3484 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3485 code->loc);
3486 if (pblock == NULL)
3488 if (!clausesa[GFC_OMP_SPLIT_DO].ordered
3489 && clausesa[GFC_OMP_SPLIT_DO].sched_kind != OMP_SCHED_STATIC)
3490 new_pblock = &block;
3491 else
3492 pushlevel ();
3494 stmt = gfc_trans_omp_do (code, EXEC_OMP_DO, new_pblock,
3495 &clausesa[GFC_OMP_SPLIT_DO], omp_clauses);
3496 if (pblock == NULL)
3498 if (TREE_CODE (stmt) != BIND_EXPR)
3499 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3500 else
3501 poplevel (0, 0);
3503 else if (TREE_CODE (stmt) != BIND_EXPR)
3504 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3505 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3506 omp_clauses);
3507 OMP_PARALLEL_COMBINED (stmt) = 1;
3508 gfc_add_expr_to_block (&block, stmt);
3509 return gfc_finish_block (&block);
3512 static tree
3513 gfc_trans_omp_parallel_do_simd (gfc_code *code, stmtblock_t *pblock,
3514 gfc_omp_clauses *clausesa)
3516 stmtblock_t block;
3517 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3518 tree stmt, omp_clauses = NULL_TREE;
3520 if (pblock == NULL)
3521 gfc_start_block (&block);
3522 else
3523 gfc_init_block (&block);
3525 if (clausesa == NULL)
3527 clausesa = clausesa_buf;
3528 gfc_split_omp_clauses (code, clausesa);
3530 if (flag_openmp)
3531 omp_clauses
3532 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3533 code->loc);
3534 if (pblock == NULL)
3535 pushlevel ();
3536 stmt = gfc_trans_omp_do_simd (code, pblock, clausesa, omp_clauses);
3537 if (pblock == NULL)
3539 if (TREE_CODE (stmt) != BIND_EXPR)
3540 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3541 else
3542 poplevel (0, 0);
3544 else if (TREE_CODE (stmt) != BIND_EXPR)
3545 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3546 if (flag_openmp)
3548 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3549 omp_clauses);
3550 OMP_PARALLEL_COMBINED (stmt) = 1;
3552 gfc_add_expr_to_block (&block, stmt);
3553 return gfc_finish_block (&block);
3556 static tree
3557 gfc_trans_omp_parallel_sections (gfc_code *code)
3559 stmtblock_t block;
3560 gfc_omp_clauses section_clauses;
3561 tree stmt, omp_clauses;
3563 memset (&section_clauses, 0, sizeof (section_clauses));
3564 section_clauses.nowait = true;
3566 gfc_start_block (&block);
3567 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3568 code->loc);
3569 pushlevel ();
3570 stmt = gfc_trans_omp_sections (code, &section_clauses);
3571 if (TREE_CODE (stmt) != BIND_EXPR)
3572 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3573 else
3574 poplevel (0, 0);
3575 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3576 omp_clauses);
3577 OMP_PARALLEL_COMBINED (stmt) = 1;
3578 gfc_add_expr_to_block (&block, stmt);
3579 return gfc_finish_block (&block);
3582 static tree
3583 gfc_trans_omp_parallel_workshare (gfc_code *code)
3585 stmtblock_t block;
3586 gfc_omp_clauses workshare_clauses;
3587 tree stmt, omp_clauses;
3589 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
3590 workshare_clauses.nowait = true;
3592 gfc_start_block (&block);
3593 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3594 code->loc);
3595 pushlevel ();
3596 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
3597 if (TREE_CODE (stmt) != BIND_EXPR)
3598 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3599 else
3600 poplevel (0, 0);
3601 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3602 omp_clauses);
3603 OMP_PARALLEL_COMBINED (stmt) = 1;
3604 gfc_add_expr_to_block (&block, stmt);
3605 return gfc_finish_block (&block);
3608 static tree
3609 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
3611 stmtblock_t block, body;
3612 tree omp_clauses, stmt;
3613 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
3615 gfc_start_block (&block);
3617 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
3619 gfc_init_block (&body);
3620 for (code = code->block; code; code = code->block)
3622 /* Last section is special because of lastprivate, so even if it
3623 is empty, chain it in. */
3624 stmt = gfc_trans_omp_code (code->next,
3625 has_lastprivate && code->block == NULL);
3626 if (! IS_EMPTY_STMT (stmt))
3628 stmt = build1_v (OMP_SECTION, stmt);
3629 gfc_add_expr_to_block (&body, stmt);
3632 stmt = gfc_finish_block (&body);
3634 stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
3635 omp_clauses);
3636 gfc_add_expr_to_block (&block, stmt);
3638 return gfc_finish_block (&block);
3641 static tree
3642 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
3644 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
3645 tree stmt = gfc_trans_omp_code (code->block->next, true);
3646 stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
3647 omp_clauses);
3648 return stmt;
3651 static tree
3652 gfc_trans_omp_task (gfc_code *code)
3654 stmtblock_t block;
3655 tree stmt, omp_clauses;
3657 gfc_start_block (&block);
3658 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3659 code->loc);
3660 stmt = gfc_trans_omp_code (code->block->next, true);
3661 stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
3662 omp_clauses);
3663 gfc_add_expr_to_block (&block, stmt);
3664 return gfc_finish_block (&block);
3667 static tree
3668 gfc_trans_omp_taskgroup (gfc_code *code)
3670 tree stmt = gfc_trans_code (code->block->next);
3671 return build1_loc (input_location, OMP_TASKGROUP, void_type_node, stmt);
3674 static tree
3675 gfc_trans_omp_taskwait (void)
3677 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
3678 return build_call_expr_loc (input_location, decl, 0);
3681 static tree
3682 gfc_trans_omp_taskyield (void)
3684 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
3685 return build_call_expr_loc (input_location, decl, 0);
3688 static tree
3689 gfc_trans_omp_distribute (gfc_code *code, gfc_omp_clauses *clausesa)
3691 stmtblock_t block;
3692 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3693 tree stmt, omp_clauses = NULL_TREE;
3695 gfc_start_block (&block);
3696 if (clausesa == NULL)
3698 clausesa = clausesa_buf;
3699 gfc_split_omp_clauses (code, clausesa);
3701 if (flag_openmp)
3702 omp_clauses
3703 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
3704 code->loc);
3705 switch (code->op)
3707 case EXEC_OMP_DISTRIBUTE:
3708 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3709 case EXEC_OMP_TEAMS_DISTRIBUTE:
3710 /* This is handled in gfc_trans_omp_do. */
3711 gcc_unreachable ();
3712 break;
3713 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
3714 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
3715 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
3716 stmt = gfc_trans_omp_parallel_do (code, &block, clausesa);
3717 if (TREE_CODE (stmt) != BIND_EXPR)
3718 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3719 else
3720 poplevel (0, 0);
3721 break;
3722 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
3723 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3724 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3725 stmt = gfc_trans_omp_parallel_do_simd (code, &block, clausesa);
3726 if (TREE_CODE (stmt) != BIND_EXPR)
3727 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3728 else
3729 poplevel (0, 0);
3730 break;
3731 case EXEC_OMP_DISTRIBUTE_SIMD:
3732 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
3733 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
3734 stmt = gfc_trans_omp_do (code, EXEC_OMP_SIMD, &block,
3735 &clausesa[GFC_OMP_SPLIT_SIMD], NULL_TREE);
3736 if (TREE_CODE (stmt) != BIND_EXPR)
3737 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3738 else
3739 poplevel (0, 0);
3740 break;
3741 default:
3742 gcc_unreachable ();
3744 if (flag_openmp)
3746 tree distribute = make_node (OMP_DISTRIBUTE);
3747 TREE_TYPE (distribute) = void_type_node;
3748 OMP_FOR_BODY (distribute) = stmt;
3749 OMP_FOR_CLAUSES (distribute) = omp_clauses;
3750 stmt = distribute;
3752 gfc_add_expr_to_block (&block, stmt);
3753 return gfc_finish_block (&block);
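/* Translate TEAMS and combined TEAMS DISTRIBUTE ... constructs; the
   translated body is wrapped in an OMP_TEAMS node.  */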
3756 static tree
3757 gfc_trans_omp_teams (gfc_code *code, gfc_omp_clauses *clausesa)
3759 stmtblock_t block;
3760 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3761 tree stmt, omp_clauses = NULL_TREE;
3763 gfc_start_block (&block);
3764 if (clausesa == NULL)
3766 clausesa = clausesa_buf;
3767 gfc_split_omp_clauses (code, clausesa);
3769 if (flag_openmp)
3770 omp_clauses
3771 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TEAMS],
3772 code->loc);
3773 switch (code->op)
3775 case EXEC_OMP_TARGET_TEAMS:
3776 case EXEC_OMP_TEAMS:
3777 stmt = gfc_trans_omp_code (code->block->next, true);
3778 break;
3779 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3780 case EXEC_OMP_TEAMS_DISTRIBUTE:
3781 stmt = gfc_trans_omp_do (code, EXEC_OMP_DISTRIBUTE, NULL,
3782 &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
3783 NULL);
3784 break;
3785 default:
3786 stmt = gfc_trans_omp_distribute (code, clausesa);
3787 break;
3789 stmt = build2_loc (input_location, OMP_TEAMS, void_type_node, stmt,
3790 omp_clauses);
3791 gfc_add_expr_to_block (&block, stmt);
3792 return gfc_finish_block (&block);
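/* Translate TARGET and combined TARGET TEAMS ... constructs.  The body is
   wrapped in an OMP_TARGET node when -fopenmp is in effect.  */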
3795 static tree
3796 gfc_trans_omp_target (gfc_code *code)
3798 stmtblock_t block;
3799 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
3800 tree stmt, omp_clauses = NULL_TREE;
3802 gfc_start_block (&block);
3803 gfc_split_omp_clauses (code, clausesa);
3804 if (flag_openmp)
3805 omp_clauses
3806 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TARGET],
3807 code->loc);
3808 if (code->op == EXEC_OMP_TARGET)
3809 stmt = gfc_trans_omp_code (code->block->next, true);
3810 else
3811 stmt = gfc_trans_omp_teams (code, clausesa);
3812 if (TREE_CODE (stmt) != BIND_EXPR)
3813 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3814 if (flag_openmp)
3815 stmt = build2_loc (input_location, OMP_TARGET, void_type_node, stmt,
3816 omp_clauses);
3817 gfc_add_expr_to_block (&block, stmt);
3818 return gfc_finish_block (&block);
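/* Translate the !$OMP TARGET DATA construct into an OMP_TARGET_DATA node.  */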
3821 static tree
3822 gfc_trans_omp_target_data (gfc_code *code)
3824 stmtblock_t block;
3825 tree stmt, omp_clauses;
3827 gfc_start_block (&block);
3828 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3829 code->loc);
3830 stmt = gfc_trans_omp_code (code->block->next, true);
3831 stmt = build2_loc (input_location, OMP_TARGET_DATA, void_type_node, stmt,
3832 omp_clauses);
3833 gfc_add_expr_to_block (&block, stmt);
3834 return gfc_finish_block (&block);
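/* Translate !$OMP TARGET UPDATE; the resulting OMP_TARGET_UPDATE node
   carries only clauses, as the directive has no body.  */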
3837 static tree
3838 gfc_trans_omp_target_update (gfc_code *code)
3840 stmtblock_t block;
3841 tree stmt, omp_clauses;
3843 gfc_start_block (&block);
3844 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3845 code->loc);
3846 stmt = build1_loc (input_location, OMP_TARGET_UPDATE, void_type_node,
3847 omp_clauses);
3848 gfc_add_expr_to_block (&block, stmt);
3849 return gfc_finish_block (&block);
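/* Translate the body of an !$OMP WORKSHARE construct; the clustering of
   units of work is described in the comments inside the function.  */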
3852 static tree
3853 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
3855 tree res, tmp, stmt;
3856 stmtblock_t block, *pblock = NULL;
3857 stmtblock_t singleblock;
3858 int saved_ompws_flags;
3859 bool singleblock_in_progress = false;
3860 /* True if the previous gfc_code in the workshare construct was not
work-shared, i.e. formed a single unit of work. */
3861 bool prev_singleunit;
3863 code = code->block->next;
3865 pushlevel ();
3867 gfc_start_block (&block);
3868 pblock = &block;
3870 ompws_flags = OMPWS_WORKSHARE_FLAG;
3871 prev_singleunit = false;
3873 /* Translate statements one by one to trees until we reach
3874 the end of the workshare construct. Adjacent gfc_codes that
3875 are a single unit of work are clustered and encapsulated in a
3876 single OMP_SINGLE construct. */
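/* Illustrative sketch (not part of the original sources): for a WORKSHARE
   body such as

       S = 0.0            ! scalar assignment: single unit of work
       T = 1.0            ! scalar assignment: single unit of work
       A(:) = B(:)        ! array assignment: work-shared by the scalarizer

   the two scalar assignments are expected to be clustered into one
   OMP_SINGLE, while the array assignment is emitted as a work-shared
   loop.  */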
3877 for (; code; code = code->next)
3879 if (code->here != 0)
3881 res = gfc_trans_label_here (code);
3882 gfc_add_expr_to_block (pblock, res);
3885 /* No dependence analysis is done, so work-shared pieces are emitted
3886 with a wait (no NOWAIT) between them; only the last gfc_code may honor
the construct's own NOWAIT clause. */
3887 if (code->next == NULL && clauses->nowait)
3888 ompws_flags |= OMPWS_NOWAIT;
3890 /* By default, every gfc_code is a single unit of work. */
3891 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
3892 ompws_flags &= ~OMPWS_SCALARIZER_WS;
3894 switch (code->op)
3896 case EXEC_NOP:
3897 res = NULL_TREE;
3898 break;
3900 case EXEC_ASSIGN:
3901 res = gfc_trans_assign (code);
3902 break;
3904 case EXEC_POINTER_ASSIGN:
3905 res = gfc_trans_pointer_assign (code);
3906 break;
3908 case EXEC_INIT_ASSIGN:
3909 res = gfc_trans_init_assign (code);
3910 break;
3912 case EXEC_FORALL:
3913 res = gfc_trans_forall (code);
3914 break;
3916 case EXEC_WHERE:
3917 res = gfc_trans_where (code);
3918 break;
3920 case EXEC_OMP_ATOMIC:
3921 res = gfc_trans_omp_directive (code);
3922 break;
3924 case EXEC_OMP_PARALLEL:
3925 case EXEC_OMP_PARALLEL_DO:
3926 case EXEC_OMP_PARALLEL_SECTIONS:
3927 case EXEC_OMP_PARALLEL_WORKSHARE:
3928 case EXEC_OMP_CRITICAL:
3929 saved_ompws_flags = ompws_flags;
3930 ompws_flags = 0;
3931 res = gfc_trans_omp_directive (code);
3932 ompws_flags = saved_ompws_flags;
3933 break;
3935 default:
3936 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
3939 gfc_set_backend_locus (&code->loc);
3941 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
3943 if (prev_singleunit)
3945 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
3946 /* Add current gfc_code to single block. */
3947 gfc_add_expr_to_block (&singleblock, res);
3948 else
3950 /* Finish single block and add it to pblock. */
3951 tmp = gfc_finish_block (&singleblock);
3952 tmp = build2_loc (input_location, OMP_SINGLE,
3953 void_type_node, tmp, NULL_TREE);
3954 gfc_add_expr_to_block (pblock, tmp);
3955 /* Add current gfc_code to pblock. */
3956 gfc_add_expr_to_block (pblock, res);
3957 singleblock_in_progress = false;
3960 else
3962 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
3964 /* Start single block. */
3965 gfc_init_block (&singleblock);
3966 gfc_add_expr_to_block (&singleblock, res);
3967 singleblock_in_progress = true;
3969 else
3970 /* Add the new statement to the block. */
3971 gfc_add_expr_to_block (pblock, res);
3973 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
3977 /* Finish remaining SINGLE block, if we were in the middle of one. */
3978 if (singleblock_in_progress)
3980 /* Finish single block and add it to pblock. */
3981 tmp = gfc_finish_block (&singleblock);
3982 tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
3983 clauses->nowait
3984 ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
3985 : NULL_TREE);
3986 gfc_add_expr_to_block (pblock, tmp);
3989 stmt = gfc_finish_block (pblock);
3990 if (TREE_CODE (stmt) != BIND_EXPR)
3992 if (!IS_EMPTY_STMT (stmt))
3994 tree bindblock = poplevel (1, 0);
3995 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
3997 else
3998 poplevel (0, 0);
4000 else
4001 poplevel (0, 0);
4003 if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
4004 stmt = gfc_trans_omp_barrier ();
4006 ompws_flags = 0;
4007 return stmt;
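/* Main entry point for translating an OpenMP directive: dispatch on the
   statement code to the matching translation routine above.  */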
4010 tree
4011 gfc_trans_omp_directive (gfc_code *code)
4013 switch (code->op)
4015 case EXEC_OMP_ATOMIC:
4016 return gfc_trans_omp_atomic (code);
4017 case EXEC_OMP_BARRIER:
4018 return gfc_trans_omp_barrier ();
4019 case EXEC_OMP_CANCEL:
4020 return gfc_trans_omp_cancel (code);
4021 case EXEC_OMP_CANCELLATION_POINT:
4022 return gfc_trans_omp_cancellation_point (code);
4023 case EXEC_OMP_CRITICAL:
4024 return gfc_trans_omp_critical (code);
4025 case EXEC_OMP_DISTRIBUTE:
4026 case EXEC_OMP_DO:
4027 case EXEC_OMP_SIMD:
4028 return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
4029 NULL);
4030 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
4031 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
4032 case EXEC_OMP_DISTRIBUTE_SIMD:
4033 return gfc_trans_omp_distribute (code, NULL);
4034 case EXEC_OMP_DO_SIMD:
4035 return gfc_trans_omp_do_simd (code, NULL, NULL, NULL_TREE);
4036 case EXEC_OMP_FLUSH:
4037 return gfc_trans_omp_flush ();
4038 case EXEC_OMP_MASTER:
4039 return gfc_trans_omp_master (code);
4040 case EXEC_OMP_ORDERED:
4041 return gfc_trans_omp_ordered (code);
4042 case EXEC_OMP_PARALLEL:
4043 return gfc_trans_omp_parallel (code);
4044 case EXEC_OMP_PARALLEL_DO:
4045 return gfc_trans_omp_parallel_do (code, NULL, NULL);
4046 case EXEC_OMP_PARALLEL_DO_SIMD:
4047 return gfc_trans_omp_parallel_do_simd (code, NULL, NULL);
4048 case EXEC_OMP_PARALLEL_SECTIONS:
4049 return gfc_trans_omp_parallel_sections (code);
4050 case EXEC_OMP_PARALLEL_WORKSHARE:
4051 return gfc_trans_omp_parallel_workshare (code);
4052 case EXEC_OMP_SECTIONS:
4053 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
4054 case EXEC_OMP_SINGLE:
4055 return gfc_trans_omp_single (code, code->ext.omp_clauses);
4056 case EXEC_OMP_TARGET:
4057 case EXEC_OMP_TARGET_TEAMS:
4058 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
4059 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
4060 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4061 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
4062 return gfc_trans_omp_target (code);
4063 case EXEC_OMP_TARGET_DATA:
4064 return gfc_trans_omp_target_data (code);
4065 case EXEC_OMP_TARGET_UPDATE:
4066 return gfc_trans_omp_target_update (code);
4067 case EXEC_OMP_TASK:
4068 return gfc_trans_omp_task (code);
4069 case EXEC_OMP_TASKGROUP:
4070 return gfc_trans_omp_taskgroup (code);
4071 case EXEC_OMP_TASKWAIT:
4072 return gfc_trans_omp_taskwait ();
4073 case EXEC_OMP_TASKYIELD:
4074 return gfc_trans_omp_taskyield ();
4075 case EXEC_OMP_TEAMS:
4076 case EXEC_OMP_TEAMS_DISTRIBUTE:
4077 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
4078 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4079 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
4080 return gfc_trans_omp_teams (code, NULL);
4081 case EXEC_OMP_WORKSHARE:
4082 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
4083 default:
4084 gcc_unreachable ();
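/* Attach an "omp declare simd" attribute to the procedure's decl for each
   !$OMP DECLARE SIMD directive in namespace NS.  Procedures with ENTRY
   statements are not handled.  */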
4088 void
4089 gfc_trans_omp_declare_simd (gfc_namespace *ns)
4091 if (ns->entries)
4092 return;
4094 gfc_omp_declare_simd *ods;
4095 for (ods = ns->omp_declare_simd; ods; ods = ods->next)
4097 tree c = gfc_trans_omp_clauses (NULL, ods->clauses, ods->where, true);
4098 tree fndecl = ns->proc_name->backend_decl;
4099 if (c != NULL_TREE)
4100 c = tree_cons (NULL_TREE, c, NULL_TREE);
4101 c = build_tree_list (get_identifier ("omp declare simd"), c);
4102 TREE_CHAIN (c) = DECL_ATTRIBUTES (fndecl);
4103 DECL_ATTRIBUTES (fndecl) = c;