gcc/fortran/trans-openmp.c  (official-gcc.git, blob da01a9034cb5126b679c488c15cc38725de4631b)
1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "gimple-expr.h"
27 #include "gimplify.h" /* For create_tmp_var_raw. */
28 #include "stringpool.h"
29 #include "diagnostic-core.h" /* For internal_error. */
30 #include "gfortran.h"
31 #include "trans.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
36 #include "arith.h"
37 #include "omp-low.h"
39 int ompws_flags;
41 /* True if OpenMP should privatize what this DECL points to rather
42 than the DECL itself. */
44 bool
45 gfc_omp_privatize_by_reference (const_tree decl)
47 tree type = TREE_TYPE (decl);
49 if (TREE_CODE (type) == REFERENCE_TYPE
50 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
51 return true;
53 if (TREE_CODE (type) == POINTER_TYPE)
 55 /* Array POINTER/ALLOCATABLE variables have aggregate types; all user
 56 variables that have POINTER_TYPE but aren't scalar pointers, scalar
 57 allocatables, Cray pointees or C pointers are supposed to be
 58 privatized by reference. */
59 if (GFC_DECL_GET_SCALAR_POINTER (decl)
60 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
61 || GFC_DECL_CRAY_POINTEE (decl)
62 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
63 return false;
65 if (!DECL_ARTIFICIAL (decl)
66 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
67 return true;
69 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
70 by the frontend. */
71 if (DECL_LANG_SPECIFIC (decl)
72 && GFC_DECL_SAVED_DESCRIPTOR (decl))
73 return true;
76 return false;
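/* Editorial note (illustrative, not part of the original source): a
   by-reference dummy argument such as

       subroutine sub (x)
         integer :: x
         !$omp parallel private(x)

   is represented as a PARM_DECL of REFERENCE_TYPE, so privatization must
   copy what the decl points to rather than the reference itself, which is
   what returning true here requests.  */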
79 /* True if OpenMP sharing attribute of DECL is predetermined. */
81 enum omp_clause_default_kind
82 gfc_omp_predetermined_sharing (tree decl)
84 /* Associate names preserve the association established during ASSOCIATE.
85 As they are implemented either as pointers to the selector or array
86 descriptor and shouldn't really change in the ASSOCIATE region,
87 this decl can be either shared or firstprivate. If it is a pointer,
88 use firstprivate, as it is cheaper that way, otherwise make it shared. */
89 if (GFC_DECL_ASSOCIATE_VAR_P (decl))
91 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
92 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
93 else
94 return OMP_CLAUSE_DEFAULT_SHARED;
97 if (DECL_ARTIFICIAL (decl)
98 && ! GFC_DECL_RESULT (decl)
99 && ! (DECL_LANG_SPECIFIC (decl)
100 && GFC_DECL_SAVED_DESCRIPTOR (decl)))
101 return OMP_CLAUSE_DEFAULT_SHARED;
103 /* Cray pointees shouldn't be listed in any clauses and should be
104 gimplified to dereference of the corresponding Cray pointer.
105 Make them all private, so that they are emitted in the debug
106 information. */
107 if (GFC_DECL_CRAY_POINTEE (decl))
108 return OMP_CLAUSE_DEFAULT_PRIVATE;
110 /* Assumed-size arrays are predetermined shared. */
111 if (TREE_CODE (decl) == PARM_DECL
112 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
113 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
114 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
115 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
116 == NULL)
117 return OMP_CLAUSE_DEFAULT_SHARED;
119 /* Dummy procedures aren't considered variables by OpenMP, thus are
120 disallowed in OpenMP clauses. They are represented as PARM_DECLs
121 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
122 to avoid complaining about their uses with default(none). */
123 if (TREE_CODE (decl) == PARM_DECL
124 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
125 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
126 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
128 /* COMMON and EQUIVALENCE decls are shared. They
129 are only referenced through DECL_VALUE_EXPR of the variables
130 contained in them. If those are privatized, they will not be
131 gimplified to the COMMON or EQUIVALENCE decls. */
132 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
133 return OMP_CLAUSE_DEFAULT_SHARED;
135 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
136 return OMP_CLAUSE_DEFAULT_SHARED;
138 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
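/* Editorial note (illustrative): with the rules above, in

       subroutine sub (a)
         real :: a(*)
         !$omp parallel default(none)

   the assumed-size dummy A is predetermined shared and need not be listed
   explicitly, while an ordinary local variable would still have to be given
   an explicit data-sharing attribute under DEFAULT(NONE).  */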
141 /* Return decl that should be used when reporting DEFAULT(NONE)
142 diagnostics. */
144 tree
145 gfc_omp_report_decl (tree decl)
147 if (DECL_ARTIFICIAL (decl)
148 && DECL_LANG_SPECIFIC (decl)
149 && GFC_DECL_SAVED_DESCRIPTOR (decl))
150 return GFC_DECL_SAVED_DESCRIPTOR (decl);
152 return decl;
155 /* Return true if TYPE has any allocatable components. */
157 static bool
158 gfc_has_alloc_comps (tree type, tree decl)
160 tree field, ftype;
162 if (POINTER_TYPE_P (type))
164 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
165 type = TREE_TYPE (type);
166 else if (GFC_DECL_GET_SCALAR_POINTER (decl))
167 return false;
170 while (GFC_DESCRIPTOR_TYPE_P (type) || GFC_ARRAY_TYPE_P (type))
171 type = gfc_get_element_type (type);
173 if (TREE_CODE (type) != RECORD_TYPE)
174 return false;
176 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
178 ftype = TREE_TYPE (field);
179 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
180 return true;
181 if (GFC_DESCRIPTOR_TYPE_P (ftype)
182 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
183 return true;
184 if (gfc_has_alloc_comps (ftype, field))
185 return true;
187 return false;
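/* Editorial note (illustrative): gfc_has_alloc_comps is true e.g. for

       type t
         real, allocatable :: a(:)
       end type t
       type(t) :: x

   because privatizing or copying X requires a deep copy (or deep free) of
   the allocatable component A, handled by gfc_walk_alloc_comps.  */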
190 /* Return true if DECL in private clause needs
191 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
192 bool
193 gfc_omp_private_outer_ref (tree decl)
195 tree type = TREE_TYPE (decl);
197 if (GFC_DESCRIPTOR_TYPE_P (type)
198 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
199 return true;
201 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
202 return true;
204 if (gfc_omp_privatize_by_reference (decl))
205 type = TREE_TYPE (type);
207 if (gfc_has_alloc_comps (type, decl))
208 return true;
210 return false;
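/* Editorial note (descriptive): OMP_CLAUSE_PRIVATE_OUTER_REF tells the
   gimplifier that constructing the private copy needs access to the outer
   (original) variable, e.g. to test whether an ALLOCATABLE is currently
   allocated before allocating the private copy to the same size.  */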
213 /* Callback for gfc_omp_unshare_expr. */
215 static tree
216 gfc_omp_unshare_expr_r (tree *tp, int *walk_subtrees, void *)
218 tree t = *tp;
219 enum tree_code code = TREE_CODE (t);
221 /* Stop at types, decls, constants like copy_tree_r. */
222 if (TREE_CODE_CLASS (code) == tcc_type
223 || TREE_CODE_CLASS (code) == tcc_declaration
224 || TREE_CODE_CLASS (code) == tcc_constant
225 || code == BLOCK)
226 *walk_subtrees = 0;
227 else if (handled_component_p (t)
228 || TREE_CODE (t) == MEM_REF)
230 *tp = unshare_expr (t);
231 *walk_subtrees = 0;
234 return NULL_TREE;
 237 /* Unshare in EXPR anything with which the FE (which normally doesn't
 238 care much about tree sharing, because during gimplification
 239 everything is unshared) could cause tree-sharing problems at
 240 omp-low.c time. */
242 static tree
243 gfc_omp_unshare_expr (tree expr)
245 walk_tree (&expr, gfc_omp_unshare_expr_r, NULL, NULL);
246 return expr;
249 enum walk_alloc_comps
251 WALK_ALLOC_COMPS_DTOR,
252 WALK_ALLOC_COMPS_DEFAULT_CTOR,
253 WALK_ALLOC_COMPS_COPY_CTOR
256 /* Handle allocatable components in OpenMP clauses. */
258 static tree
259 gfc_walk_alloc_comps (tree decl, tree dest, tree var,
260 enum walk_alloc_comps kind)
262 stmtblock_t block, tmpblock;
263 tree type = TREE_TYPE (decl), then_b, tem, field;
264 gfc_init_block (&block);
266 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
268 if (GFC_DESCRIPTOR_TYPE_P (type))
270 gfc_init_block (&tmpblock);
271 tem = gfc_full_array_size (&tmpblock, decl,
272 GFC_TYPE_ARRAY_RANK (type));
273 then_b = gfc_finish_block (&tmpblock);
274 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (then_b));
275 tem = gfc_omp_unshare_expr (tem);
276 tem = fold_build2_loc (input_location, MINUS_EXPR,
277 gfc_array_index_type, tem,
278 gfc_index_one_node);
280 else
282 if (!TYPE_DOMAIN (type)
283 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
284 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
285 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
287 tem = fold_build2 (EXACT_DIV_EXPR, sizetype,
288 TYPE_SIZE_UNIT (type),
289 TYPE_SIZE_UNIT (TREE_TYPE (type)));
290 tem = size_binop (MINUS_EXPR, tem, size_one_node);
292 else
293 tem = array_type_nelts (type);
294 tem = fold_convert (gfc_array_index_type, tem);
297 tree nelems = gfc_evaluate_now (tem, &block);
298 tree index = gfc_create_var (gfc_array_index_type, "S");
300 gfc_init_block (&tmpblock);
301 tem = gfc_conv_array_data (decl);
302 tree declvar = build_fold_indirect_ref_loc (input_location, tem);
303 tree declvref = gfc_build_array_ref (declvar, index, NULL);
304 tree destvar, destvref = NULL_TREE;
305 if (dest)
307 tem = gfc_conv_array_data (dest);
308 destvar = build_fold_indirect_ref_loc (input_location, tem);
309 destvref = gfc_build_array_ref (destvar, index, NULL);
311 gfc_add_expr_to_block (&tmpblock,
312 gfc_walk_alloc_comps (declvref, destvref,
313 var, kind));
315 gfc_loopinfo loop;
316 gfc_init_loopinfo (&loop);
317 loop.dimen = 1;
318 loop.from[0] = gfc_index_zero_node;
319 loop.loopvar[0] = index;
320 loop.to[0] = nelems;
321 gfc_trans_scalarizing_loops (&loop, &tmpblock);
322 gfc_add_block_to_block (&block, &loop.pre);
323 return gfc_finish_block (&block);
325 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var))
327 decl = build_fold_indirect_ref_loc (input_location, decl);
328 if (dest)
329 dest = build_fold_indirect_ref_loc (input_location, dest);
330 type = TREE_TYPE (decl);
333 gcc_assert (TREE_CODE (type) == RECORD_TYPE);
334 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
336 tree ftype = TREE_TYPE (field);
337 tree declf, destf = NULL_TREE;
338 bool has_alloc_comps = gfc_has_alloc_comps (ftype, field);
339 if ((!GFC_DESCRIPTOR_TYPE_P (ftype)
340 || GFC_TYPE_ARRAY_AKIND (ftype) != GFC_ARRAY_ALLOCATABLE)
341 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
342 && !has_alloc_comps)
343 continue;
344 declf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
345 decl, field, NULL_TREE);
346 if (dest)
347 destf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
348 dest, field, NULL_TREE);
350 tem = NULL_TREE;
351 switch (kind)
353 case WALK_ALLOC_COMPS_DTOR:
354 break;
355 case WALK_ALLOC_COMPS_DEFAULT_CTOR:
356 if (GFC_DESCRIPTOR_TYPE_P (ftype)
357 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
359 gfc_add_modify (&block, unshare_expr (destf),
360 unshare_expr (declf));
361 tem = gfc_duplicate_allocatable_nocopy
362 (destf, declf, ftype,
363 GFC_TYPE_ARRAY_RANK (ftype));
365 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
366 tem = gfc_duplicate_allocatable_nocopy (destf, declf, ftype, 0);
367 break;
368 case WALK_ALLOC_COMPS_COPY_CTOR:
369 if (GFC_DESCRIPTOR_TYPE_P (ftype)
370 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
371 tem = gfc_duplicate_allocatable (destf, declf, ftype,
372 GFC_TYPE_ARRAY_RANK (ftype));
373 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
374 tem = gfc_duplicate_allocatable (destf, declf, ftype, 0);
375 break;
377 if (tem)
378 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
379 if (has_alloc_comps)
381 gfc_init_block (&tmpblock);
382 gfc_add_expr_to_block (&tmpblock,
383 gfc_walk_alloc_comps (declf, destf,
384 field, kind));
385 then_b = gfc_finish_block (&tmpblock);
386 if (GFC_DESCRIPTOR_TYPE_P (ftype)
387 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
388 tem = gfc_conv_descriptor_data_get (unshare_expr (declf));
389 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
390 tem = unshare_expr (declf);
391 else
392 tem = NULL_TREE;
393 if (tem)
395 tem = fold_convert (pvoid_type_node, tem);
396 tem = fold_build2_loc (input_location, NE_EXPR,
397 boolean_type_node, tem,
398 null_pointer_node);
399 then_b = build3_loc (input_location, COND_EXPR, void_type_node,
400 tem, then_b,
401 build_empty_stmt (input_location));
403 gfc_add_expr_to_block (&block, then_b);
405 if (kind == WALK_ALLOC_COMPS_DTOR)
407 if (GFC_DESCRIPTOR_TYPE_P (ftype)
408 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
410 tem = gfc_trans_dealloc_allocated (unshare_expr (declf),
411 false, NULL);
412 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
414 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
416 tem = gfc_call_free (unshare_expr (declf));
417 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
422 return gfc_finish_block (&block);
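/* Editorial note (descriptive summary): for each allocatable component the
   walker above emits, depending on KIND, either a deallocation
   (WALK_ALLOC_COMPS_DTOR), an allocation of DEST's component without copying
   its payload (WALK_ALLOC_COMPS_DEFAULT_CTOR), or an allocation plus copy of
   the payload (WALK_ALLOC_COMPS_COPY_CTOR), recursing into nested derived
   types and guarding the nested walk with a check that the component is
   actually allocated.  */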
425 /* Return code to initialize DECL with its default constructor, or
426 NULL if there's nothing to do. */
428 tree
429 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
431 tree type = TREE_TYPE (decl), size, ptr, cond, then_b, else_b;
432 stmtblock_t block, cond_block;
434 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
435 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE
436 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR
437 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION);
439 if ((! GFC_DESCRIPTOR_TYPE_P (type)
440 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
441 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
443 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
445 gcc_assert (outer);
446 gfc_start_block (&block);
447 tree tem = gfc_walk_alloc_comps (outer, decl,
448 OMP_CLAUSE_DECL (clause),
449 WALK_ALLOC_COMPS_DEFAULT_CTOR);
450 gfc_add_expr_to_block (&block, tem);
451 return gfc_finish_block (&block);
453 return NULL_TREE;
456 gcc_assert (outer != NULL_TREE);
 458 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
 459 "not currently allocated" allocation status if the outer variable is
 460 "not currently allocated"; otherwise they should be allocated. */
461 gfc_start_block (&block);
463 gfc_init_block (&cond_block);
465 if (GFC_DESCRIPTOR_TYPE_P (type))
467 gfc_add_modify (&cond_block, decl, outer);
468 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
469 size = gfc_conv_descriptor_ubound_get (decl, rank);
470 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
471 size,
472 gfc_conv_descriptor_lbound_get (decl, rank));
473 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
474 size, gfc_index_one_node);
475 if (GFC_TYPE_ARRAY_RANK (type) > 1)
476 size = fold_build2_loc (input_location, MULT_EXPR,
477 gfc_array_index_type, size,
478 gfc_conv_descriptor_stride_get (decl, rank));
479 tree esize = fold_convert (gfc_array_index_type,
480 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
481 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
482 size, esize);
483 size = unshare_expr (size);
484 size = gfc_evaluate_now (fold_convert (size_type_node, size),
485 &cond_block);
487 else
488 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
489 ptr = gfc_create_var (pvoid_type_node, NULL);
490 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
491 if (GFC_DESCRIPTOR_TYPE_P (type))
492 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl), ptr);
493 else
494 gfc_add_modify (&cond_block, unshare_expr (decl),
495 fold_convert (TREE_TYPE (decl), ptr));
496 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
498 tree tem = gfc_walk_alloc_comps (outer, decl,
499 OMP_CLAUSE_DECL (clause),
500 WALK_ALLOC_COMPS_DEFAULT_CTOR);
501 gfc_add_expr_to_block (&cond_block, tem);
503 then_b = gfc_finish_block (&cond_block);
505 /* Reduction clause requires allocated ALLOCATABLE. */
506 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_REDUCTION)
508 gfc_init_block (&cond_block);
509 if (GFC_DESCRIPTOR_TYPE_P (type))
510 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl),
511 null_pointer_node);
512 else
513 gfc_add_modify (&cond_block, unshare_expr (decl),
514 build_zero_cst (TREE_TYPE (decl)));
515 else_b = gfc_finish_block (&cond_block);
517 tree tem = fold_convert (pvoid_type_node,
518 GFC_DESCRIPTOR_TYPE_P (type)
519 ? gfc_conv_descriptor_data_get (outer) : outer);
520 tem = unshare_expr (tem);
521 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
522 tem, null_pointer_node);
523 gfc_add_expr_to_block (&block,
524 build3_loc (input_location, COND_EXPR,
525 void_type_node, cond, then_b,
526 else_b));
528 else
529 gfc_add_expr_to_block (&block, then_b);
531 return gfc_finish_block (&block);
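/* Editorial note (illustrative): for

       real, allocatable :: a(:)
       !$omp parallel private(a)

   the private copy of A is allocated to the size of the original iff the
   original is allocated on entry to the region, and is left unallocated
   otherwise; for REDUCTION the original is required to be allocated, hence
   the missing "else" branch above.  */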
534 /* Build and return code for a copy constructor from SRC to DEST. */
536 tree
537 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
539 tree type = TREE_TYPE (dest), ptr, size, call;
540 tree cond, then_b, else_b;
541 stmtblock_t block, cond_block;
543 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE
544 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
546 if ((! GFC_DESCRIPTOR_TYPE_P (type)
547 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
548 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
550 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
552 gfc_start_block (&block);
553 gfc_add_modify (&block, dest, src);
554 tree tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
555 WALK_ALLOC_COMPS_COPY_CTOR);
556 gfc_add_expr_to_block (&block, tem);
557 return gfc_finish_block (&block);
559 else
560 return build2_v (MODIFY_EXPR, dest, src);
563 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
564 and copied from SRC. */
565 gfc_start_block (&block);
567 gfc_init_block (&cond_block);
569 gfc_add_modify (&cond_block, dest, src);
570 if (GFC_DESCRIPTOR_TYPE_P (type))
572 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
573 size = gfc_conv_descriptor_ubound_get (dest, rank);
574 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
575 size,
576 gfc_conv_descriptor_lbound_get (dest, rank));
577 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
578 size, gfc_index_one_node);
579 if (GFC_TYPE_ARRAY_RANK (type) > 1)
580 size = fold_build2_loc (input_location, MULT_EXPR,
581 gfc_array_index_type, size,
582 gfc_conv_descriptor_stride_get (dest, rank));
583 tree esize = fold_convert (gfc_array_index_type,
584 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
585 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
586 size, esize);
587 size = unshare_expr (size);
588 size = gfc_evaluate_now (fold_convert (size_type_node, size),
589 &cond_block);
591 else
592 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
593 ptr = gfc_create_var (pvoid_type_node, NULL);
594 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
595 if (GFC_DESCRIPTOR_TYPE_P (type))
596 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest), ptr);
597 else
598 gfc_add_modify (&cond_block, unshare_expr (dest),
599 fold_convert (TREE_TYPE (dest), ptr));
601 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
602 ? gfc_conv_descriptor_data_get (src) : src;
603 srcptr = unshare_expr (srcptr);
604 srcptr = fold_convert (pvoid_type_node, srcptr);
605 call = build_call_expr_loc (input_location,
606 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
607 srcptr, size);
608 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
609 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
611 tree tem = gfc_walk_alloc_comps (src, dest,
612 OMP_CLAUSE_DECL (clause),
613 WALK_ALLOC_COMPS_COPY_CTOR);
614 gfc_add_expr_to_block (&cond_block, tem);
616 then_b = gfc_finish_block (&cond_block);
618 gfc_init_block (&cond_block);
619 if (GFC_DESCRIPTOR_TYPE_P (type))
620 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest),
621 null_pointer_node);
622 else
623 gfc_add_modify (&cond_block, unshare_expr (dest),
624 build_zero_cst (TREE_TYPE (dest)));
625 else_b = gfc_finish_block (&cond_block);
627 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
628 unshare_expr (srcptr), null_pointer_node);
629 gfc_add_expr_to_block (&block,
630 build3_loc (input_location, COND_EXPR,
631 void_type_node, cond, then_b, else_b));
633 return gfc_finish_block (&block);
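/* Editorial note (illustrative): for

       real, allocatable :: a(:)
       !$omp parallel firstprivate(a)

   the code built above allocates the private copy and memcpy's the payload
   from the original only when the original is allocated; an unallocated
   original yields an unallocated private copy.  */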
636 /* Similarly, except use an intrinsic or pointer assignment operator
637 instead. */
639 tree
640 gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
642 tree type = TREE_TYPE (dest), ptr, size, call, nonalloc;
643 tree cond, then_b, else_b;
644 stmtblock_t block, cond_block, cond_block2, inner_block;
646 if ((! GFC_DESCRIPTOR_TYPE_P (type)
647 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
648 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
650 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
652 gfc_start_block (&block);
653 /* First dealloc any allocatable components in DEST. */
654 tree tem = gfc_walk_alloc_comps (dest, NULL_TREE,
655 OMP_CLAUSE_DECL (clause),
656 WALK_ALLOC_COMPS_DTOR);
657 gfc_add_expr_to_block (&block, tem);
658 /* Then copy over toplevel data. */
659 gfc_add_modify (&block, dest, src);
660 /* Finally allocate any allocatable components and copy. */
661 tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
662 WALK_ALLOC_COMPS_COPY_CTOR);
663 gfc_add_expr_to_block (&block, tem);
664 return gfc_finish_block (&block);
666 else
667 return build2_v (MODIFY_EXPR, dest, src);
670 gfc_start_block (&block);
672 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
674 then_b = gfc_walk_alloc_comps (dest, NULL_TREE, OMP_CLAUSE_DECL (clause),
675 WALK_ALLOC_COMPS_DTOR);
676 tree tem = fold_convert (pvoid_type_node,
677 GFC_DESCRIPTOR_TYPE_P (type)
678 ? gfc_conv_descriptor_data_get (dest) : dest);
679 tem = unshare_expr (tem);
680 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
681 tem, null_pointer_node);
682 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
683 then_b, build_empty_stmt (input_location));
684 gfc_add_expr_to_block (&block, tem);
687 gfc_init_block (&cond_block);
689 if (GFC_DESCRIPTOR_TYPE_P (type))
691 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
692 size = gfc_conv_descriptor_ubound_get (src, rank);
693 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
694 size,
695 gfc_conv_descriptor_lbound_get (src, rank));
696 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
697 size, gfc_index_one_node);
698 if (GFC_TYPE_ARRAY_RANK (type) > 1)
699 size = fold_build2_loc (input_location, MULT_EXPR,
700 gfc_array_index_type, size,
701 gfc_conv_descriptor_stride_get (src, rank));
702 tree esize = fold_convert (gfc_array_index_type,
703 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
704 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
705 size, esize);
706 size = unshare_expr (size);
707 size = gfc_evaluate_now (fold_convert (size_type_node, size),
708 &cond_block);
710 else
711 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
712 ptr = gfc_create_var (pvoid_type_node, NULL);
714 tree destptr = GFC_DESCRIPTOR_TYPE_P (type)
715 ? gfc_conv_descriptor_data_get (dest) : dest;
716 destptr = unshare_expr (destptr);
717 destptr = fold_convert (pvoid_type_node, destptr);
718 gfc_add_modify (&cond_block, ptr, destptr);
720 nonalloc = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
721 destptr, null_pointer_node);
722 cond = nonalloc;
723 if (GFC_DESCRIPTOR_TYPE_P (type))
725 int i;
726 for (i = 0; i < GFC_TYPE_ARRAY_RANK (type); i++)
728 tree rank = gfc_rank_cst[i];
729 tree tem = gfc_conv_descriptor_ubound_get (src, rank);
730 tem = fold_build2_loc (input_location, MINUS_EXPR,
731 gfc_array_index_type, tem,
732 gfc_conv_descriptor_lbound_get (src, rank));
733 tem = fold_build2_loc (input_location, PLUS_EXPR,
734 gfc_array_index_type, tem,
735 gfc_conv_descriptor_lbound_get (dest, rank));
736 tem = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
737 tem, gfc_conv_descriptor_ubound_get (dest,
738 rank));
739 cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
740 boolean_type_node, cond, tem);
744 gfc_init_block (&cond_block2);
746 if (GFC_DESCRIPTOR_TYPE_P (type))
748 gfc_init_block (&inner_block);
749 gfc_allocate_using_malloc (&inner_block, ptr, size, NULL_TREE);
750 then_b = gfc_finish_block (&inner_block);
752 gfc_init_block (&inner_block);
753 gfc_add_modify (&inner_block, ptr,
754 gfc_call_realloc (&inner_block, ptr, size));
755 else_b = gfc_finish_block (&inner_block);
757 gfc_add_expr_to_block (&cond_block2,
758 build3_loc (input_location, COND_EXPR,
759 void_type_node,
760 unshare_expr (nonalloc),
761 then_b, else_b));
762 gfc_add_modify (&cond_block2, dest, src);
763 gfc_conv_descriptor_data_set (&cond_block2, unshare_expr (dest), ptr);
765 else
767 gfc_allocate_using_malloc (&cond_block2, ptr, size, NULL_TREE);
768 gfc_add_modify (&cond_block2, unshare_expr (dest),
769 fold_convert (type, ptr));
771 then_b = gfc_finish_block (&cond_block2);
772 else_b = build_empty_stmt (input_location);
774 gfc_add_expr_to_block (&cond_block,
775 build3_loc (input_location, COND_EXPR,
776 void_type_node, unshare_expr (cond),
777 then_b, else_b));
779 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
780 ? gfc_conv_descriptor_data_get (src) : src;
781 srcptr = unshare_expr (srcptr);
782 srcptr = fold_convert (pvoid_type_node, srcptr);
783 call = build_call_expr_loc (input_location,
784 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
785 srcptr, size);
786 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
787 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
789 tree tem = gfc_walk_alloc_comps (src, dest,
790 OMP_CLAUSE_DECL (clause),
791 WALK_ALLOC_COMPS_COPY_CTOR);
792 gfc_add_expr_to_block (&cond_block, tem);
794 then_b = gfc_finish_block (&cond_block);
796 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_COPYIN)
798 gfc_init_block (&cond_block);
799 if (GFC_DESCRIPTOR_TYPE_P (type))
800 gfc_add_expr_to_block (&cond_block,
801 gfc_trans_dealloc_allocated (unshare_expr (dest),
802 false, NULL));
803 else
805 destptr = gfc_evaluate_now (destptr, &cond_block);
806 gfc_add_expr_to_block (&cond_block, gfc_call_free (destptr));
807 gfc_add_modify (&cond_block, unshare_expr (dest),
808 build_zero_cst (TREE_TYPE (dest)));
810 else_b = gfc_finish_block (&cond_block);
812 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
813 unshare_expr (srcptr), null_pointer_node);
814 gfc_add_expr_to_block (&block,
815 build3_loc (input_location, COND_EXPR,
816 void_type_node, cond,
817 then_b, else_b));
819 else
820 gfc_add_expr_to_block (&block, then_b);
822 return gfc_finish_block (&block);
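/* Editorial note (descriptive): this assignment operator backs clauses such
   as LASTPRIVATE and COPYIN for allocatables.  Unlike the copy constructor
   it cannot assume DEST is unallocated: DEST is (re)allocated when it is
   unallocated or its shape differs from SRC, then the payload is copied;
   for COPYIN an unallocated SRC additionally causes DEST to be
   deallocated.  */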
825 static void
826 gfc_omp_linear_clause_add_loop (stmtblock_t *block, tree dest, tree src,
827 tree add, tree nelems)
829 stmtblock_t tmpblock;
830 tree desta, srca, index = gfc_create_var (gfc_array_index_type, "S");
831 nelems = gfc_evaluate_now (nelems, block);
833 gfc_init_block (&tmpblock);
834 if (TREE_CODE (TREE_TYPE (dest)) == ARRAY_TYPE)
836 desta = gfc_build_array_ref (dest, index, NULL);
837 srca = gfc_build_array_ref (src, index, NULL);
839 else
841 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest)));
842 tree idx = fold_build2 (MULT_EXPR, sizetype,
843 fold_convert (sizetype, index),
844 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest))));
845 desta = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
846 TREE_TYPE (dest), dest,
847 idx));
848 srca = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
849 TREE_TYPE (src), src,
850 idx));
852 gfc_add_modify (&tmpblock, desta,
853 fold_build2 (PLUS_EXPR, TREE_TYPE (desta),
854 srca, add));
856 gfc_loopinfo loop;
857 gfc_init_loopinfo (&loop);
858 loop.dimen = 1;
859 loop.from[0] = gfc_index_zero_node;
860 loop.loopvar[0] = index;
861 loop.to[0] = nelems;
862 gfc_trans_scalarizing_loops (&loop, &tmpblock);
863 gfc_add_block_to_block (block, &loop.pre);
866 /* Build and return code for a constructor of DEST that initializes
867 it to SRC plus ADD (ADD is scalar integer). */
869 tree
870 gfc_omp_clause_linear_ctor (tree clause, tree dest, tree src, tree add)
872 tree type = TREE_TYPE (dest), ptr, size, nelems = NULL_TREE;
873 stmtblock_t block;
875 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
877 gfc_start_block (&block);
878 add = gfc_evaluate_now (add, &block);
880 if ((! GFC_DESCRIPTOR_TYPE_P (type)
881 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
882 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
884 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
885 if (!TYPE_DOMAIN (type)
886 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
887 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
888 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
890 nelems = fold_build2 (EXACT_DIV_EXPR, sizetype,
891 TYPE_SIZE_UNIT (type),
892 TYPE_SIZE_UNIT (TREE_TYPE (type)));
893 nelems = size_binop (MINUS_EXPR, nelems, size_one_node);
895 else
896 nelems = array_type_nelts (type);
897 nelems = fold_convert (gfc_array_index_type, nelems);
899 gfc_omp_linear_clause_add_loop (&block, dest, src, add, nelems);
900 return gfc_finish_block (&block);
903 /* Allocatable arrays in LINEAR clauses need to be allocated
904 and copied from SRC. */
905 gfc_add_modify (&block, dest, src);
906 if (GFC_DESCRIPTOR_TYPE_P (type))
908 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
909 size = gfc_conv_descriptor_ubound_get (dest, rank);
910 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
911 size,
912 gfc_conv_descriptor_lbound_get (dest, rank));
913 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
914 size, gfc_index_one_node);
915 if (GFC_TYPE_ARRAY_RANK (type) > 1)
916 size = fold_build2_loc (input_location, MULT_EXPR,
917 gfc_array_index_type, size,
918 gfc_conv_descriptor_stride_get (dest, rank));
919 tree esize = fold_convert (gfc_array_index_type,
920 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
921 nelems = gfc_evaluate_now (unshare_expr (size), &block);
922 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
923 nelems, unshare_expr (esize));
924 size = gfc_evaluate_now (fold_convert (size_type_node, size),
925 &block);
926 nelems = fold_build2_loc (input_location, MINUS_EXPR,
927 gfc_array_index_type, nelems,
928 gfc_index_one_node);
930 else
931 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
932 ptr = gfc_create_var (pvoid_type_node, NULL);
933 gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
934 if (GFC_DESCRIPTOR_TYPE_P (type))
936 gfc_conv_descriptor_data_set (&block, unshare_expr (dest), ptr);
937 tree etype = gfc_get_element_type (type);
938 ptr = fold_convert (build_pointer_type (etype), ptr);
939 tree srcptr = gfc_conv_descriptor_data_get (unshare_expr (src));
940 srcptr = fold_convert (build_pointer_type (etype), srcptr);
941 gfc_omp_linear_clause_add_loop (&block, ptr, srcptr, add, nelems);
943 else
945 gfc_add_modify (&block, unshare_expr (dest),
946 fold_convert (TREE_TYPE (dest), ptr));
947 ptr = fold_convert (TREE_TYPE (dest), ptr);
948 tree dstm = build_fold_indirect_ref (ptr);
949 tree srcm = build_fold_indirect_ref (unshare_expr (src));
950 gfc_add_modify (&block, dstm,
951 fold_build2 (PLUS_EXPR, TREE_TYPE (add), srcm, add));
953 return gfc_finish_block (&block);
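/* Editorial note (illustrative): for a LINEAR clause item such as
   linear(a:2), each copy of A is set to the original value plus the
   appropriate multiple of the step (SRC + ADD); when A is an allocatable
   array, the copy is allocated first and then initialized element-wise by
   gfc_omp_linear_clause_add_loop.  */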
956 /* Build and return code destructing DECL. Return NULL if nothing
957 to be done. */
959 tree
960 gfc_omp_clause_dtor (tree clause, tree decl)
962 tree type = TREE_TYPE (decl), tem;
964 if ((! GFC_DESCRIPTOR_TYPE_P (type)
965 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
966 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
968 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
969 return gfc_walk_alloc_comps (decl, NULL_TREE,
970 OMP_CLAUSE_DECL (clause),
971 WALK_ALLOC_COMPS_DTOR);
972 return NULL_TREE;
975 if (GFC_DESCRIPTOR_TYPE_P (type))
976 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
977 to be deallocated if they were allocated. */
978 tem = gfc_trans_dealloc_allocated (decl, false, NULL);
979 else
980 tem = gfc_call_free (decl);
981 tem = gfc_omp_unshare_expr (tem);
983 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
985 stmtblock_t block;
986 tree then_b;
988 gfc_init_block (&block);
989 gfc_add_expr_to_block (&block,
990 gfc_walk_alloc_comps (decl, NULL_TREE,
991 OMP_CLAUSE_DECL (clause),
992 WALK_ALLOC_COMPS_DTOR));
993 gfc_add_expr_to_block (&block, tem);
994 then_b = gfc_finish_block (&block);
996 tem = fold_convert (pvoid_type_node,
997 GFC_DESCRIPTOR_TYPE_P (type)
998 ? gfc_conv_descriptor_data_get (decl) : decl);
999 tem = unshare_expr (tem);
1000 tree cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1001 tem, null_pointer_node);
1002 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
1003 then_b, build_empty_stmt (input_location));
1005 return tem;
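/* Editorial note (descriptive): the destructor built here deallocates a
   privatized allocatable at the end of the construct and, for derived types
   with allocatable components, first walks and frees those components, all
   guarded by a check that the object is actually allocated.  */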
1009 void
1010 gfc_omp_finish_clause (tree c, gimple_seq *pre_p)
1012 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
1013 return;
1015 tree decl = OMP_CLAUSE_DECL (c);
1016 tree c2 = NULL_TREE, c3 = NULL_TREE, c4 = NULL_TREE;
1017 if (POINTER_TYPE_P (TREE_TYPE (decl)))
1019 if (!gfc_omp_privatize_by_reference (decl)
1020 && !GFC_DECL_GET_SCALAR_POINTER (decl)
1021 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
1022 && !GFC_DECL_CRAY_POINTEE (decl)
1023 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
1024 return;
1025 c4 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1026 OMP_CLAUSE_MAP_KIND (c4) = OMP_CLAUSE_MAP_POINTER;
1027 OMP_CLAUSE_DECL (c4) = decl;
1028 OMP_CLAUSE_SIZE (c4) = size_int (0);
1029 decl = build_fold_indirect_ref (decl);
1030 OMP_CLAUSE_DECL (c) = decl;
1031 OMP_CLAUSE_SIZE (c) = NULL_TREE;
1033 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1035 stmtblock_t block;
1036 gfc_start_block (&block);
1037 tree type = TREE_TYPE (decl);
1038 tree ptr = gfc_conv_descriptor_data_get (decl);
1039 ptr = fold_convert (build_pointer_type (char_type_node), ptr);
1040 ptr = build_fold_indirect_ref (ptr);
1041 OMP_CLAUSE_DECL (c) = ptr;
1042 c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1043 OMP_CLAUSE_MAP_KIND (c2) = OMP_CLAUSE_MAP_TO_PSET;
1044 OMP_CLAUSE_DECL (c2) = decl;
1045 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (type);
1046 c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1047 OMP_CLAUSE_MAP_KIND (c3) = OMP_CLAUSE_MAP_POINTER;
1048 OMP_CLAUSE_DECL (c3) = gfc_conv_descriptor_data_get (decl);
1049 OMP_CLAUSE_SIZE (c3) = size_int (0);
1050 tree size = create_tmp_var (gfc_array_index_type, NULL);
1051 tree elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
1052 elemsz = fold_convert (gfc_array_index_type, elemsz);
1053 if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
1054 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT)
1056 stmtblock_t cond_block;
1057 tree tem, then_b, else_b, zero, cond;
1059 gfc_init_block (&cond_block);
1060 tem = gfc_full_array_size (&cond_block, decl,
1061 GFC_TYPE_ARRAY_RANK (type));
1062 gfc_add_modify (&cond_block, size, tem);
1063 gfc_add_modify (&cond_block, size,
1064 fold_build2 (MULT_EXPR, gfc_array_index_type,
1065 size, elemsz));
1066 then_b = gfc_finish_block (&cond_block);
1067 gfc_init_block (&cond_block);
1068 zero = build_int_cst (gfc_array_index_type, 0);
1069 gfc_add_modify (&cond_block, size, zero);
1070 else_b = gfc_finish_block (&cond_block);
1071 tem = gfc_conv_descriptor_data_get (decl);
1072 tem = fold_convert (pvoid_type_node, tem);
1073 cond = fold_build2_loc (input_location, NE_EXPR,
1074 boolean_type_node, tem, null_pointer_node);
1075 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
1076 void_type_node, cond,
1077 then_b, else_b));
1079 else
1081 gfc_add_modify (&block, size,
1082 gfc_full_array_size (&block, decl,
1083 GFC_TYPE_ARRAY_RANK (type)));
1084 gfc_add_modify (&block, size,
1085 fold_build2 (MULT_EXPR, gfc_array_index_type,
1086 size, elemsz));
1088 OMP_CLAUSE_SIZE (c) = size;
1089 tree stmt = gfc_finish_block (&block);
1090 gimplify_and_add (stmt, pre_p);
1092 tree last = c;
1093 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
1094 OMP_CLAUSE_SIZE (c)
1095 = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
1096 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
1097 if (c2)
1099 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (last);
1100 OMP_CLAUSE_CHAIN (last) = c2;
1101 last = c2;
1103 if (c3)
1105 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (last);
1106 OMP_CLAUSE_CHAIN (last) = c3;
1107 last = c3;
1109 if (c4)
1111 OMP_CLAUSE_CHAIN (c4) = OMP_CLAUSE_CHAIN (last);
1112 OMP_CLAUSE_CHAIN (last) = c4;
1113 last = c4;
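/* Editorial note (sketch, for an allocatable or pointer array DECL): a plain
   map(a) on a descriptor-backed array is expanded above into several
   clauses: the original clause remapped to the array data with a
   runtime-computed byte size, an OMP_CLAUSE_MAP_TO_PSET clause for the
   descriptor itself, and OMP_CLAUSE_MAP_POINTER clauses that rebind the
   descriptor's data pointer (and, for by-reference dummies, the reference)
   on the target device.  */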
1118 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1119 disregarded in OpenMP construct, because it is going to be
1120 remapped during OpenMP lowering. SHARED is true if DECL
1121 is going to be shared, false if it is going to be privatized. */
1123 bool
1124 gfc_omp_disregard_value_expr (tree decl, bool shared)
1126 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1127 && DECL_HAS_VALUE_EXPR_P (decl))
1129 tree value = DECL_VALUE_EXPR (decl);
1131 if (TREE_CODE (value) == COMPONENT_REF
1132 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1133 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1135 /* If variable in COMMON or EQUIVALENCE is privatized, return
1136 true, as just that variable is supposed to be privatized,
1137 not the whole COMMON or whole EQUIVALENCE.
1138 For shared variables in COMMON or EQUIVALENCE, let them be
1139 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1140 from the same COMMON or EQUIVALENCE just one sharing of the
1141 whole COMMON or EQUIVALENCE is enough. */
1142 return ! shared;
1146 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
1147 return ! shared;
1149 return false;
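/* Editorial note (illustrative): for

       common /blk/ x, y
       !$omp parallel private(x)

   X is accessed through DECL_VALUE_EXPR into the COMMON block decl;
   privatizing just X must disregard that value expr (return true), while a
   shared X keeps it so that a single sharing of the whole /blk/ covers all
   of its members.  */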
1152 /* Return true if DECL that is shared iff SHARED is true should
1153 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1154 flag set. */
1156 bool
1157 gfc_omp_private_debug_clause (tree decl, bool shared)
1159 if (GFC_DECL_CRAY_POINTEE (decl))
1160 return true;
1162 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1163 && DECL_HAS_VALUE_EXPR_P (decl))
1165 tree value = DECL_VALUE_EXPR (decl);
1167 if (TREE_CODE (value) == COMPONENT_REF
1168 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1169 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1170 return shared;
1173 return false;
1176 /* Register language specific type size variables as potentially OpenMP
1177 firstprivate variables. */
1179 void
1180 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
1182 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
1184 int r;
1186 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
1187 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
1189 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
1190 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
1191 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
1193 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
1194 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
1199 static inline tree
1200 gfc_trans_add_clause (tree node, tree tail)
1202 OMP_CLAUSE_CHAIN (node) = tail;
1203 return node;
1206 static tree
1207 gfc_trans_omp_variable (gfc_symbol *sym, bool declare_simd)
1209 if (declare_simd)
1211 int cnt = 0;
1212 gfc_symbol *proc_sym;
1213 gfc_formal_arglist *f;
1215 gcc_assert (sym->attr.dummy);
1216 proc_sym = sym->ns->proc_name;
1217 if (proc_sym->attr.entry_master)
1218 ++cnt;
1219 if (gfc_return_by_reference (proc_sym))
1221 ++cnt;
1222 if (proc_sym->ts.type == BT_CHARACTER)
1223 ++cnt;
1225 for (f = gfc_sym_get_dummy_args (proc_sym); f; f = f->next)
1226 if (f->sym == sym)
1227 break;
1228 else if (f->sym)
1229 ++cnt;
1230 gcc_assert (f);
1231 return build_int_cst (integer_type_node, cnt);
1234 tree t = gfc_get_symbol_decl (sym);
1235 tree parent_decl;
1236 int parent_flag;
1237 bool return_value;
1238 bool alternate_entry;
1239 bool entry_master;
1241 return_value = sym->attr.function && sym->result == sym;
1242 alternate_entry = sym->attr.function && sym->attr.entry
1243 && sym->result == sym;
1244 entry_master = sym->attr.result
1245 && sym->ns->proc_name->attr.entry_master
1246 && !gfc_return_by_reference (sym->ns->proc_name);
1247 parent_decl = current_function_decl
1248 ? DECL_CONTEXT (current_function_decl) : NULL_TREE;
1250 if ((t == parent_decl && return_value)
1251 || (sym->ns && sym->ns->proc_name
1252 && sym->ns->proc_name->backend_decl == parent_decl
1253 && (alternate_entry || entry_master)))
1254 parent_flag = 1;
1255 else
1256 parent_flag = 0;
1258 /* Special case for assigning the return value of a function.
1259 Self recursive functions must have an explicit return value. */
1260 if (return_value && (t == current_function_decl || parent_flag))
1261 t = gfc_get_fake_result_decl (sym, parent_flag);
1263 /* Similarly for alternate entry points. */
1264 else if (alternate_entry
1265 && (sym->ns->proc_name->backend_decl == current_function_decl
1266 || parent_flag))
1268 gfc_entry_list *el = NULL;
1270 for (el = sym->ns->entries; el; el = el->next)
1271 if (sym == el->sym)
1273 t = gfc_get_fake_result_decl (sym, parent_flag);
1274 break;
1278 else if (entry_master
1279 && (sym->ns->proc_name->backend_decl == current_function_decl
1280 || parent_flag))
1281 t = gfc_get_fake_result_decl (sym, parent_flag);
1283 return t;
1286 static tree
1287 gfc_trans_omp_variable_list (enum omp_clause_code code,
1288 gfc_omp_namelist *namelist, tree list,
1289 bool declare_simd)
1291 for (; namelist != NULL; namelist = namelist->next)
1292 if (namelist->sym->attr.referenced || declare_simd)
1294 tree t = gfc_trans_omp_variable (namelist->sym, declare_simd);
1295 if (t != error_mark_node)
1297 tree node = build_omp_clause (input_location, code);
1298 OMP_CLAUSE_DECL (node) = t;
1299 list = gfc_trans_add_clause (node, list);
1302 return list;
1305 struct omp_udr_find_orig_data
1307 gfc_omp_udr *omp_udr;
1308 bool omp_orig_seen;
1311 static int
1312 omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
1313 void *data)
1315 struct omp_udr_find_orig_data *cd = (struct omp_udr_find_orig_data *) data;
1316 if ((*e)->expr_type == EXPR_VARIABLE
1317 && (*e)->symtree->n.sym == cd->omp_udr->omp_orig)
1318 cd->omp_orig_seen = true;
1320 return 0;
1323 static void
1324 gfc_trans_omp_array_reduction_or_udr (tree c, gfc_omp_namelist *n, locus where)
1326 gfc_symbol *sym = n->sym;
1327 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
1328 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
1329 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
1330 gfc_symbol omp_var_copy[4];
1331 gfc_expr *e1, *e2, *e3, *e4;
1332 gfc_ref *ref;
1333 tree decl, backend_decl, stmt, type, outer_decl;
1334 locus old_loc = gfc_current_locus;
1335 const char *iname;
1336 bool t;
1337 gfc_omp_udr *udr = n->udr ? n->udr->udr : NULL;
1339 decl = OMP_CLAUSE_DECL (c);
1340 gfc_current_locus = where;
1341 type = TREE_TYPE (decl);
1342 outer_decl = create_tmp_var_raw (type, NULL);
1343 if (TREE_CODE (decl) == PARM_DECL
1344 && TREE_CODE (type) == REFERENCE_TYPE
1345 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
1346 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
1348 decl = build_fold_indirect_ref (decl);
1349 type = TREE_TYPE (type);
1352 /* Create a fake symbol for init value. */
1353 memset (&init_val_sym, 0, sizeof (init_val_sym));
1354 init_val_sym.ns = sym->ns;
1355 init_val_sym.name = sym->name;
1356 init_val_sym.ts = sym->ts;
1357 init_val_sym.attr.referenced = 1;
1358 init_val_sym.declared_at = where;
1359 init_val_sym.attr.flavor = FL_VARIABLE;
1360 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1361 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
1362 else if (udr->initializer_ns)
1363 backend_decl = NULL;
1364 else
1365 switch (sym->ts.type)
1367 case BT_LOGICAL:
1368 case BT_INTEGER:
1369 case BT_REAL:
1370 case BT_COMPLEX:
1371 backend_decl = build_zero_cst (gfc_sym_type (&init_val_sym));
1372 break;
1373 default:
1374 backend_decl = NULL_TREE;
1375 break;
1377 init_val_sym.backend_decl = backend_decl;
1379 /* Create a fake symbol for the outer array reference. */
1380 outer_sym = *sym;
1381 if (sym->as)
1382 outer_sym.as = gfc_copy_array_spec (sym->as);
1383 outer_sym.attr.dummy = 0;
1384 outer_sym.attr.result = 0;
1385 outer_sym.attr.flavor = FL_VARIABLE;
1386 outer_sym.backend_decl = outer_decl;
1387 if (decl != OMP_CLAUSE_DECL (c))
1388 outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);
1390 /* Create fake symtrees for it. */
1391 symtree1 = gfc_new_symtree (&root1, sym->name);
1392 symtree1->n.sym = sym;
1393 gcc_assert (symtree1 == root1);
1395 symtree2 = gfc_new_symtree (&root2, sym->name);
1396 symtree2->n.sym = &init_val_sym;
1397 gcc_assert (symtree2 == root2);
1399 symtree3 = gfc_new_symtree (&root3, sym->name);
1400 symtree3->n.sym = &outer_sym;
1401 gcc_assert (symtree3 == root3);
1403 memset (omp_var_copy, 0, sizeof omp_var_copy);
1404 if (udr)
1406 omp_var_copy[0] = *udr->omp_out;
1407 omp_var_copy[1] = *udr->omp_in;
1408 *udr->omp_out = outer_sym;
1409 *udr->omp_in = *sym;
1410 if (udr->initializer_ns)
1412 omp_var_copy[2] = *udr->omp_priv;
1413 omp_var_copy[3] = *udr->omp_orig;
1414 *udr->omp_priv = *sym;
1415 *udr->omp_orig = outer_sym;
1419 /* Create expressions. */
1420 e1 = gfc_get_expr ();
1421 e1->expr_type = EXPR_VARIABLE;
1422 e1->where = where;
1423 e1->symtree = symtree1;
1424 e1->ts = sym->ts;
1425 if (sym->attr.dimension)
1427 e1->ref = ref = gfc_get_ref ();
1428 ref->type = REF_ARRAY;
1429 ref->u.ar.where = where;
1430 ref->u.ar.as = sym->as;
1431 ref->u.ar.type = AR_FULL;
1432 ref->u.ar.dimen = 0;
1434 t = gfc_resolve_expr (e1);
1435 gcc_assert (t);
1437 e2 = NULL;
1438 if (backend_decl != NULL_TREE)
1440 e2 = gfc_get_expr ();
1441 e2->expr_type = EXPR_VARIABLE;
1442 e2->where = where;
1443 e2->symtree = symtree2;
1444 e2->ts = sym->ts;
1445 t = gfc_resolve_expr (e2);
1446 gcc_assert (t);
1448 else if (udr->initializer_ns == NULL)
1450 gcc_assert (sym->ts.type == BT_DERIVED);
1451 e2 = gfc_default_initializer (&sym->ts);
1452 gcc_assert (e2);
1453 t = gfc_resolve_expr (e2);
1454 gcc_assert (t);
1456 else if (n->udr->initializer->op == EXEC_ASSIGN)
1458 e2 = gfc_copy_expr (n->udr->initializer->expr2);
1459 t = gfc_resolve_expr (e2);
1460 gcc_assert (t);
1462 if (udr && udr->initializer_ns)
1464 struct omp_udr_find_orig_data cd;
1465 cd.omp_udr = udr;
1466 cd.omp_orig_seen = false;
1467 gfc_code_walker (&n->udr->initializer,
1468 gfc_dummy_code_callback, omp_udr_find_orig, &cd);
1469 if (cd.omp_orig_seen)
1470 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
1473 e3 = gfc_copy_expr (e1);
1474 e3->symtree = symtree3;
1475 t = gfc_resolve_expr (e3);
1476 gcc_assert (t);
1478 iname = NULL;
1479 e4 = NULL;
1480 switch (OMP_CLAUSE_REDUCTION_CODE (c))
1482 case PLUS_EXPR:
1483 case MINUS_EXPR:
1484 e4 = gfc_add (e3, e1);
1485 break;
1486 case MULT_EXPR:
1487 e4 = gfc_multiply (e3, e1);
1488 break;
1489 case TRUTH_ANDIF_EXPR:
1490 e4 = gfc_and (e3, e1);
1491 break;
1492 case TRUTH_ORIF_EXPR:
1493 e4 = gfc_or (e3, e1);
1494 break;
1495 case EQ_EXPR:
1496 e4 = gfc_eqv (e3, e1);
1497 break;
1498 case NE_EXPR:
1499 e4 = gfc_neqv (e3, e1);
1500 break;
1501 case MIN_EXPR:
1502 iname = "min";
1503 break;
1504 case MAX_EXPR:
1505 iname = "max";
1506 break;
1507 case BIT_AND_EXPR:
1508 iname = "iand";
1509 break;
1510 case BIT_IOR_EXPR:
1511 iname = "ior";
1512 break;
1513 case BIT_XOR_EXPR:
1514 iname = "ieor";
1515 break;
1516 case ERROR_MARK:
1517 if (n->udr->combiner->op == EXEC_ASSIGN)
1519 gfc_free_expr (e3);
1520 e3 = gfc_copy_expr (n->udr->combiner->expr1);
1521 e4 = gfc_copy_expr (n->udr->combiner->expr2);
1522 t = gfc_resolve_expr (e3);
1523 gcc_assert (t);
1524 t = gfc_resolve_expr (e4);
1525 gcc_assert (t);
1527 break;
1528 default:
1529 gcc_unreachable ();
1531 if (iname != NULL)
1533 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
1534 intrinsic_sym.ns = sym->ns;
1535 intrinsic_sym.name = iname;
1536 intrinsic_sym.ts = sym->ts;
1537 intrinsic_sym.attr.referenced = 1;
1538 intrinsic_sym.attr.intrinsic = 1;
1539 intrinsic_sym.attr.function = 1;
1540 intrinsic_sym.result = &intrinsic_sym;
1541 intrinsic_sym.declared_at = where;
1543 symtree4 = gfc_new_symtree (&root4, iname);
1544 symtree4->n.sym = &intrinsic_sym;
1545 gcc_assert (symtree4 == root4);
1547 e4 = gfc_get_expr ();
1548 e4->expr_type = EXPR_FUNCTION;
1549 e4->where = where;
1550 e4->symtree = symtree4;
1551 e4->value.function.actual = gfc_get_actual_arglist ();
1552 e4->value.function.actual->expr = e3;
1553 e4->value.function.actual->next = gfc_get_actual_arglist ();
1554 e4->value.function.actual->next->expr = e1;
1556 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1558 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1559 e1 = gfc_copy_expr (e1);
1560 e3 = gfc_copy_expr (e3);
1561 t = gfc_resolve_expr (e4);
1562 gcc_assert (t);
1565 /* Create the init statement list. */
1566 pushlevel ();
1567 if (e2)
1568 stmt = gfc_trans_assignment (e1, e2, false, false);
1569 else
1570 stmt = gfc_trans_call (n->udr->initializer, false,
1571 NULL_TREE, NULL_TREE, false);
1572 if (TREE_CODE (stmt) != BIND_EXPR)
1573 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1574 else
1575 poplevel (0, 0);
1576 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
1578 /* Create the merge statement list. */
1579 pushlevel ();
1580 if (e4)
1581 stmt = gfc_trans_assignment (e3, e4, false, true);
1582 else
1583 stmt = gfc_trans_call (n->udr->combiner, false,
1584 NULL_TREE, NULL_TREE, false);
1585 if (TREE_CODE (stmt) != BIND_EXPR)
1586 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1587 else
1588 poplevel (0, 0);
1589 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
1591 /* And stick the placeholder VAR_DECL into the clause as well. */
1592 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;
1594 gfc_current_locus = old_loc;
1596 gfc_free_expr (e1);
1597 if (e2)
1598 gfc_free_expr (e2);
1599 gfc_free_expr (e3);
1600 if (e4)
1601 gfc_free_expr (e4);
1602 free (symtree1);
1603 free (symtree2);
1604 free (symtree3);
1605 free (symtree4);
1606 if (outer_sym.as)
1607 gfc_free_array_spec (outer_sym.as);
1609 if (udr)
1611 *udr->omp_out = omp_var_copy[0];
1612 *udr->omp_in = omp_var_copy[1];
1613 if (udr->initializer_ns)
1615 *udr->omp_priv = omp_var_copy[2];
1616 *udr->omp_orig = omp_var_copy[3];
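/* Editorial note (illustrative): the routine above covers both array
   reductions, e.g.

       real :: a(n)
       !$omp parallel do reduction(+:a)

   and user-defined reductions from !$omp declare reduction, by building fake
   symbols and expressions for the initializer and combiner and storing the
   resulting statements in OMP_CLAUSE_REDUCTION_INIT and
   OMP_CLAUSE_REDUCTION_MERGE.  */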
1621 static tree
1622 gfc_trans_omp_reduction_list (gfc_omp_namelist *namelist, tree list,
1623 locus where)
1625 for (; namelist != NULL; namelist = namelist->next)
1626 if (namelist->sym->attr.referenced)
1628 tree t = gfc_trans_omp_variable (namelist->sym, false);
1629 if (t != error_mark_node)
1631 tree node = build_omp_clause (where.lb->location,
1632 OMP_CLAUSE_REDUCTION);
1633 OMP_CLAUSE_DECL (node) = t;
1634 switch (namelist->u.reduction_op)
1636 case OMP_REDUCTION_PLUS:
1637 OMP_CLAUSE_REDUCTION_CODE (node) = PLUS_EXPR;
1638 break;
1639 case OMP_REDUCTION_MINUS:
1640 OMP_CLAUSE_REDUCTION_CODE (node) = MINUS_EXPR;
1641 break;
1642 case OMP_REDUCTION_TIMES:
1643 OMP_CLAUSE_REDUCTION_CODE (node) = MULT_EXPR;
1644 break;
1645 case OMP_REDUCTION_AND:
1646 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ANDIF_EXPR;
1647 break;
1648 case OMP_REDUCTION_OR:
1649 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ORIF_EXPR;
1650 break;
1651 case OMP_REDUCTION_EQV:
1652 OMP_CLAUSE_REDUCTION_CODE (node) = EQ_EXPR;
1653 break;
1654 case OMP_REDUCTION_NEQV:
1655 OMP_CLAUSE_REDUCTION_CODE (node) = NE_EXPR;
1656 break;
1657 case OMP_REDUCTION_MAX:
1658 OMP_CLAUSE_REDUCTION_CODE (node) = MAX_EXPR;
1659 break;
1660 case OMP_REDUCTION_MIN:
1661 OMP_CLAUSE_REDUCTION_CODE (node) = MIN_EXPR;
1662 break;
1663 case OMP_REDUCTION_IAND:
1664 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_AND_EXPR;
1665 break;
1666 case OMP_REDUCTION_IOR:
1667 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_IOR_EXPR;
1668 break;
1669 case OMP_REDUCTION_IEOR:
1670 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_XOR_EXPR;
1671 break;
1672 case OMP_REDUCTION_USER:
1673 OMP_CLAUSE_REDUCTION_CODE (node) = ERROR_MARK;
1674 break;
1675 default:
1676 gcc_unreachable ();
1678 if (namelist->sym->attr.dimension
1679 || namelist->u.reduction_op == OMP_REDUCTION_USER
1680 || namelist->sym->attr.allocatable)
1681 gfc_trans_omp_array_reduction_or_udr (node, namelist, where);
1682 list = gfc_trans_add_clause (node, list);
1685 return list;
1688 static tree
1689 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
1690 locus where, bool declare_simd = false)
1692 tree omp_clauses = NULL_TREE, chunk_size, c;
1693 int list;
1694 enum omp_clause_code clause_code;
1695 gfc_se se;
1697 if (clauses == NULL)
1698 return NULL_TREE;
1700 for (list = 0; list < OMP_LIST_NUM; list++)
1702 gfc_omp_namelist *n = clauses->lists[list];
1704 if (n == NULL)
1705 continue;
1706 switch (list)
1708 case OMP_LIST_REDUCTION:
1709 omp_clauses = gfc_trans_omp_reduction_list (n, omp_clauses, where);
1710 break;
1711 case OMP_LIST_PRIVATE:
1712 clause_code = OMP_CLAUSE_PRIVATE;
1713 goto add_clause;
1714 case OMP_LIST_SHARED:
1715 clause_code = OMP_CLAUSE_SHARED;
1716 goto add_clause;
1717 case OMP_LIST_FIRSTPRIVATE:
1718 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
1719 goto add_clause;
1720 case OMP_LIST_LASTPRIVATE:
1721 clause_code = OMP_CLAUSE_LASTPRIVATE;
1722 goto add_clause;
1723 case OMP_LIST_COPYIN:
1724 clause_code = OMP_CLAUSE_COPYIN;
1725 goto add_clause;
1726 case OMP_LIST_COPYPRIVATE:
1727 clause_code = OMP_CLAUSE_COPYPRIVATE;
1728 goto add_clause;
1729 case OMP_LIST_UNIFORM:
1730 clause_code = OMP_CLAUSE_UNIFORM;
1731 /* FALLTHROUGH */
1732 add_clause:
1733 omp_clauses
1734 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses,
1735 declare_simd);
1736 break;
1737 case OMP_LIST_ALIGNED:
1738 for (; n != NULL; n = n->next)
1739 if (n->sym->attr.referenced || declare_simd)
1741 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1742 if (t != error_mark_node)
1744 tree node = build_omp_clause (input_location,
1745 OMP_CLAUSE_ALIGNED);
1746 OMP_CLAUSE_DECL (node) = t;
1747 if (n->expr)
1749 tree alignment_var;
1751 if (block == NULL)
1752 alignment_var = gfc_conv_constant_to_tree (n->expr);
1753 else
1755 gfc_init_se (&se, NULL);
1756 gfc_conv_expr (&se, n->expr);
1757 gfc_add_block_to_block (block, &se.pre);
1758 alignment_var = gfc_evaluate_now (se.expr, block);
1759 gfc_add_block_to_block (block, &se.post);
1761 OMP_CLAUSE_ALIGNED_ALIGNMENT (node) = alignment_var;
1763 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1766 break;
1767 case OMP_LIST_LINEAR:
1769 gfc_expr *last_step_expr = NULL;
1770 tree last_step = NULL_TREE;
1772 for (; n != NULL; n = n->next)
1774 if (n->expr)
1776 last_step_expr = n->expr;
1777 last_step = NULL_TREE;
1779 if (n->sym->attr.referenced || declare_simd)
1781 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1782 if (t != error_mark_node)
1784 tree node = build_omp_clause (input_location,
1785 OMP_CLAUSE_LINEAR);
1786 OMP_CLAUSE_DECL (node) = t;
1787 if (last_step_expr && last_step == NULL_TREE)
1789 if (block == NULL)
1790 last_step
1791 = gfc_conv_constant_to_tree (last_step_expr);
1792 else
1794 gfc_init_se (&se, NULL);
1795 gfc_conv_expr (&se, last_step_expr);
1796 gfc_add_block_to_block (block, &se.pre);
1797 last_step = gfc_evaluate_now (se.expr, block);
1798 gfc_add_block_to_block (block, &se.post);
1801 OMP_CLAUSE_LINEAR_STEP (node)
1802 = fold_convert (gfc_typenode_for_spec (&n->sym->ts),
1803 last_step);
1804 if (n->sym->attr.dimension || n->sym->attr.allocatable)
1805 OMP_CLAUSE_LINEAR_ARRAY (node) = 1;
1806 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1811 break;
1812 case OMP_LIST_DEPEND:
1813 for (; n != NULL; n = n->next)
1815 if (!n->sym->attr.referenced)
1816 continue;
1818 tree node = build_omp_clause (input_location, OMP_CLAUSE_DEPEND);
1819 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1821 tree decl = gfc_get_symbol_decl (n->sym);
1822 if (gfc_omp_privatize_by_reference (decl))
1823 decl = build_fold_indirect_ref (decl);
1824 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1826 decl = gfc_conv_descriptor_data_get (decl);
1827 decl = fold_convert (build_pointer_type (char_type_node),
1828 decl);
1829 decl = build_fold_indirect_ref (decl);
1831 else if (DECL_P (decl))
1832 TREE_ADDRESSABLE (decl) = 1;
1833 OMP_CLAUSE_DECL (node) = decl;
1835 else
1837 tree ptr;
1838 gfc_init_se (&se, NULL);
1839 if (n->expr->ref->u.ar.type == AR_ELEMENT)
1841 gfc_conv_expr_reference (&se, n->expr);
1842 ptr = se.expr;
1844 else
1846 gfc_conv_expr_descriptor (&se, n->expr);
1847 ptr = gfc_conv_array_data (se.expr);
1849 gfc_add_block_to_block (block, &se.pre);
1850 gfc_add_block_to_block (block, &se.post);
1851 ptr = fold_convert (build_pointer_type (char_type_node),
1852 ptr);
1853 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
1855 switch (n->u.depend_op)
1857 case OMP_DEPEND_IN:
1858 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_IN;
1859 break;
1860 case OMP_DEPEND_OUT:
1861 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_OUT;
1862 break;
1863 case OMP_DEPEND_INOUT:
1864 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_INOUT;
1865 break;
1866 default:
1867 gcc_unreachable ();
1869 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1871 break;
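/* map (alloc/to/from/tofrom: list) items.  For arrays with descriptors
   up to four clauses are emitted: the array data itself (NODE, with the
   full size in bytes), the descriptor mapped with OMP_CLAUSE_MAP_TO_PSET
   (NODE2), a pointer clause for the descriptor's data field (NODE3) and,
   when the symbol's decl is itself a pointer, a pointer clause for the
   symbol (NODE4).  */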
1872 case OMP_LIST_MAP:
1873 for (; n != NULL; n = n->next)
1875 if (!n->sym->attr.referenced)
1876 continue;
1878 tree node = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1879 tree node2 = NULL_TREE;
1880 tree node3 = NULL_TREE;
1881 tree node4 = NULL_TREE;
1882 tree decl = gfc_get_symbol_decl (n->sym);
1883 if (DECL_P (decl))
1884 TREE_ADDRESSABLE (decl) = 1;
1885 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1887 if (POINTER_TYPE_P (TREE_TYPE (decl)))
1889 node4 = build_omp_clause (input_location,
1890 OMP_CLAUSE_MAP);
1891 OMP_CLAUSE_MAP_KIND (node4) = OMP_CLAUSE_MAP_POINTER;
1892 OMP_CLAUSE_DECL (node4) = decl;
1893 OMP_CLAUSE_SIZE (node4) = size_int (0);
1894 decl = build_fold_indirect_ref (decl);
1896 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1898 tree type = TREE_TYPE (decl);
1899 tree ptr = gfc_conv_descriptor_data_get (decl);
1900 ptr = fold_convert (build_pointer_type (char_type_node),
1901 ptr);
1902 ptr = build_fold_indirect_ref (ptr);
1903 OMP_CLAUSE_DECL (node) = ptr;
1904 node2 = build_omp_clause (input_location,
1905 OMP_CLAUSE_MAP);
1906 OMP_CLAUSE_MAP_KIND (node2) = OMP_CLAUSE_MAP_TO_PSET;
1907 OMP_CLAUSE_DECL (node2) = decl;
1908 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
1909 node3 = build_omp_clause (input_location,
1910 OMP_CLAUSE_MAP);
1911 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
1912 OMP_CLAUSE_DECL (node3)
1913 = gfc_conv_descriptor_data_get (decl);
1914 OMP_CLAUSE_SIZE (node3) = size_int (0);
1915 if (n->sym->attr.pointer)
1917 stmtblock_t cond_block;
1918 tree size
1919 = gfc_create_var (gfc_array_index_type, NULL);
1920 tree tem, then_b, else_b, zero, cond;
1922 gfc_init_block (&cond_block);
1923 tem
1924 = gfc_full_array_size (&cond_block, decl,
1925 GFC_TYPE_ARRAY_RANK (type));
1926 gfc_add_modify (&cond_block, size, tem);
1927 then_b = gfc_finish_block (&cond_block);
1928 gfc_init_block (&cond_block);
1929 zero = build_int_cst (gfc_array_index_type, 0);
1930 gfc_add_modify (&cond_block, size, zero);
1931 else_b = gfc_finish_block (&cond_block);
1932 tem = gfc_conv_descriptor_data_get (decl);
1933 tem = fold_convert (pvoid_type_node, tem);
1934 cond = fold_build2_loc (input_location, NE_EXPR,
1935 boolean_type_node,
1936 tem, null_pointer_node);
1937 gfc_add_expr_to_block (block,
1938 build3_loc (input_location,
1939 COND_EXPR,
1940 void_type_node,
1941 cond, then_b,
1942 else_b));
1943 OMP_CLAUSE_SIZE (node) = size;
1945 else
1946 OMP_CLAUSE_SIZE (node)
1947 = gfc_full_array_size (block, decl,
1948 GFC_TYPE_ARRAY_RANK (type));
1949 tree elemsz
1950 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
1951 elemsz = fold_convert (gfc_array_index_type, elemsz);
1952 OMP_CLAUSE_SIZE (node)
1953 = fold_build2 (MULT_EXPR, gfc_array_index_type,
1954 OMP_CLAUSE_SIZE (node), elemsz);
1956 else
1957 OMP_CLAUSE_DECL (node) = decl;
1959 else
1961 tree ptr, ptr2;
1962 gfc_init_se (&se, NULL);
1963 if (n->expr->ref->u.ar.type == AR_ELEMENT)
1965 gfc_conv_expr_reference (&se, n->expr);
1966 gfc_add_block_to_block (block, &se.pre);
1967 ptr = se.expr;
1968 OMP_CLAUSE_SIZE (node)
1969 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
1971 else
1973 gfc_conv_expr_descriptor (&se, n->expr);
1974 ptr = gfc_conv_array_data (se.expr);
1975 tree type = TREE_TYPE (se.expr);
1976 gfc_add_block_to_block (block, &se.pre);
1977 OMP_CLAUSE_SIZE (node)
1978 = gfc_full_array_size (block, se.expr,
1979 GFC_TYPE_ARRAY_RANK (type));
1980 tree elemsz
1981 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
1982 elemsz = fold_convert (gfc_array_index_type, elemsz);
1983 OMP_CLAUSE_SIZE (node)
1984 = fold_build2 (MULT_EXPR, gfc_array_index_type,
1985 OMP_CLAUSE_SIZE (node), elemsz);
1987 gfc_add_block_to_block (block, &se.post);
1988 ptr = fold_convert (build_pointer_type (char_type_node),
1989 ptr);
1990 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
1992 if (POINTER_TYPE_P (TREE_TYPE (decl))
1993 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
1995 node4 = build_omp_clause (input_location,
1996 OMP_CLAUSE_MAP);
1997 OMP_CLAUSE_MAP_KIND (node4) = OMP_CLAUSE_MAP_POINTER;
1998 OMP_CLAUSE_DECL (node4) = decl;
1999 OMP_CLAUSE_SIZE (node4) = size_int (0);
2000 decl = build_fold_indirect_ref (decl);
2002 ptr = fold_convert (sizetype, ptr);
2003 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2005 tree type = TREE_TYPE (decl);
2006 ptr2 = gfc_conv_descriptor_data_get (decl);
2007 node2 = build_omp_clause (input_location,
2008 OMP_CLAUSE_MAP);
2009 OMP_CLAUSE_MAP_KIND (node2) = OMP_CLAUSE_MAP_TO_PSET;
2010 OMP_CLAUSE_DECL (node2) = decl;
2011 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
2012 node3 = build_omp_clause (input_location,
2013 OMP_CLAUSE_MAP);
2014 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
2015 OMP_CLAUSE_DECL (node3)
2016 = gfc_conv_descriptor_data_get (decl);
2018 else
2020 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
2021 ptr2 = build_fold_addr_expr (decl);
2022 else
2024 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl)));
2025 ptr2 = decl;
2027 node3 = build_omp_clause (input_location,
2028 OMP_CLAUSE_MAP);
2029 OMP_CLAUSE_MAP_KIND (node3) = OMP_CLAUSE_MAP_POINTER;
2030 OMP_CLAUSE_DECL (node3) = decl;
2032 ptr2 = fold_convert (sizetype, ptr2);
2033 OMP_CLAUSE_SIZE (node3)
2034 = fold_build2 (MINUS_EXPR, sizetype, ptr, ptr2);
2036 switch (n->u.map_op)
2038 case OMP_MAP_ALLOC:
2039 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_ALLOC;
2040 break;
2041 case OMP_MAP_TO:
2042 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_TO;
2043 break;
2044 case OMP_MAP_FROM:
2045 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_FROM;
2046 break;
2047 case OMP_MAP_TOFROM:
2048 OMP_CLAUSE_MAP_KIND (node) = OMP_CLAUSE_MAP_TOFROM;
2049 break;
2050 default:
2051 gcc_unreachable ();
2053 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2054 if (node2)
2055 omp_clauses = gfc_trans_add_clause (node2, omp_clauses);
2056 if (node3)
2057 omp_clauses = gfc_trans_add_clause (node3, omp_clauses);
2058 if (node4)
2059 omp_clauses = gfc_trans_add_clause (node4, omp_clauses);
2061 break;
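/* to (list) / from (list) items of !$omp target update.  Whole objects
   and single array elements are transferred directly; other array
   references go through gfc_conv_expr_descriptor and transfer the
   descriptor's data, with the size computed from the rank and element
   size.  */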
2062 case OMP_LIST_TO:
2063 case OMP_LIST_FROM:
2064 for (; n != NULL; n = n->next)
2066 if (!n->sym->attr.referenced)
2067 continue;
2069 tree node = build_omp_clause (input_location,
2070 list == OMP_LIST_TO
2071 ? OMP_CLAUSE_TO : OMP_CLAUSE_FROM);
2072 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
2074 tree decl = gfc_get_symbol_decl (n->sym);
2075 if (gfc_omp_privatize_by_reference (decl))
2076 decl = build_fold_indirect_ref (decl);
2077 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2079 tree type = TREE_TYPE (decl);
2080 tree ptr = gfc_conv_descriptor_data_get (decl);
2081 ptr = fold_convert (build_pointer_type (char_type_node),
2082 ptr);
2083 ptr = build_fold_indirect_ref (ptr);
2084 OMP_CLAUSE_DECL (node) = ptr;
2085 OMP_CLAUSE_SIZE (node)
2086 = gfc_full_array_size (block, decl,
2087 GFC_TYPE_ARRAY_RANK (type));
2088 tree elemsz
2089 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2090 elemsz = fold_convert (gfc_array_index_type, elemsz);
2091 OMP_CLAUSE_SIZE (node)
2092 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2093 OMP_CLAUSE_SIZE (node), elemsz);
2095 else
2096 OMP_CLAUSE_DECL (node) = decl;
2098 else
2100 tree ptr;
2101 gfc_init_se (&se, NULL);
2102 if (n->expr->ref->u.ar.type == AR_ELEMENT)
2104 gfc_conv_expr_reference (&se, n->expr);
2105 ptr = se.expr;
2106 gfc_add_block_to_block (block, &se.pre);
2107 OMP_CLAUSE_SIZE (node)
2108 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
2110 else
2112 gfc_conv_expr_descriptor (&se, n->expr);
2113 ptr = gfc_conv_array_data (se.expr);
2114 tree type = TREE_TYPE (se.expr);
2115 gfc_add_block_to_block (block, &se.pre);
2116 OMP_CLAUSE_SIZE (node)
2117 = gfc_full_array_size (block, se.expr,
2118 GFC_TYPE_ARRAY_RANK (type));
2119 tree elemsz
2120 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2121 elemsz = fold_convert (gfc_array_index_type, elemsz);
2122 OMP_CLAUSE_SIZE (node)
2123 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2124 OMP_CLAUSE_SIZE (node), elemsz);
2126 gfc_add_block_to_block (block, &se.post);
2127 ptr = fold_convert (build_pointer_type (char_type_node),
2128 ptr);
2129 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
2131 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2133 break;
2134 default:
2135 break;
2139 if (clauses->if_expr)
2141 tree if_var;
2143 gfc_init_se (&se, NULL);
2144 gfc_conv_expr (&se, clauses->if_expr);
2145 gfc_add_block_to_block (block, &se.pre);
2146 if_var = gfc_evaluate_now (se.expr, block);
2147 gfc_add_block_to_block (block, &se.post);
2149 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
2150 OMP_CLAUSE_IF_EXPR (c) = if_var;
2151 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2154 if (clauses->final_expr)
2156 tree final_var;
2158 gfc_init_se (&se, NULL);
2159 gfc_conv_expr (&se, clauses->final_expr);
2160 gfc_add_block_to_block (block, &se.pre);
2161 final_var = gfc_evaluate_now (se.expr, block);
2162 gfc_add_block_to_block (block, &se.post);
2164 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
2165 OMP_CLAUSE_FINAL_EXPR (c) = final_var;
2166 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2169 if (clauses->num_threads)
2171 tree num_threads;
2173 gfc_init_se (&se, NULL);
2174 gfc_conv_expr (&se, clauses->num_threads);
2175 gfc_add_block_to_block (block, &se.pre);
2176 num_threads = gfc_evaluate_now (se.expr, block);
2177 gfc_add_block_to_block (block, &se.post);
2179 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
2180 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
2181 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2184 chunk_size = NULL_TREE;
2185 if (clauses->chunk_size)
2187 gfc_init_se (&se, NULL);
2188 gfc_conv_expr (&se, clauses->chunk_size);
2189 gfc_add_block_to_block (block, &se.pre);
2190 chunk_size = gfc_evaluate_now (se.expr, block);
2191 gfc_add_block_to_block (block, &se.post);
2194 if (clauses->sched_kind != OMP_SCHED_NONE)
2196 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
2197 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2198 switch (clauses->sched_kind)
2200 case OMP_SCHED_STATIC:
2201 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
2202 break;
2203 case OMP_SCHED_DYNAMIC:
2204 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
2205 break;
2206 case OMP_SCHED_GUIDED:
2207 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
2208 break;
2209 case OMP_SCHED_RUNTIME:
2210 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
2211 break;
2212 case OMP_SCHED_AUTO:
2213 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
2214 break;
2215 default:
2216 gcc_unreachable ();
2218 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2221 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
2223 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
2224 switch (clauses->default_sharing)
2226 case OMP_DEFAULT_NONE:
2227 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
2228 break;
2229 case OMP_DEFAULT_SHARED:
2230 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
2231 break;
2232 case OMP_DEFAULT_PRIVATE:
2233 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
2234 break;
2235 case OMP_DEFAULT_FIRSTPRIVATE:
2236 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2237 break;
2238 default:
2239 gcc_unreachable ();
2241 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2244 if (clauses->nowait)
2246 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
2247 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2250 if (clauses->ordered)
2252 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
2253 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2256 if (clauses->untied)
2258 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
2259 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2262 if (clauses->mergeable)
2264 c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
2265 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2268 if (clauses->collapse)
2270 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
2271 OMP_CLAUSE_COLLAPSE_EXPR (c)
2272 = build_int_cst (integer_type_node, clauses->collapse);
2273 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2276 if (clauses->inbranch)
2278 c = build_omp_clause (where.lb->location, OMP_CLAUSE_INBRANCH);
2279 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2282 if (clauses->notinbranch)
2284 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOTINBRANCH);
2285 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2288 switch (clauses->cancel)
2290 case OMP_CANCEL_UNKNOWN:
2291 break;
2292 case OMP_CANCEL_PARALLEL:
2293 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PARALLEL);
2294 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2295 break;
2296 case OMP_CANCEL_SECTIONS:
2297 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SECTIONS);
2298 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2299 break;
2300 case OMP_CANCEL_DO:
2301 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FOR);
2302 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2303 break;
2304 case OMP_CANCEL_TASKGROUP:
2305 c = build_omp_clause (where.lb->location, OMP_CLAUSE_TASKGROUP);
2306 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2307 break;
2310 if (clauses->proc_bind != OMP_PROC_BIND_UNKNOWN)
2312 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PROC_BIND);
2313 switch (clauses->proc_bind)
2315 case OMP_PROC_BIND_MASTER:
2316 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_MASTER;
2317 break;
2318 case OMP_PROC_BIND_SPREAD:
2319 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_SPREAD;
2320 break;
2321 case OMP_PROC_BIND_CLOSE:
2322 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_CLOSE;
2323 break;
2324 default:
2325 gcc_unreachable ();
2327 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2330 if (clauses->safelen_expr)
2332 tree safelen_var;
2334 gfc_init_se (&se, NULL);
2335 gfc_conv_expr (&se, clauses->safelen_expr);
2336 gfc_add_block_to_block (block, &se.pre);
2337 safelen_var = gfc_evaluate_now (se.expr, block);
2338 gfc_add_block_to_block (block, &se.post);
2340 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SAFELEN);
2341 OMP_CLAUSE_SAFELEN_EXPR (c) = safelen_var;
2342 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2345 if (clauses->simdlen_expr)
2347 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SIMDLEN);
2348 OMP_CLAUSE_SIMDLEN_EXPR (c)
2349 = gfc_conv_constant_to_tree (clauses->simdlen_expr);
2350 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2353 if (clauses->num_teams)
2355 tree num_teams;
2357 gfc_init_se (&se, NULL);
2358 gfc_conv_expr (&se, clauses->num_teams);
2359 gfc_add_block_to_block (block, &se.pre);
2360 num_teams = gfc_evaluate_now (se.expr, block);
2361 gfc_add_block_to_block (block, &se.post);
2363 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_TEAMS);
2364 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
2365 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2368 if (clauses->device)
2370 tree device;
2372 gfc_init_se (&se, NULL);
2373 gfc_conv_expr (&se, clauses->device);
2374 gfc_add_block_to_block (block, &se.pre);
2375 device = gfc_evaluate_now (se.expr, block);
2376 gfc_add_block_to_block (block, &se.post);
2378 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEVICE);
2379 OMP_CLAUSE_DEVICE_ID (c) = device;
2380 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2383 if (clauses->thread_limit)
2385 tree thread_limit;
2387 gfc_init_se (&se, NULL);
2388 gfc_conv_expr (&se, clauses->thread_limit);
2389 gfc_add_block_to_block (block, &se.pre);
2390 thread_limit = gfc_evaluate_now (se.expr, block);
2391 gfc_add_block_to_block (block, &se.post);
2393 c = build_omp_clause (where.lb->location, OMP_CLAUSE_THREAD_LIMIT);
2394 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
2395 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2398 chunk_size = NULL_TREE;
2399 if (clauses->dist_chunk_size)
2401 gfc_init_se (&se, NULL);
2402 gfc_conv_expr (&se, clauses->dist_chunk_size);
2403 gfc_add_block_to_block (block, &se.pre);
2404 chunk_size = gfc_evaluate_now (se.expr, block);
2405 gfc_add_block_to_block (block, &se.post);
2408 if (clauses->dist_sched_kind != OMP_SCHED_NONE)
2410 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DIST_SCHEDULE);
2411 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2412 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2415 return nreverse (omp_clauses);
2418 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2420 static tree
2421 gfc_trans_omp_code (gfc_code *code, bool force_empty)
2423 tree stmt;
2425 pushlevel ();
2426 stmt = gfc_trans_code (code);
2427 if (TREE_CODE (stmt) != BIND_EXPR)
2429 if (!IS_EMPTY_STMT (stmt) || force_empty)
2431 tree block = poplevel (1, 0);
2432 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
2434 else
2435 poplevel (0, 0);
2437 else
2438 poplevel (0, 0);
2439 return stmt;
2443 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
2444 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
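/* Translate !$omp atomic (read, write, update and capture forms).  The
   statements inside the construct are plain assignments; depending on
   the form this builds OMP_ATOMIC_READ, OMP_ATOMIC,
   OMP_ATOMIC_CAPTURE_OLD or OMP_ATOMIC_CAPTURE_NEW, recognizing the
   permitted operators and the min/max/iand/ior/ieor intrinsics on the
   right-hand side.  */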
2446 static tree
2447 gfc_trans_omp_atomic (gfc_code *code)
2449 gfc_code *atomic_code = code;
2450 gfc_se lse;
2451 gfc_se rse;
2452 gfc_se vse;
2453 gfc_expr *expr2, *e;
2454 gfc_symbol *var;
2455 stmtblock_t block;
2456 tree lhsaddr, type, rhs, x;
2457 enum tree_code op = ERROR_MARK;
2458 enum tree_code aop = OMP_ATOMIC;
2459 bool var_on_left = false;
2460 bool seq_cst = (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SEQ_CST) != 0;
2462 code = code->block->next;
2463 gcc_assert (code->op == EXEC_ASSIGN);
2464 var = code->expr1->symtree->n.sym;
2466 gfc_init_se (&lse, NULL);
2467 gfc_init_se (&rse, NULL);
2468 gfc_init_se (&vse, NULL);
2469 gfc_start_block (&block);
2471 expr2 = code->expr2;
2472 if (expr2->expr_type == EXPR_FUNCTION
2473 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2474 expr2 = expr2->value.function.actual->expr;
2476 switch (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2478 case GFC_OMP_ATOMIC_READ:
2479 gfc_conv_expr (&vse, code->expr1);
2480 gfc_add_block_to_block (&block, &vse.pre);
2482 gfc_conv_expr (&lse, expr2);
2483 gfc_add_block_to_block (&block, &lse.pre);
2484 type = TREE_TYPE (lse.expr);
2485 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2487 x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
2488 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2489 x = convert (TREE_TYPE (vse.expr), x);
2490 gfc_add_modify (&block, vse.expr, x);
2492 gfc_add_block_to_block (&block, &lse.pre);
2493 gfc_add_block_to_block (&block, &rse.pre);
2495 return gfc_finish_block (&block);
2496 case GFC_OMP_ATOMIC_CAPTURE:
2497 aop = OMP_ATOMIC_CAPTURE_NEW;
2498 if (expr2->expr_type == EXPR_VARIABLE)
2500 aop = OMP_ATOMIC_CAPTURE_OLD;
2501 gfc_conv_expr (&vse, code->expr1);
2502 gfc_add_block_to_block (&block, &vse.pre);
2504 gfc_conv_expr (&lse, expr2);
2505 gfc_add_block_to_block (&block, &lse.pre);
2506 gfc_init_se (&lse, NULL);
2507 code = code->next;
2508 var = code->expr1->symtree->n.sym;
2509 expr2 = code->expr2;
2510 if (expr2->expr_type == EXPR_FUNCTION
2511 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2512 expr2 = expr2->value.function.actual->expr;
2514 break;
2515 default:
2516 break;
2519 gfc_conv_expr (&lse, code->expr1);
2520 gfc_add_block_to_block (&block, &lse.pre);
2521 type = TREE_TYPE (lse.expr);
2522 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2524 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2525 == GFC_OMP_ATOMIC_WRITE)
2526 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2528 gfc_conv_expr (&rse, expr2);
2529 gfc_add_block_to_block (&block, &rse.pre);
2531 else if (expr2->expr_type == EXPR_OP)
2533 gfc_expr *e;
2534 switch (expr2->value.op.op)
2536 case INTRINSIC_PLUS:
2537 op = PLUS_EXPR;
2538 break;
2539 case INTRINSIC_TIMES:
2540 op = MULT_EXPR;
2541 break;
2542 case INTRINSIC_MINUS:
2543 op = MINUS_EXPR;
2544 break;
2545 case INTRINSIC_DIVIDE:
2546 if (expr2->ts.type == BT_INTEGER)
2547 op = TRUNC_DIV_EXPR;
2548 else
2549 op = RDIV_EXPR;
2550 break;
2551 case INTRINSIC_AND:
2552 op = TRUTH_ANDIF_EXPR;
2553 break;
2554 case INTRINSIC_OR:
2555 op = TRUTH_ORIF_EXPR;
2556 break;
2557 case INTRINSIC_EQV:
2558 op = EQ_EXPR;
2559 break;
2560 case INTRINSIC_NEQV:
2561 op = NE_EXPR;
2562 break;
2563 default:
2564 gcc_unreachable ();
2566 e = expr2->value.op.op1;
2567 if (e->expr_type == EXPR_FUNCTION
2568 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2569 e = e->value.function.actual->expr;
2570 if (e->expr_type == EXPR_VARIABLE
2571 && e->symtree != NULL
2572 && e->symtree->n.sym == var)
2574 expr2 = expr2->value.op.op2;
2575 var_on_left = true;
2577 else
2579 e = expr2->value.op.op2;
2580 if (e->expr_type == EXPR_FUNCTION
2581 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2582 e = e->value.function.actual->expr;
2583 gcc_assert (e->expr_type == EXPR_VARIABLE
2584 && e->symtree != NULL
2585 && e->symtree->n.sym == var);
2586 expr2 = expr2->value.op.op1;
2587 var_on_left = false;
2589 gfc_conv_expr (&rse, expr2);
2590 gfc_add_block_to_block (&block, &rse.pre);
2592 else
2594 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
2595 switch (expr2->value.function.isym->id)
2597 case GFC_ISYM_MIN:
2598 op = MIN_EXPR;
2599 break;
2600 case GFC_ISYM_MAX:
2601 op = MAX_EXPR;
2602 break;
2603 case GFC_ISYM_IAND:
2604 op = BIT_AND_EXPR;
2605 break;
2606 case GFC_ISYM_IOR:
2607 op = BIT_IOR_EXPR;
2608 break;
2609 case GFC_ISYM_IEOR:
2610 op = BIT_XOR_EXPR;
2611 break;
2612 default:
2613 gcc_unreachable ();
2615 e = expr2->value.function.actual->expr;
2616 gcc_assert (e->expr_type == EXPR_VARIABLE
2617 && e->symtree != NULL
2618 && e->symtree->n.sym == var);
2620 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
2621 gfc_add_block_to_block (&block, &rse.pre);
2622 if (expr2->value.function.actual->next->next != NULL)
2624 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
2625 gfc_actual_arglist *arg;
2627 gfc_add_modify (&block, accum, rse.expr);
2628 for (arg = expr2->value.function.actual->next->next; arg;
2629 arg = arg->next)
2631 gfc_init_block (&rse.pre);
2632 gfc_conv_expr (&rse, arg->expr);
2633 gfc_add_block_to_block (&block, &rse.pre);
2634 x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
2635 accum, rse.expr);
2636 gfc_add_modify (&block, accum, x);
2639 rse.expr = accum;
2642 expr2 = expr2->value.function.actual->next->expr;
2645 lhsaddr = save_expr (lhsaddr);
2646 rhs = gfc_evaluate_now (rse.expr, &block);
2648 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2649 == GFC_OMP_ATOMIC_WRITE)
2650 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2651 x = rhs;
2652 else
2654 x = convert (TREE_TYPE (rhs),
2655 build_fold_indirect_ref_loc (input_location, lhsaddr));
2656 if (var_on_left)
2657 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
2658 else
2659 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
2662 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
2663 && TREE_CODE (type) != COMPLEX_TYPE)
2664 x = fold_build1_loc (input_location, REALPART_EXPR,
2665 TREE_TYPE (TREE_TYPE (rhs)), x);
2667 gfc_add_block_to_block (&block, &lse.pre);
2668 gfc_add_block_to_block (&block, &rse.pre);
2670 if (aop == OMP_ATOMIC)
2672 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
2673 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2674 gfc_add_expr_to_block (&block, x);
2676 else
2678 if (aop == OMP_ATOMIC_CAPTURE_NEW)
2680 code = code->next;
2681 expr2 = code->expr2;
2682 if (expr2->expr_type == EXPR_FUNCTION
2683 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2684 expr2 = expr2->value.function.actual->expr;
2686 gcc_assert (expr2->expr_type == EXPR_VARIABLE);
2687 gfc_conv_expr (&vse, code->expr1);
2688 gfc_add_block_to_block (&block, &vse.pre);
2690 gfc_init_se (&lse, NULL);
2691 gfc_conv_expr (&lse, expr2);
2692 gfc_add_block_to_block (&block, &lse.pre);
2694 x = build2 (aop, type, lhsaddr, convert (type, x));
2695 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2696 x = convert (TREE_TYPE (vse.expr), x);
2697 gfc_add_modify (&block, vse.expr, x);
2700 return gfc_finish_block (&block);
2703 static tree
2704 gfc_trans_omp_barrier (void)
2706 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
2707 return build_call_expr_loc (input_location, decl, 0);
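/* Translate !$omp cancel into a call to GOMP_cancel.  The first argument
   encodes the construct being cancelled (1 = parallel, 2 = do,
   4 = sections, 8 = taskgroup); the second is the IF clause expression
   converted to the builtin's bool type, or true when no IF clause is
   given.  */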
2710 static tree
2711 gfc_trans_omp_cancel (gfc_code *code)
2713 int mask = 0;
2714 tree ifc = boolean_true_node;
2715 stmtblock_t block;
2716 switch (code->ext.omp_clauses->cancel)
2718 case OMP_CANCEL_PARALLEL: mask = 1; break;
2719 case OMP_CANCEL_DO: mask = 2; break;
2720 case OMP_CANCEL_SECTIONS: mask = 4; break;
2721 case OMP_CANCEL_TASKGROUP: mask = 8; break;
2722 default: gcc_unreachable ();
2724 gfc_start_block (&block);
2725 if (code->ext.omp_clauses->if_expr)
2727 gfc_se se;
2728 tree if_var;
2730 gfc_init_se (&se, NULL);
2731 gfc_conv_expr (&se, code->ext.omp_clauses->if_expr);
2732 gfc_add_block_to_block (&block, &se.pre);
2733 if_var = gfc_evaluate_now (se.expr, &block);
2734 gfc_add_block_to_block (&block, &se.post);
2735 tree type = TREE_TYPE (if_var);
2736 ifc = fold_build2_loc (input_location, NE_EXPR,
2737 boolean_type_node, if_var,
2738 build_zero_cst (type));
2740 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
2741 tree c_bool_type = TREE_TYPE (TREE_TYPE (decl));
2742 ifc = fold_convert (c_bool_type, ifc);
2743 gfc_add_expr_to_block (&block,
2744 build_call_expr_loc (input_location, decl, 2,
2745 build_int_cst (integer_type_node,
2746 mask), ifc));
2747 return gfc_finish_block (&block);
2750 static tree
2751 gfc_trans_omp_cancellation_point (gfc_code *code)
2753 int mask = 0;
2754 switch (code->ext.omp_clauses->cancel)
2756 case OMP_CANCEL_PARALLEL: mask = 1; break;
2757 case OMP_CANCEL_DO: mask = 2; break;
2758 case OMP_CANCEL_SECTIONS: mask = 4; break;
2759 case OMP_CANCEL_TASKGROUP: mask = 8; break;
2760 default: gcc_unreachable ();
2762 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
2763 return build_call_expr_loc (input_location, decl, 1,
2764 build_int_cst (integer_type_node, mask));
2767 static tree
2768 gfc_trans_omp_critical (gfc_code *code)
2770 tree name = NULL_TREE, stmt;
2771 if (code->ext.omp_name != NULL)
2772 name = get_identifier (code->ext.omp_name);
2773 stmt = gfc_trans_code (code->block->next);
2774 return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
2777 typedef struct dovar_init_d {
2778 tree var;
2779 tree init;
2780 } dovar_init;
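/* Translate the loop nest of !$omp do, !$omp simd or !$omp distribute
   into an OMP_FOR-family tree.  Each of the COLLAPSE loops contributes
   one slot in the init/cond/incr vectors; loops with step 1 or -1 use
   the DO variable directly, otherwise an artificial count variable is
   introduced and the DO variable is computed from it at the top of the
   loop body.  */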
2783 static tree
2784 gfc_trans_omp_do (gfc_code *code, gfc_exec_op op, stmtblock_t *pblock,
2785 gfc_omp_clauses *do_clauses, tree par_clauses)
2787 gfc_se se;
2788 tree dovar, stmt, from, to, step, type, init, cond, incr;
2789 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
2790 stmtblock_t block;
2791 stmtblock_t body;
2792 gfc_omp_clauses *clauses = code->ext.omp_clauses;
2793 int i, collapse = clauses->collapse;
2794 vec<dovar_init> inits = vNULL;
2795 dovar_init *di;
2796 unsigned ix;
2798 if (collapse <= 0)
2799 collapse = 1;
2801 code = code->block->next;
2802 gcc_assert (code->op == EXEC_DO);
2804 init = make_tree_vec (collapse);
2805 cond = make_tree_vec (collapse);
2806 incr = make_tree_vec (collapse);
2808 if (pblock == NULL)
2810 gfc_start_block (&block);
2811 pblock = &block;
2814 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
2816 for (i = 0; i < collapse; i++)
2818 int simple = 0;
2819 int dovar_found = 0;
2820 tree dovar_decl;
2822 if (clauses)
2824 gfc_omp_namelist *n = NULL;
2825 if (op != EXEC_OMP_DISTRIBUTE)
2826 for (n = clauses->lists[(op == EXEC_OMP_SIMD && collapse == 1)
2827 ? OMP_LIST_LINEAR : OMP_LIST_LASTPRIVATE];
2828 n != NULL; n = n->next)
2829 if (code->ext.iterator->var->symtree->n.sym == n->sym)
2830 break;
2831 if (n != NULL)
2832 dovar_found = 1;
2833 else if (n == NULL && op != EXEC_OMP_SIMD)
2834 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
2835 if (code->ext.iterator->var->symtree->n.sym == n->sym)
2836 break;
2837 if (n != NULL)
2838 dovar_found++;
2841 /* Evaluate all the expressions in the iterator. */
2842 gfc_init_se (&se, NULL);
2843 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
2844 gfc_add_block_to_block (pblock, &se.pre);
2845 dovar = se.expr;
2846 type = TREE_TYPE (dovar);
2847 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
2849 gfc_init_se (&se, NULL);
2850 gfc_conv_expr_val (&se, code->ext.iterator->start);
2851 gfc_add_block_to_block (pblock, &se.pre);
2852 from = gfc_evaluate_now (se.expr, pblock);
2854 gfc_init_se (&se, NULL);
2855 gfc_conv_expr_val (&se, code->ext.iterator->end);
2856 gfc_add_block_to_block (pblock, &se.pre);
2857 to = gfc_evaluate_now (se.expr, pblock);
2859 gfc_init_se (&se, NULL);
2860 gfc_conv_expr_val (&se, code->ext.iterator->step);
2861 gfc_add_block_to_block (pblock, &se.pre);
2862 step = gfc_evaluate_now (se.expr, pblock);
2863 dovar_decl = dovar;
2865 /* Special case simple loops. */
2866 if (TREE_CODE (dovar) == VAR_DECL)
2868 if (integer_onep (step))
2869 simple = 1;
2870 else if (tree_int_cst_equal (step, integer_minus_one_node))
2871 simple = -1;
2873 else
2874 dovar_decl
2875 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym,
2876 false);
2878 /* Loop body. */
2879 if (simple)
2881 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
2882 /* The condition should not be folded. */
2883 TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
2884 ? LE_EXPR : GE_EXPR,
2885 boolean_type_node, dovar, to);
2886 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
2887 type, dovar, step);
2888 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
2889 MODIFY_EXPR,
2890 type, dovar,
2891 TREE_VEC_ELT (incr, i));
2893 else
2895 /* STEP is not 1 or -1. Use:
2896 for (count = 0; count < (to + step - from) / step; count++)
2898 dovar = from + count * step;
2899 body;
2900 cycle_label:;
2901 } */
2902 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
2903 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
2904 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
2905 step);
2906 tmp = gfc_evaluate_now (tmp, pblock);
2907 count = gfc_create_var (type, "count");
2908 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
2909 build_int_cst (type, 0));
2910 /* The condition should not be folded. */
2911 TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
2912 boolean_type_node,
2913 count, tmp);
2914 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
2915 type, count,
2916 build_int_cst (type, 1));
2917 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
2918 MODIFY_EXPR, type, count,
2919 TREE_VEC_ELT (incr, i));
2921 /* Initialize DOVAR. */
2922 tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
2923 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
2924 dovar_init e = {dovar, tmp};
2925 inits.safe_push (e);
2928 if (!dovar_found)
2930 if (op == EXEC_OMP_SIMD)
2932 if (collapse == 1)
2934 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
2935 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
2937 else
2938 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
2939 if (!simple)
2940 dovar_found = 2;
2942 else
2943 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
2944 OMP_CLAUSE_DECL (tmp) = dovar_decl;
2945 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
2947 if (dovar_found == 2)
2949 tree c = NULL;
2951 tmp = NULL;
2952 if (!simple)
2954 /* If dovar is lastprivate, but a different counter is used,
2955 dovar += step needs to be added to
2956 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
2957 will have the value on entry to the last iteration rather
2958 than the value after the iterator increment. */
2959 tmp = gfc_evaluate_now (step, pblock);
2960 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
2961 tmp);
2962 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
2963 dovar, tmp);
2964 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2965 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
2966 && OMP_CLAUSE_DECL (c) == dovar_decl)
2968 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
2969 break;
2971 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2972 && OMP_CLAUSE_DECL (c) == dovar_decl)
2974 OMP_CLAUSE_LINEAR_STMT (c) = tmp;
2975 break;
2978 if (c == NULL && op == EXEC_OMP_DO && par_clauses != NULL)
2980 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2981 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
2982 && OMP_CLAUSE_DECL (c) == dovar_decl)
2984 tree l = build_omp_clause (input_location,
2985 OMP_CLAUSE_LASTPRIVATE);
2986 OMP_CLAUSE_DECL (l) = dovar_decl;
2987 OMP_CLAUSE_CHAIN (l) = omp_clauses;
2988 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
2989 omp_clauses = l;
2990 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
2991 break;
2994 gcc_assert (simple || c != NULL);
2996 if (!simple)
2998 if (op != EXEC_OMP_SIMD)
2999 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
3000 else if (collapse == 1)
3002 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
3003 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
3004 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp) = 1;
3005 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp) = 1;
3007 else
3008 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
3009 OMP_CLAUSE_DECL (tmp) = count;
3010 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
3013 if (i + 1 < collapse)
3014 code = code->block->next;
3017 if (pblock != &block)
3019 pushlevel ();
3020 gfc_start_block (&block);
3023 gfc_start_block (&body);
3025 FOR_EACH_VEC_ELT (inits, ix, di)
3026 gfc_add_modify (&body, di->var, di->init);
3027 inits.release ();
3029 /* Cycle statement is implemented with a goto. Exit statement must not be
3030 present for this loop. */
3031 cycle_label = gfc_build_label_decl (NULL_TREE);
3033 /* Put these labels where they can be found later. */
3035 code->cycle_label = cycle_label;
3036 code->exit_label = NULL_TREE;
3038 /* Main loop body. */
3039 tmp = gfc_trans_omp_code (code->block->next, true);
3040 gfc_add_expr_to_block (&body, tmp);
3042 /* Label for cycle statements (if needed). */
3043 if (TREE_USED (cycle_label))
3045 tmp = build1_v (LABEL_EXPR, cycle_label);
3046 gfc_add_expr_to_block (&body, tmp);
3049 /* End of loop body. */
3050 switch (op)
3052 case EXEC_OMP_SIMD: stmt = make_node (OMP_SIMD); break;
3053 case EXEC_OMP_DO: stmt = make_node (OMP_FOR); break;
3054 case EXEC_OMP_DISTRIBUTE: stmt = make_node (OMP_DISTRIBUTE); break;
3055 default: gcc_unreachable ();
3058 TREE_TYPE (stmt) = void_type_node;
3059 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
3060 OMP_FOR_CLAUSES (stmt) = omp_clauses;
3061 OMP_FOR_INIT (stmt) = init;
3062 OMP_FOR_COND (stmt) = cond;
3063 OMP_FOR_INCR (stmt) = incr;
3064 gfc_add_expr_to_block (&block, stmt);
3066 return gfc_finish_block (&block);
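/* !$omp flush is lowered to a full memory barrier (__sync_synchronize),
   independently of any flush list.  */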
3069 static tree
3070 gfc_trans_omp_flush (void)
3072 tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
3073 return build_call_expr_loc (input_location, decl, 0);
3076 static tree
3077 gfc_trans_omp_master (gfc_code *code)
3079 tree stmt = gfc_trans_code (code->block->next);
3080 if (IS_EMPTY_STMT (stmt))
3081 return stmt;
3082 return build1_v (OMP_MASTER, stmt);
3085 static tree
3086 gfc_trans_omp_ordered (gfc_code *code)
3088 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
3091 static tree
3092 gfc_trans_omp_parallel (gfc_code *code)
3094 stmtblock_t block;
3095 tree stmt, omp_clauses;
3097 gfc_start_block (&block);
3098 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3099 code->loc);
3100 stmt = gfc_trans_omp_code (code->block->next, true);
3101 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3102 omp_clauses);
3103 gfc_add_expr_to_block (&block, stmt);
3104 return gfc_finish_block (&block);
3107 enum
3109 GFC_OMP_SPLIT_SIMD,
3110 GFC_OMP_SPLIT_DO,
3111 GFC_OMP_SPLIT_PARALLEL,
3112 GFC_OMP_SPLIT_DISTRIBUTE,
3113 GFC_OMP_SPLIT_TEAMS,
3114 GFC_OMP_SPLIT_TARGET,
3115 GFC_OMP_SPLIT_NUM
3118 enum
3120 GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
3121 GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
3122 GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
3123 GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
3124 GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
3125 GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
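/* Distribute the clauses of a combined construct among its constituent
   constructs.  Clauses specific to one construct (e.g. map/device to
   target, num_teams/thread_limit to teams, schedule/ordered to do,
   safelen/aligned to simd) go there; data-sharing clauses are placed,
   and where required duplicated, as coded below.  As a sketch derived
   from those assignments: for !$omp parallel do the private,
   firstprivate, lastprivate, reduction and if clauses all end up on the
   parallel, while schedule, ordered and collapse stay on the do, which
   also gets an implicit nowait.  */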
3128 static void
3129 gfc_split_omp_clauses (gfc_code *code,
3130 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM])
3132 int mask = 0, innermost = 0;
3133 memset (clausesa, 0, GFC_OMP_SPLIT_NUM * sizeof (gfc_omp_clauses));
3134 switch (code->op)
3136 case EXEC_OMP_DISTRIBUTE:
3137 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3138 break;
3139 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
3140 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3141 innermost = GFC_OMP_SPLIT_DO;
3142 break;
3143 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
3144 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL
3145 | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3146 innermost = GFC_OMP_SPLIT_SIMD;
3147 break;
3148 case EXEC_OMP_DISTRIBUTE_SIMD:
3149 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3150 innermost = GFC_OMP_SPLIT_SIMD;
3151 break;
3152 case EXEC_OMP_DO:
3153 innermost = GFC_OMP_SPLIT_DO;
3154 break;
3155 case EXEC_OMP_DO_SIMD:
3156 mask = GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3157 innermost = GFC_OMP_SPLIT_SIMD;
3158 break;
3159 case EXEC_OMP_PARALLEL:
3160 innermost = GFC_OMP_SPLIT_PARALLEL;
3161 break;
3162 case EXEC_OMP_PARALLEL_DO:
3163 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3164 innermost = GFC_OMP_SPLIT_DO;
3165 break;
3166 case EXEC_OMP_PARALLEL_DO_SIMD:
3167 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3168 innermost = GFC_OMP_SPLIT_SIMD;
3169 break;
3170 case EXEC_OMP_SIMD:
3171 innermost = GFC_OMP_SPLIT_SIMD;
3172 break;
3173 case EXEC_OMP_TARGET:
3174 innermost = GFC_OMP_SPLIT_TARGET;
3175 break;
3176 case EXEC_OMP_TARGET_TEAMS:
3177 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS;
3178 innermost = GFC_OMP_SPLIT_TEAMS;
3179 break;
3180 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3181 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3182 | GFC_OMP_MASK_DISTRIBUTE;
3183 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3184 break;
3185 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
3186 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3187 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3188 innermost = GFC_OMP_SPLIT_DO;
3189 break;
3190 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3191 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3192 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3193 innermost = GFC_OMP_SPLIT_SIMD;
3194 break;
3195 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
3196 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3197 | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3198 innermost = GFC_OMP_SPLIT_SIMD;
3199 break;
3200 case EXEC_OMP_TEAMS:
3201 innermost = GFC_OMP_SPLIT_TEAMS;
3202 break;
3203 case EXEC_OMP_TEAMS_DISTRIBUTE:
3204 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE;
3205 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3206 break;
3207 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
3208 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3209 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3210 innermost = GFC_OMP_SPLIT_DO;
3211 break;
3212 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3213 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3214 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3215 innermost = GFC_OMP_SPLIT_SIMD;
3216 break;
3217 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
3218 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3219 innermost = GFC_OMP_SPLIT_SIMD;
3220 break;
3221 default:
3222 gcc_unreachable ();
3224 if (mask == 0)
3226 clausesa[innermost] = *code->ext.omp_clauses;
3227 return;
3229 if (code->ext.omp_clauses != NULL)
3231 if (mask & GFC_OMP_MASK_TARGET)
3233 /* First the clauses that are unique to some constructs. */
3234 clausesa[GFC_OMP_SPLIT_TARGET].lists[OMP_LIST_MAP]
3235 = code->ext.omp_clauses->lists[OMP_LIST_MAP];
3236 clausesa[GFC_OMP_SPLIT_TARGET].device
3237 = code->ext.omp_clauses->device;
3239 if (mask & GFC_OMP_MASK_TEAMS)
3241 /* First the clauses that are unique to some constructs. */
3242 clausesa[GFC_OMP_SPLIT_TEAMS].num_teams
3243 = code->ext.omp_clauses->num_teams;
3244 clausesa[GFC_OMP_SPLIT_TEAMS].thread_limit
3245 = code->ext.omp_clauses->thread_limit;
3246 /* Shared and default clauses are allowed on parallel and teams. */
3247 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_SHARED]
3248 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3249 clausesa[GFC_OMP_SPLIT_TEAMS].default_sharing
3250 = code->ext.omp_clauses->default_sharing;
3252 if (mask & GFC_OMP_MASK_DISTRIBUTE)
3254 /* First the clauses that are unique to some constructs. */
3255 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_sched_kind
3256 = code->ext.omp_clauses->dist_sched_kind;
3257 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_chunk_size
3258 = code->ext.omp_clauses->dist_chunk_size;
3259 /* Duplicate collapse. */
3260 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].collapse
3261 = code->ext.omp_clauses->collapse;
3263 if (mask & GFC_OMP_MASK_PARALLEL)
3265 /* First the clauses that are unique to some constructs. */
3266 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_COPYIN]
3267 = code->ext.omp_clauses->lists[OMP_LIST_COPYIN];
3268 clausesa[GFC_OMP_SPLIT_PARALLEL].num_threads
3269 = code->ext.omp_clauses->num_threads;
3270 clausesa[GFC_OMP_SPLIT_PARALLEL].proc_bind
3271 = code->ext.omp_clauses->proc_bind;
3272 /* Shared and default clauses are allowed on parallel and teams. */
3273 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_SHARED]
3274 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3275 clausesa[GFC_OMP_SPLIT_PARALLEL].default_sharing
3276 = code->ext.omp_clauses->default_sharing;
3278 if (mask & GFC_OMP_MASK_DO)
3280 /* First the clauses that are unique to some constructs. */
3281 clausesa[GFC_OMP_SPLIT_DO].ordered
3282 = code->ext.omp_clauses->ordered;
3283 clausesa[GFC_OMP_SPLIT_DO].sched_kind
3284 = code->ext.omp_clauses->sched_kind;
3285 clausesa[GFC_OMP_SPLIT_DO].chunk_size
3286 = code->ext.omp_clauses->chunk_size;
3287 clausesa[GFC_OMP_SPLIT_DO].nowait
3288 = code->ext.omp_clauses->nowait;
3289 /* Duplicate collapse. */
3290 clausesa[GFC_OMP_SPLIT_DO].collapse
3291 = code->ext.omp_clauses->collapse;
3293 if (mask & GFC_OMP_MASK_SIMD)
3295 clausesa[GFC_OMP_SPLIT_SIMD].safelen_expr
3296 = code->ext.omp_clauses->safelen_expr;
3297 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LINEAR]
3298 = code->ext.omp_clauses->lists[OMP_LIST_LINEAR];
3299 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_ALIGNED]
3300 = code->ext.omp_clauses->lists[OMP_LIST_ALIGNED];
3301 /* Duplicate collapse. */
3302 clausesa[GFC_OMP_SPLIT_SIMD].collapse
3303 = code->ext.omp_clauses->collapse;
3305 /* The private clause is supported on all constructs but target,
3306 so it is enough to put it on the innermost one. For
3307 !$ omp parallel do put it on parallel though,
3308 as that's what we did for OpenMP 3.1. */
3309 clausesa[innermost == GFC_OMP_SPLIT_DO
3310 ? (int) GFC_OMP_SPLIT_PARALLEL
3311 : innermost].lists[OMP_LIST_PRIVATE]
3312 = code->ext.omp_clauses->lists[OMP_LIST_PRIVATE];
3313 /* Firstprivate clause is supported on all constructs but
3314 target and simd. Put it on the outermost of those and
3315 duplicate on parallel. */
3316 if (mask & GFC_OMP_MASK_TEAMS)
3317 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_FIRSTPRIVATE]
3318 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3319 else if (mask & GFC_OMP_MASK_DISTRIBUTE)
3320 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].lists[OMP_LIST_FIRSTPRIVATE]
3321 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3322 if (mask & GFC_OMP_MASK_PARALLEL)
3323 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_FIRSTPRIVATE]
3324 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3325 else if (mask & GFC_OMP_MASK_DO)
3326 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_FIRSTPRIVATE]
3327 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3328 /* Lastprivate is allowed on do and simd. In
3329 parallel do{, simd} we actually want to put it on
3330 parallel rather than do. */
3331 if (mask & GFC_OMP_MASK_PARALLEL)
3332 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_LASTPRIVATE]
3333 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3334 else if (mask & GFC_OMP_MASK_DO)
3335 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_LASTPRIVATE]
3336 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3337 if (mask & GFC_OMP_MASK_SIMD)
3338 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LASTPRIVATE]
3339 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3340 /* Reduction is allowed on simd, do, parallel and teams.
3341 Duplicate it on all of them, but omit it on do if
3342 parallel is present. */
3343 if (mask & GFC_OMP_MASK_TEAMS)
3344 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_REDUCTION]
3345 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3346 if (mask & GFC_OMP_MASK_PARALLEL)
3347 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_REDUCTION]
3348 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3349 else if (mask & GFC_OMP_MASK_DO)
3350 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_REDUCTION]
3351 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3352 if (mask & GFC_OMP_MASK_SIMD)
3353 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_REDUCTION]
3354 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3355 /* FIXME: This is currently being discussed. */
3356 if (mask & GFC_OMP_MASK_PARALLEL)
3357 clausesa[GFC_OMP_SPLIT_PARALLEL].if_expr
3358 = code->ext.omp_clauses->if_expr;
3359 else
3360 clausesa[GFC_OMP_SPLIT_TARGET].if_expr
3361 = code->ext.omp_clauses->if_expr;
3363 if ((mask & (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3364 == (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3365 clausesa[GFC_OMP_SPLIT_DO].nowait = true;
3368 static tree
3369 gfc_trans_omp_do_simd (gfc_code *code, stmtblock_t *pblock,
3370 gfc_omp_clauses *clausesa, tree omp_clauses)
3372 stmtblock_t block;
3373 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3374 tree stmt, body, omp_do_clauses = NULL_TREE;
3376 if (pblock == NULL)
3377 gfc_start_block (&block);
3378 else
3379 gfc_init_block (&block);
3381 if (clausesa == NULL)
3383 clausesa = clausesa_buf;
3384 gfc_split_omp_clauses (code, clausesa);
3386 if (gfc_option.gfc_flag_openmp)
3387 omp_do_clauses
3388 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DO], code->loc);
3389 body = gfc_trans_omp_do (code, EXEC_OMP_SIMD, pblock ? pblock : &block,
3390 &clausesa[GFC_OMP_SPLIT_SIMD], omp_clauses);
3391 if (pblock == NULL)
3393 if (TREE_CODE (body) != BIND_EXPR)
3394 body = build3_v (BIND_EXPR, NULL, body, poplevel (1, 0));
3395 else
3396 poplevel (0, 0);
3398 else if (TREE_CODE (body) != BIND_EXPR)
3399 body = build3_v (BIND_EXPR, NULL, body, NULL_TREE);
3400 if (gfc_option.gfc_flag_openmp)
3402 stmt = make_node (OMP_FOR);
3403 TREE_TYPE (stmt) = void_type_node;
3404 OMP_FOR_BODY (stmt) = body;
3405 OMP_FOR_CLAUSES (stmt) = omp_do_clauses;
3407 else
3408 stmt = body;
3409 gfc_add_expr_to_block (&block, stmt);
3410 return gfc_finish_block (&block);
3413 static tree
3414 gfc_trans_omp_parallel_do (gfc_code *code, stmtblock_t *pblock,
3415 gfc_omp_clauses *clausesa)
3417 stmtblock_t block, *new_pblock = pblock;
3418 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3419 tree stmt, omp_clauses = NULL_TREE;
3421 if (pblock == NULL)
3422 gfc_start_block (&block);
3423 else
3424 gfc_init_block (&block);
3426 if (clausesa == NULL)
3428 clausesa = clausesa_buf;
3429 gfc_split_omp_clauses (code, clausesa);
3431 omp_clauses
3432 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3433 code->loc);
3434 if (pblock == NULL)
3436 if (!clausesa[GFC_OMP_SPLIT_DO].ordered
3437 && clausesa[GFC_OMP_SPLIT_DO].sched_kind != OMP_SCHED_STATIC)
3438 new_pblock = &block;
3439 else
3440 pushlevel ();
3442 stmt = gfc_trans_omp_do (code, EXEC_OMP_DO, new_pblock,
3443 &clausesa[GFC_OMP_SPLIT_DO], omp_clauses);
3444 if (pblock == NULL)
3446 if (TREE_CODE (stmt) != BIND_EXPR)
3447 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3448 else
3449 poplevel (0, 0);
3451 else if (TREE_CODE (stmt) != BIND_EXPR)
3452 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3453 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3454 omp_clauses);
3455 OMP_PARALLEL_COMBINED (stmt) = 1;
3456 gfc_add_expr_to_block (&block, stmt);
3457 return gfc_finish_block (&block);
3460 static tree
3461 gfc_trans_omp_parallel_do_simd (gfc_code *code, stmtblock_t *pblock,
3462 gfc_omp_clauses *clausesa)
3464 stmtblock_t block;
3465 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3466 tree stmt, omp_clauses = NULL_TREE;
3468 if (pblock == NULL)
3469 gfc_start_block (&block);
3470 else
3471 gfc_init_block (&block);
3473 if (clausesa == NULL)
3475 clausesa = clausesa_buf;
3476 gfc_split_omp_clauses (code, clausesa);
3478 if (gfc_option.gfc_flag_openmp)
3479 omp_clauses
3480 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3481 code->loc);
3482 if (pblock == NULL)
3483 pushlevel ();
3484 stmt = gfc_trans_omp_do_simd (code, pblock, clausesa, omp_clauses);
3485 if (pblock == NULL)
3487 if (TREE_CODE (stmt) != BIND_EXPR)
3488 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3489 else
3490 poplevel (0, 0);
3492 else if (TREE_CODE (stmt) != BIND_EXPR)
3493 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3494 if (gfc_option.gfc_flag_openmp)
3496 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3497 omp_clauses);
3498 OMP_PARALLEL_COMBINED (stmt) = 1;
3500 gfc_add_expr_to_block (&block, stmt);
3501 return gfc_finish_block (&block);
3504 static tree
3505 gfc_trans_omp_parallel_sections (gfc_code *code)
3507 stmtblock_t block;
3508 gfc_omp_clauses section_clauses;
3509 tree stmt, omp_clauses;
3511 memset (&section_clauses, 0, sizeof (section_clauses));
3512 section_clauses.nowait = true;
3514 gfc_start_block (&block);
3515 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3516 code->loc);
3517 pushlevel ();
3518 stmt = gfc_trans_omp_sections (code, &section_clauses);
3519 if (TREE_CODE (stmt) != BIND_EXPR)
3520 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3521 else
3522 poplevel (0, 0);
3523 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3524 omp_clauses);
3525 OMP_PARALLEL_COMBINED (stmt) = 1;
3526 gfc_add_expr_to_block (&block, stmt);
3527 return gfc_finish_block (&block);
3530 static tree
3531 gfc_trans_omp_parallel_workshare (gfc_code *code)
3533 stmtblock_t block;
3534 gfc_omp_clauses workshare_clauses;
3535 tree stmt, omp_clauses;
3537 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
3538 workshare_clauses.nowait = true;
3540 gfc_start_block (&block);
3541 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3542 code->loc);
3543 pushlevel ();
3544 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
3545 if (TREE_CODE (stmt) != BIND_EXPR)
3546 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3547 else
3548 poplevel (0, 0);
3549 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3550 omp_clauses);
3551 OMP_PARALLEL_COMBINED (stmt) = 1;
3552 gfc_add_expr_to_block (&block, stmt);
3553 return gfc_finish_block (&block);
3556 static tree
3557 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
3559 stmtblock_t block, body;
3560 tree omp_clauses, stmt;
3561 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
3563 gfc_start_block (&block);
3565 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
3567 gfc_init_block (&body);
3568 for (code = code->block; code; code = code->block)
3570 /* Last section is special because of lastprivate, so even if it
3571 is empty, chain it in. */
3572 stmt = gfc_trans_omp_code (code->next,
3573 has_lastprivate && code->block == NULL);
3574 if (! IS_EMPTY_STMT (stmt))
3576 stmt = build1_v (OMP_SECTION, stmt);
3577 gfc_add_expr_to_block (&body, stmt);
3580 stmt = gfc_finish_block (&body);
3582 stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
3583 omp_clauses);
3584 gfc_add_expr_to_block (&block, stmt);
3586 return gfc_finish_block (&block);
3589 static tree
3590 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
3592 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
3593 tree stmt = gfc_trans_omp_code (code->block->next, true);
3594 stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
3595 omp_clauses);
3596 return stmt;
3599 static tree
3600 gfc_trans_omp_task (gfc_code *code)
3602 stmtblock_t block;
3603 tree stmt, omp_clauses;
3605 gfc_start_block (&block);
3606 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3607 code->loc);
3608 stmt = gfc_trans_omp_code (code->block->next, true);
3609 stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
3610 omp_clauses);
3611 gfc_add_expr_to_block (&block, stmt);
3612 return gfc_finish_block (&block);
3615 static tree
3616 gfc_trans_omp_taskgroup (gfc_code *code)
3618 tree stmt = gfc_trans_code (code->block->next);
3619 return build1_loc (input_location, OMP_TASKGROUP, void_type_node, stmt);
3622 static tree
3623 gfc_trans_omp_taskwait (void)
3625 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
3626 return build_call_expr_loc (input_location, decl, 0);
3629 static tree
3630 gfc_trans_omp_taskyield (void)
3632 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
3633 return build_call_expr_loc (input_location, decl, 0);
3636 static tree
3637 gfc_trans_omp_distribute (gfc_code *code, gfc_omp_clauses *clausesa)
3639 stmtblock_t block;
3640 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3641 tree stmt, omp_clauses = NULL_TREE;
3643 gfc_start_block (&block);
3644 if (clausesa == NULL)
3646 clausesa = clausesa_buf;
3647 gfc_split_omp_clauses (code, clausesa);
3649 if (gfc_option.gfc_flag_openmp)
3650 omp_clauses
3651 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
3652 code->loc);
3653 switch (code->op)
3655 case EXEC_OMP_DISTRIBUTE:
3656 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3657 case EXEC_OMP_TEAMS_DISTRIBUTE:
3658 /* This is handled in gfc_trans_omp_do. */
3659 gcc_unreachable ();
3660 break;
3661 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
3662 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
3663 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
3664 stmt = gfc_trans_omp_parallel_do (code, &block, clausesa);
3665 if (TREE_CODE (stmt) != BIND_EXPR)
3666 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3667 else
3668 poplevel (0, 0);
3669 break;
3670 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
3671 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3672 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3673 stmt = gfc_trans_omp_parallel_do_simd (code, &block, clausesa);
3674 if (TREE_CODE (stmt) != BIND_EXPR)
3675 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3676 else
3677 poplevel (0, 0);
3678 break;
3679 case EXEC_OMP_DISTRIBUTE_SIMD:
3680 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
3681 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
3682 stmt = gfc_trans_omp_do (code, EXEC_OMP_SIMD, &block,
3683 &clausesa[GFC_OMP_SPLIT_SIMD], NULL_TREE);
3684 if (TREE_CODE (stmt) != BIND_EXPR)
3685 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3686 else
3687 poplevel (0, 0);
3688 break;
3689 default:
3690 gcc_unreachable ();
3692 if (gfc_option.gfc_flag_openmp)
3694 tree distribute = make_node (OMP_DISTRIBUTE);
3695 TREE_TYPE (distribute) = void_type_node;
3696 OMP_FOR_BODY (distribute) = stmt;
3697 OMP_FOR_CLAUSES (distribute) = omp_clauses;
3698 stmt = distribute;
3700 gfc_add_expr_to_block (&block, stmt);
3701 return gfc_finish_block (&block);
3704 static tree
3705 gfc_trans_omp_teams (gfc_code *code, gfc_omp_clauses *clausesa)
3706 {
3707   stmtblock_t block;
3708   gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3709   tree stmt, omp_clauses = NULL_TREE;
3710 
3711   gfc_start_block (&block);
3712   if (clausesa == NULL)
3713     {
3714       clausesa = clausesa_buf;
3715       gfc_split_omp_clauses (code, clausesa);
3716     }
3717   if (gfc_option.gfc_flag_openmp)
3718     omp_clauses
3719       = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TEAMS],
3720                                code->loc);
3721   switch (code->op)
3722     {
3723     case EXEC_OMP_TARGET_TEAMS:
3724     case EXEC_OMP_TEAMS:
3725       stmt = gfc_trans_omp_code (code->block->next, true);
3726       break;
3727     case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3728     case EXEC_OMP_TEAMS_DISTRIBUTE:
3729       stmt = gfc_trans_omp_do (code, EXEC_OMP_DISTRIBUTE, NULL,
3730                                &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
3731                                NULL);
3732       break;
3733     default:
3734       stmt = gfc_trans_omp_distribute (code, clausesa);
3735       break;
3736     }
3737   stmt = build2_loc (input_location, OMP_TEAMS, void_type_node, stmt,
3738                      omp_clauses);
3739   gfc_add_expr_to_block (&block, stmt);
3740   return gfc_finish_block (&block);
3741 }
3742 
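/* Translate !$omp target and target-based combined constructs such as
   !$omp target teams distribute parallel do.  The body is translated directly
   or handed to gfc_trans_omp_teams, and is wrapped in an OMP_TARGET node when
   -fopenmp is in effect.  */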
3743 static tree
3744 gfc_trans_omp_target (gfc_code *code)
3745 {
3746   stmtblock_t block;
3747   gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
3748   tree stmt, omp_clauses = NULL_TREE;
3749 
3750   gfc_start_block (&block);
3751   gfc_split_omp_clauses (code, clausesa);
3752   if (gfc_option.gfc_flag_openmp)
3753     omp_clauses
3754       = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TARGET],
3755                                code->loc);
3756   if (code->op == EXEC_OMP_TARGET)
3757     stmt = gfc_trans_omp_code (code->block->next, true);
3758   else
3759     stmt = gfc_trans_omp_teams (code, clausesa);
3760   if (TREE_CODE (stmt) != BIND_EXPR)
3761     stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3762   if (gfc_option.gfc_flag_openmp)
3763     stmt = build2_loc (input_location, OMP_TARGET, void_type_node, stmt,
3764                        omp_clauses);
3765   gfc_add_expr_to_block (&block, stmt);
3766   return gfc_finish_block (&block);
3767 }
3768 
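/* Translate !$omp target data: the translated body and the map clauses are
   wrapped in an OMP_TARGET_DATA node.  */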
3769 static tree
3770 gfc_trans_omp_target_data (gfc_code *code)
3771 {
3772   stmtblock_t block;
3773   tree stmt, omp_clauses;
3774 
3775   gfc_start_block (&block);
3776   omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3777                                        code->loc);
3778   stmt = gfc_trans_omp_code (code->block->next, true);
3779   stmt = build2_loc (input_location, OMP_TARGET_DATA, void_type_node, stmt,
3780                      omp_clauses);
3781   gfc_add_expr_to_block (&block, stmt);
3782   return gfc_finish_block (&block);
3783 }
3784 
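/* Translate the stand-alone !$omp target update directive; it has no body,
   so only an OMP_TARGET_UPDATE node carrying the clauses is built.  */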
3785 static tree
3786 gfc_trans_omp_target_update (gfc_code *code)
3787 {
3788   stmtblock_t block;
3789   tree stmt, omp_clauses;
3790 
3791   gfc_start_block (&block);
3792   omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3793                                        code->loc);
3794   stmt = build1_loc (input_location, OMP_TARGET_UPDATE, void_type_node,
3795                      omp_clauses);
3796   gfc_add_expr_to_block (&block, stmt);
3797   return gfc_finish_block (&block);
3798 }
3799 
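/* Translate !$omp workshare.  Units of work that the scalarizer can divide
   among the threads of the team (e.g. array assignments) are emitted as
   workshared code; everything else is treated as a single unit of work and
   wrapped in an OMP_SINGLE construct, as described in the loop below.  */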
3800 static tree
3801 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
3802 {
3803   tree res, tmp, stmt;
3804   stmtblock_t block, *pblock = NULL;
3805   stmtblock_t singleblock;
3806   int saved_ompws_flags;
3807   bool singleblock_in_progress = false;
3808   /* True if previous gfc_code in workshare construct is not workshared.  */
3809   bool prev_singleunit;
3810 
3811   code = code->block->next;
3812 
3813   pushlevel ();
3814 
3815   gfc_start_block (&block);
3816   pblock = &block;
3817 
3818   ompws_flags = OMPWS_WORKSHARE_FLAG;
3819   prev_singleunit = false;
3820 
3821   /* Translate statements one by one to trees until we reach
3822      the end of the workshare construct.  Adjacent gfc_codes that
3823      are a single unit of work are clustered and encapsulated in a
3824      single OMP_SINGLE construct.  */
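  /* For instance (illustrative sketch, not from the original sources):

         !$omp workshare
           a(:) = b(:) + c(:)   ! array assignment: workshared by the scalarizer
           s = 0                ! scalar assignment: single unit of work
           t = 1                ! adjacent single unit, same OMP_SINGLE
         !$omp end workshare

     the two scalar assignments are clustered into one OMP_SINGLE region,
     while the array assignment is emitted as workshared code.  */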
3825   for (; code; code = code->next)
3826     {
3827       if (code->here != 0)
3828         {
3829           res = gfc_trans_label_here (code);
3830           gfc_add_expr_to_block (pblock, res);
3831         }
3832 
3833       /* No dependence analysis, use for clauses with wait.
3834          If this is the last gfc_code, use default omp_clauses.  */
3835       if (code->next == NULL && clauses->nowait)
3836         ompws_flags |= OMPWS_NOWAIT;
3837 
3838       /* By default, every gfc_code is a single unit of work.  */
3839       ompws_flags |= OMPWS_CURR_SINGLEUNIT;
3840       ompws_flags &= ~OMPWS_SCALARIZER_WS;
3841 
3842       switch (code->op)
3843         {
3844         case EXEC_NOP:
3845           res = NULL_TREE;
3846           break;
3847 
3848         case EXEC_ASSIGN:
3849           res = gfc_trans_assign (code);
3850           break;
3851 
3852         case EXEC_POINTER_ASSIGN:
3853           res = gfc_trans_pointer_assign (code);
3854           break;
3855 
3856         case EXEC_INIT_ASSIGN:
3857           res = gfc_trans_init_assign (code);
3858           break;
3859 
3860         case EXEC_FORALL:
3861           res = gfc_trans_forall (code);
3862           break;
3863 
3864         case EXEC_WHERE:
3865           res = gfc_trans_where (code);
3866           break;
3867 
3868         case EXEC_OMP_ATOMIC:
3869           res = gfc_trans_omp_directive (code);
3870           break;
3871 
3872         case EXEC_OMP_PARALLEL:
3873         case EXEC_OMP_PARALLEL_DO:
3874         case EXEC_OMP_PARALLEL_SECTIONS:
3875         case EXEC_OMP_PARALLEL_WORKSHARE:
3876         case EXEC_OMP_CRITICAL:
3877           saved_ompws_flags = ompws_flags;
3878           ompws_flags = 0;
3879           res = gfc_trans_omp_directive (code);
3880           ompws_flags = saved_ompws_flags;
3881           break;
3882 
3883         default:
3884           internal_error ("gfc_trans_omp_workshare(): Bad statement code");
3885         }
3886 
3887       gfc_set_backend_locus (&code->loc);
3888 
3889       if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
3890         {
3891           if (prev_singleunit)
3892             {
3893               if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
3894                 /* Add current gfc_code to single block.  */
3895                 gfc_add_expr_to_block (&singleblock, res);
3896               else
3897                 {
3898                   /* Finish single block and add it to pblock.  */
3899                   tmp = gfc_finish_block (&singleblock);
3900                   tmp = build2_loc (input_location, OMP_SINGLE,
3901                                     void_type_node, tmp, NULL_TREE);
3902                   gfc_add_expr_to_block (pblock, tmp);
3903                   /* Add current gfc_code to pblock.  */
3904                   gfc_add_expr_to_block (pblock, res);
3905                   singleblock_in_progress = false;
3906                 }
3907             }
3908           else
3909             {
3910               if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
3911                 {
3912                   /* Start single block.  */
3913                   gfc_init_block (&singleblock);
3914                   gfc_add_expr_to_block (&singleblock, res);
3915                   singleblock_in_progress = true;
3916                 }
3917               else
3918                 /* Add the new statement to the block.  */
3919                 gfc_add_expr_to_block (pblock, res);
3920             }
3921           prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
3922         }
3923     }
3924 
3925   /* Finish remaining SINGLE block, if we were in the middle of one.  */
3926   if (singleblock_in_progress)
3927     {
3928       /* Finish single block and add it to pblock.  */
3929       tmp = gfc_finish_block (&singleblock);
3930       tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
3931                         clauses->nowait
3932                         ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
3933                         : NULL_TREE);
3934       gfc_add_expr_to_block (pblock, tmp);
3935     }
3936 
3937   stmt = gfc_finish_block (pblock);
3938   if (TREE_CODE (stmt) != BIND_EXPR)
3939     {
3940       if (!IS_EMPTY_STMT (stmt))
3941         {
3942           tree bindblock = poplevel (1, 0);
3943           stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
3944         }
3945       else
3946         poplevel (0, 0);
3947     }
3948   else
3949     poplevel (0, 0);
3950 
3951   if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
3952     stmt = gfc_trans_omp_barrier ();
3953 
3954   ompws_flags = 0;
3955   return stmt;
3956 }
3957 
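/* Dispatch one OpenMP directive to the corresponding translation routine
   above, based on the gfc_code opcode.  */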
3958 tree
3959 gfc_trans_omp_directive (gfc_code *code)
3960 {
3961   switch (code->op)
3962     {
3963     case EXEC_OMP_ATOMIC:
3964       return gfc_trans_omp_atomic (code);
3965     case EXEC_OMP_BARRIER:
3966       return gfc_trans_omp_barrier ();
3967     case EXEC_OMP_CANCEL:
3968       return gfc_trans_omp_cancel (code);
3969     case EXEC_OMP_CANCELLATION_POINT:
3970       return gfc_trans_omp_cancellation_point (code);
3971     case EXEC_OMP_CRITICAL:
3972       return gfc_trans_omp_critical (code);
3973     case EXEC_OMP_DISTRIBUTE:
3974     case EXEC_OMP_DO:
3975     case EXEC_OMP_SIMD:
3976       return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
3977                                NULL);
3978     case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
3979     case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
3980     case EXEC_OMP_DISTRIBUTE_SIMD:
3981       return gfc_trans_omp_distribute (code, NULL);
3982     case EXEC_OMP_DO_SIMD:
3983       return gfc_trans_omp_do_simd (code, NULL, NULL, NULL_TREE);
3984     case EXEC_OMP_FLUSH:
3985       return gfc_trans_omp_flush ();
3986     case EXEC_OMP_MASTER:
3987       return gfc_trans_omp_master (code);
3988     case EXEC_OMP_ORDERED:
3989       return gfc_trans_omp_ordered (code);
3990     case EXEC_OMP_PARALLEL:
3991       return gfc_trans_omp_parallel (code);
3992     case EXEC_OMP_PARALLEL_DO:
3993       return gfc_trans_omp_parallel_do (code, NULL, NULL);
3994     case EXEC_OMP_PARALLEL_DO_SIMD:
3995       return gfc_trans_omp_parallel_do_simd (code, NULL, NULL);
3996     case EXEC_OMP_PARALLEL_SECTIONS:
3997       return gfc_trans_omp_parallel_sections (code);
3998     case EXEC_OMP_PARALLEL_WORKSHARE:
3999       return gfc_trans_omp_parallel_workshare (code);
4000     case EXEC_OMP_SECTIONS:
4001       return gfc_trans_omp_sections (code, code->ext.omp_clauses);
4002     case EXEC_OMP_SINGLE:
4003       return gfc_trans_omp_single (code, code->ext.omp_clauses);
4004     case EXEC_OMP_TARGET:
4005     case EXEC_OMP_TARGET_TEAMS:
4006     case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
4007     case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
4008     case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4009     case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
4010       return gfc_trans_omp_target (code);
4011     case EXEC_OMP_TARGET_DATA:
4012       return gfc_trans_omp_target_data (code);
4013     case EXEC_OMP_TARGET_UPDATE:
4014       return gfc_trans_omp_target_update (code);
4015     case EXEC_OMP_TASK:
4016       return gfc_trans_omp_task (code);
4017     case EXEC_OMP_TASKGROUP:
4018       return gfc_trans_omp_taskgroup (code);
4019     case EXEC_OMP_TASKWAIT:
4020       return gfc_trans_omp_taskwait ();
4021     case EXEC_OMP_TASKYIELD:
4022       return gfc_trans_omp_taskyield ();
4023     case EXEC_OMP_TEAMS:
4024     case EXEC_OMP_TEAMS_DISTRIBUTE:
4025     case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
4026     case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4027     case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
4028       return gfc_trans_omp_teams (code, NULL);
4029     case EXEC_OMP_WORKSHARE:
4030       return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
4031     default:
4032       gcc_unreachable ();
4033     }
4034 }
4035 
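/* Attach an "omp declare simd" attribute, built from the translated clauses
   of each !$omp declare simd directive in namespace NS, to the procedure's
   function decl; namespaces with alternate ENTRY points are skipped.
   For instance (illustrative sketch, not from the original sources):

         subroutine add (a, b, n)
           !$omp declare simd (add) simdlen(8)

   ends up as an "omp declare simd" attribute on the DECL_ATTRIBUTES of
   add.  */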
4036 void
4037 gfc_trans_omp_declare_simd (gfc_namespace *ns)
4038 {
4039   if (ns->entries)
4040     return;
4041 
4042   gfc_omp_declare_simd *ods;
4043   for (ods = ns->omp_declare_simd; ods; ods = ods->next)
4044     {
4045       tree c = gfc_trans_omp_clauses (NULL, ods->clauses, ods->where, true);
4046       tree fndecl = ns->proc_name->backend_decl;
4047       if (c != NULL_TREE)
4048         c = tree_cons (NULL_TREE, c, NULL_TREE);
4049       c = build_tree_list (get_identifier ("omp declare simd"), c);
4050       TREE_CHAIN (c) = DECL_ATTRIBUTES (fndecl);
4051       DECL_ATTRIBUTES (fndecl) = c;