/* gcc/omp-low.c */
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
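
/* As a rough illustration of the end result (a sketch, not code in this
   file), a construct such as

     int x = 0;
     #pragma omp parallel shared(x)
       x++;

   is outlined into a child function that receives a pointer to a record
   carrying the shared data, conceptually

     struct .omp_data_s { int *x; };
     void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
     { (*.omp_data_i->x)++; }

   with the original statement replaced by marshalling code and a call
   into libgomp, roughly GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0).
   The scan phase below decides which variables need such record fields;
   the lowering phase emits the send/receive code.  */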

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}
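
/* For example (an illustration only, not tied to a particular test), in

     struct S { int a; void f (); };
     void S::f () { ... privatizing A on an omp construct ... }

   the C++ front end privatizes the member A through an artificial
   VAR_DECL whose DECL_VALUE_EXPR is this->a; handed that dummy decl,
   the function above walks down the COMPONENT_REF and returns the
   artificial "this" PARM_DECL.  */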

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that the variable must have been
   entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in a nested parallel if
         DECL is shared in the outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in a different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}
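
/* Illustrative examples of the distinction made above (hypothetical,
   for exposition only):

     int x;                           // plain scalar, address never taken
     // shared(x) on a parallel: field "int x", copy-in/copy-out

     int y; int *p = &y;              // TREE_ADDRESSABLE is set on y
     // shared(y) on a parallel: field "int *y", passed by pointer

   Aggregates, atomics, VLAs, globals, and anything whose location may be
   reachable from an outer scope likewise get the by-pointer treatment,
   since a private copy could not preserve sharing semantics.  */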

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because a task needs
     to take its address.  But we don't need to take the address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
        x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
        = splay_tree_lookup (ctx->outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
            x = var;
          else
            x = lookup_decl (var, ctx->outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, ctx->outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (ctx->outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, ctx->outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
        {
          outer = outer->outer;
          gcc_assert (outer
                      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
        }
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
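
/* E.g. (an illustration of the taskreg case above): to initialize the
   private copy created by "#pragma omp parallel firstprivate (x)", the
   original X must be read, and inside the child function that original
   is only reachable through the receiver record, so the reference built
   here is conceptually .omp_data_i->x.  */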

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
                   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
        type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
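
/* A note on the MASK argument above, summarizing the uses seen elsewhere
   in this file: bit 1 installs the field in FIELD_MAP/RECORD_TYPE and
   bit 2 in SFIELD_MAP/SRECORD_TYPE, so the common value 3 installs both;
   bit 4 (e.g. the 7 used for zero-bias array sections) wraps the type in
   a double pointer; bit 8 (e.g. the 11 used for shared firstprivate on
   task) keys the splay tree by &DECL_UID (VAR) rather than by VAR itself,
   so the same decl can be entered under two distinct keys.  */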

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
                      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          /* Ignore shared directives in teams construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                  && !is_variable_sized (t))
                {
                  by_ref = use_pointer_for_field (t, ctx);
                  install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 3, ctx);
          else
            install_var_field (decl, false, 3, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_DEFAULT:
          ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE__CILK_FOR_COUNT_:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target link"
             attribute need to be copied.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx,
                                       base_pointers_restrict);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE__GRIDDIM_:
          if (ctx->outer)
            {
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE__SIMT_:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;

        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE__CILK_FOR_COUNT_:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE__GRIDDIM_:
        case OMP_CLAUSE__SIMT_:
          break;

        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for the omp child function.  Returns an identifier.
   If IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
                              task_copy ? "_omp_cpyfn" : "_omp_fn");
}
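
/* So for a function "foo", the outlined bodies end up named along the
   lines of "foo._omp_fn.0", "foo._omp_fn.1", ... (or "foo._omp_cpyfn.N"
   for task copy functions), the numeric suffix being appended by
   clone_function_name.  */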

/* Returns the type of the induction variable for the child function for
   _Cilk_for and the types for _high and _low variables based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
        return uint32_type_node;
      else
        return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
        return uint64_type_node;
      else
        return long_long_integer_type_node;
    }
}

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
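
/* E.g. a parallel nested inside "#pragma omp target": the host function
   containing it carries no offloadable flag, but one of the enclosing
   contexts is the GIMPLE_OMP_TARGET statement, so the region is treated
   as possibly offloaded and its child function marked accordingly.  */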

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
                         OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
                                         cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
                                     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
                                       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
                       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
        g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                            DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
                                 ? "omp target entrypoint"
                                 : "omp declare target");
      DECL_ATTRIBUTES (decl)
        = tree_cons (get_identifier (target_attr),
                     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
                  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls this
     function.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
                  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  if (cilk_for_count)
    DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier (".omp_data_o"),
                      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}

/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
                       bool *handled_ops_p,
                       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
          && gimple_omp_for_kind (stmt)
             == *(const enum gf_mask *) (wi->info))
        {
          wi->info = stmt;
          return integer_zero_node;
        }
      break;
    default:
      break;
    }
  return NULL;
}

/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
                              omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
         and then (fd.collapse - 1) temporaries with the same
         type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
          && TREE_CODE (fd.loop.n2) != INTEGER_CST)
        {
          count += fd.collapse - 1;
          /* If there are lastprivate clauses on the inner
             GIMPLE_OMP_FOR, add one more temporary for the total number
             of iterations (product of count1 ... countN-1).  */
          if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
                               OMP_CLAUSE_LASTPRIVATE))
            count++;
          else if (msk == GF_OMP_FOR_KIND_FOR
                   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
                                       OMP_CLAUSE_LASTPRIVATE))
            count++;
        }
      for (i = 0; i < count; i++)
        {
          tree temp = create_tmp_var (type);
          tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
          insert_decl_map (&outer_ctx->cb, temp, temp);
          OMP_CLAUSE_DECL (c) = temp;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
          gimple_omp_taskreg_set_clauses (stmt, c);
        }
    }
}
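
/* For instance (illustrative), for a combined construct along the lines
   of "#pragma omp parallel for collapse(2) lastprivate(l)" with a
   non-constant inner loop bound, the logic above yields istart/iend
   (count = 2), plus one count2 temporary (collapse - 1 = 1), plus one
   temporary for the total iteration count because of the lastprivate:
   four _LOOPTEMP_ clauses in all.  */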

/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
                          OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt)))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
                         TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1911 /* If any decls have been made addressable during scan_omp,
1912 adjust their fields if needed, and lay out the record types
1913 of parallel/task constructs. */
1915 static void
1916 finish_taskreg_scan (omp_context *ctx)
1918 if (ctx->record_type == NULL_TREE)
1919 return;
1921 /* If any task_shared_vars were needed, verify for all
1922 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1923 statements whether use_pointer_for_field has changed
1924 because of that. If it did, update the field types now. */
1925 if (task_shared_vars)
1927 tree c;
1929 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1930 c; c = OMP_CLAUSE_CHAIN (c))
1931 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1932 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1934 tree decl = OMP_CLAUSE_DECL (c);
1936 /* Global variables don't need to be copied;
1937 the receiver side will use them directly. */
1938 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1939 continue;
1940 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1941 || !use_pointer_for_field (decl, ctx))
1942 continue;
1943 tree field = lookup_field (decl, ctx);
1944 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1945 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1946 continue;
1947 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1948 TREE_THIS_VOLATILE (field) = 0;
1949 DECL_USER_ALIGN (field) = 0;
1950 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1951 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1952 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1953 if (ctx->srecord_type)
1955 tree sfield = lookup_sfield (decl, ctx);
1956 TREE_TYPE (sfield) = TREE_TYPE (field);
1957 TREE_THIS_VOLATILE (sfield) = 0;
1958 DECL_USER_ALIGN (sfield) = 0;
1959 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1960 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1961 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1966 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1968 layout_type (ctx->record_type);
1969 fixup_child_record_type (ctx);
1971 else
1973 location_t loc = gimple_location (ctx->stmt);
1974 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1975 /* Move VLA fields to the end. */
1976 p = &TYPE_FIELDS (ctx->record_type);
1977 while (*p)
1978 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1979 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1981 *q = *p;
1982 *p = TREE_CHAIN (*p);
1983 TREE_CHAIN (*q) = NULL_TREE;
1984 q = &TREE_CHAIN (*q);
1986 else
1987 p = &DECL_CHAIN (*p);
1988 *p = vla_fields;
1989 if (gimple_omp_task_taskloop_p (ctx->stmt))
1991 /* Move the fields corresponding to the first and second _looptemp_
1992 clauses to the front. These are filled in by GOMP_taskloop
1993 and thus need to be in specific positions. */
1994 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1995 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1996 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1997 OMP_CLAUSE__LOOPTEMP_);
1998 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1999 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2000 p = &TYPE_FIELDS (ctx->record_type);
2001 while (*p)
2002 if (*p == f1 || *p == f2)
2003 *p = DECL_CHAIN (*p);
2004 else
2005 p = &DECL_CHAIN (*p);
2006 DECL_CHAIN (f1) = f2;
2007 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2008 TYPE_FIELDS (ctx->record_type) = f1;
2009 if (ctx->srecord_type)
2011 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2012 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2013 p = &TYPE_FIELDS (ctx->srecord_type);
2014 while (*p)
2015 if (*p == f1 || *p == f2)
2016 *p = DECL_CHAIN (*p);
2017 else
2018 p = &DECL_CHAIN (*p);
2019 DECL_CHAIN (f1) = f2;
2020 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2021 TYPE_FIELDS (ctx->srecord_type) = f1;
2024 layout_type (ctx->record_type);
2025 fixup_child_record_type (ctx);
2026 if (ctx->srecord_type)
2027 layout_type (ctx->srecord_type);
2028 tree t = fold_convert_loc (loc, long_integer_type_node,
2029 TYPE_SIZE_UNIT (ctx->record_type));
2030 gimple_omp_task_set_arg_size (ctx->stmt, t);
2031 t = build_int_cst (long_integer_type_node,
2032 TYPE_ALIGN_UNIT (ctx->record_type));
2033 gimple_omp_task_set_arg_align (ctx->stmt, t);
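/* A sketch of the resulting sender record for a taskloop; field
   names and types below are illustrative, not taken from the sources:

       struct .omp_data_s
       {
         long _looptemp_1;        <- filled in by GOMP_taskloop
         long _looptemp_2;        <- filled in by GOMP_taskloop
         ... fixed-size fields ...
         ... VLA fields, moved to the end ...
       };

   arg_size and arg_align on the GIMPLE_OMP_TASK are then taken from
   the laid-out record's TYPE_SIZE_UNIT and TYPE_ALIGN_UNIT.  */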
2037 /* Find the enclosing offload context. */
2039 static omp_context *
2040 enclosing_target_ctx (omp_context *ctx)
2042 for (; ctx; ctx = ctx->outer)
2043 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2044 break;
2046 return ctx;
2049 /* Return true if ctx is part of an oacc kernels region. */
2051 static bool
2052 ctx_in_oacc_kernels_region (omp_context *ctx)
2054 for (;ctx != NULL; ctx = ctx->outer)
2056 gimple *stmt = ctx->stmt;
2057 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2058 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2059 return true;
2062 return false;
2065 /* Check the parallelism clauses inside a kernels region.
2066 Until kernels handling moves to use the same loop indirection
2067 scheme as parallel, we need to do this checking early. */
2069 static unsigned
2070 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2072 bool checking = true;
2073 unsigned outer_mask = 0;
2074 unsigned this_mask = 0;
2075 bool has_seq = false, has_auto = false;
2077 if (ctx->outer)
2078 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2079 if (!stmt)
2081 checking = false;
2082 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2083 return outer_mask;
2084 stmt = as_a <gomp_for *> (ctx->stmt);
2087 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2089 switch (OMP_CLAUSE_CODE (c))
2091 case OMP_CLAUSE_GANG:
2092 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2093 break;
2094 case OMP_CLAUSE_WORKER:
2095 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2096 break;
2097 case OMP_CLAUSE_VECTOR:
2098 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2099 break;
2100 case OMP_CLAUSE_SEQ:
2101 has_seq = true;
2102 break;
2103 case OMP_CLAUSE_AUTO:
2104 has_auto = true;
2105 break;
2106 default:
2107 break;
2111 if (checking)
2113 if (has_seq && (this_mask || has_auto))
2114 error_at (gimple_location (stmt), "%<seq%> overrides other"
2115 " OpenACC loop specifiers");
2116 else if (has_auto && this_mask)
2117 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2118 " OpenACC loop specifiers");
2120 if (this_mask & outer_mask)
2121 error_at (gimple_location (stmt), "inner loop uses same"
2122 " OpenACC parallelism as containing loop");
2125 return outer_mask | this_mask;
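/* For illustration (user code assumed): inside a kernels region

       #pragma acc loop gang
       for (i = 0; i < n; i++)
         {
           #pragma acc loop gang
           for (j = 0; j < m; j++)
             ...
         }

   the inner loop's gang mask overlaps the accumulated outer mask, so
   the check above reports "inner loop uses same OpenACC parallelism
   as containing loop"; seq or auto combined with an explicit
   gang/worker/vector on the same loop is diagnosed likewise.  */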
2128 /* Scan a GIMPLE_OMP_FOR. */
2130 static void
2131 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2133 omp_context *ctx;
2134 size_t i;
2135 tree clauses = gimple_omp_for_clauses (stmt);
2137 ctx = new_omp_context (stmt, outer_ctx);
2139 if (is_gimple_omp_oacc (stmt))
2141 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2143 if (!tgt || is_oacc_parallel (tgt))
2144 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2146 char const *check = NULL;
2148 switch (OMP_CLAUSE_CODE (c))
2150 case OMP_CLAUSE_GANG:
2151 check = "gang";
2152 break;
2154 case OMP_CLAUSE_WORKER:
2155 check = "worker";
2156 break;
2158 case OMP_CLAUSE_VECTOR:
2159 check = "vector";
2160 break;
2162 default:
2163 break;
2166 if (check && OMP_CLAUSE_OPERAND (c, 0))
2167 error_at (gimple_location (stmt),
2168 "argument not permitted on %qs clause in"
2169 " OpenACC %<parallel%>", check);
2172 if (tgt && is_oacc_kernels (tgt))
2174 /* Strip out reductions, as they are not handled yet. */
2175 tree *prev_ptr = &clauses;
2177 while (tree probe = *prev_ptr)
2179 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2181 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2182 *prev_ptr = *next_ptr;
2183 else
2184 prev_ptr = next_ptr;
2187 gimple_omp_for_set_clauses (stmt, clauses);
2188 check_oacc_kernel_gwv (stmt, ctx);
2192 scan_sharing_clauses (clauses, ctx);
2194 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2195 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2197 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2198 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2199 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2200 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2202 scan_omp (gimple_omp_body_ptr (stmt), ctx);
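/* For illustration (user code assumed): in an OpenACC parallel
   region, loop clauses may not carry arguments, so

       #pragma acc parallel
       #pragma acc loop gang (num: 32)
       for (i = 0; i < n; i++)
         ...

   is rejected above with "argument not permitted on 'gang' clause".
   Inside a kernels region, reduction clauses are instead stripped
   from the loop, as they are not handled there yet.  */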
2205 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2207 static void
2208 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2209 omp_context *outer_ctx)
2211 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2212 gsi_replace (gsi, bind, false);
2213 gimple_seq seq = NULL;
2214 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2215 tree cond = create_tmp_var_raw (integer_type_node);
2216 DECL_CONTEXT (cond) = current_function_decl;
2217 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2218 gimple_bind_set_vars (bind, cond);
2219 gimple_call_set_lhs (g, cond);
2220 gimple_seq_add_stmt (&seq, g);
2221 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2222 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2223 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2224 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2225 gimple_seq_add_stmt (&seq, g);
2226 g = gimple_build_label (lab1);
2227 gimple_seq_add_stmt (&seq, g);
2228 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2229 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2230 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2231 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2232 gimple_omp_for_set_clauses (new_stmt, clause);
2233 gimple_seq_add_stmt (&seq, new_stmt);
2234 g = gimple_build_goto (lab3);
2235 gimple_seq_add_stmt (&seq, g);
2236 g = gimple_build_label (lab2);
2237 gimple_seq_add_stmt (&seq, g);
2238 gimple_seq_add_stmt (&seq, stmt);
2239 g = gimple_build_label (lab3);
2240 gimple_seq_add_stmt (&seq, g);
2241 gimple_bind_set_body (bind, seq);
2242 update_stmt (bind);
2243 scan_omp_for (new_stmt, outer_ctx);
2244 scan_omp_for (stmt, outer_ctx);
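/* The bind built above has roughly the following shape; label and
   temporary names are invented for the sketch:

       D.cond = .GOMP_USE_SIMT ();
       if (D.cond != 0) goto lab1; else goto lab2;
       lab1:
         #pragma omp simd _simt_ ...   (copy of the loop)
         goto lab3;
       lab2:
         #pragma omp simd ...          (original loop)
       lab3:

   Which branch survives is decided later, once it is known whether
   the loop is compiled for a SIMT device.  */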
2247 /* Scan an OpenMP sections directive. */
2249 static void
2250 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2252 omp_context *ctx;
2254 ctx = new_omp_context (stmt, outer_ctx);
2255 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2256 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2259 /* Scan an OpenMP single directive. */
2261 static void
2262 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2264 omp_context *ctx;
2265 tree name;
2267 ctx = new_omp_context (stmt, outer_ctx);
2268 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2269 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2270 name = create_tmp_var_name (".omp_copy_s");
2271 name = build_decl (gimple_location (stmt),
2272 TYPE_DECL, name, ctx->record_type);
2273 TYPE_NAME (ctx->record_type) = name;
2275 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2276 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2278 if (TYPE_FIELDS (ctx->record_type) == NULL)
2279 ctx->record_type = NULL;
2280 else
2281 layout_type (ctx->record_type);
2284 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2285 used in the corresponding offloaded function are restrict. */
2287 static bool
2288 omp_target_base_pointers_restrict_p (tree clauses)
2290 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2291 used by OpenACC. */
2292 if (flag_openacc == 0)
2293 return false;
2295 /* I. Basic example:
2297 void foo (void)
2299 unsigned int a[2], b[2];
2301 #pragma acc kernels \
2302 copyout (a) \
2303 copyout (b)
2305 a[0] = 0;
2306 b[0] = 1;
2310 After gimplification, we have:
2312 #pragma omp target oacc_kernels \
2313 map(force_from:a [len: 8]) \
2314 map(force_from:b [len: 8])
2316 a[0] = 0;
2317 b[0] = 1;
2320 Because both mappings have the force prefix, we know that they will be
2321 allocated when calling the corresponding offloaded function, which means we
2322 can mark the base pointers for a and b in the offloaded function as
2323 restrict. */
2325 tree c;
2326 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2328 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2329 return false;
2331 switch (OMP_CLAUSE_MAP_KIND (c))
2333 case GOMP_MAP_FORCE_ALLOC:
2334 case GOMP_MAP_FORCE_TO:
2335 case GOMP_MAP_FORCE_FROM:
2336 case GOMP_MAP_FORCE_TOFROM:
2337 break;
2338 default:
2339 return false;
2343 return true;
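/* For illustration, a counterexample (assumed, and relying on
   present_or_* clauses gimplifying to non-force mapping kinds):

       #pragma acc kernels pcopyout (a) copyout (b)

   does not qualify, because the pcopyout mapping lacks the force
   prefix, so the loop above returns false and the base pointers are
   not marked restrict.  */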
2346 /* Scan a GIMPLE_OMP_TARGET. */
2348 static void
2349 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2351 omp_context *ctx;
2352 tree name;
2353 bool offloaded = is_gimple_omp_offloaded (stmt);
2354 tree clauses = gimple_omp_target_clauses (stmt);
2356 ctx = new_omp_context (stmt, outer_ctx);
2357 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2358 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2359 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2360 name = create_tmp_var_name (".omp_data_t");
2361 name = build_decl (gimple_location (stmt),
2362 TYPE_DECL, name, ctx->record_type);
2363 DECL_ARTIFICIAL (name) = 1;
2364 DECL_NAMELESS (name) = 1;
2365 TYPE_NAME (ctx->record_type) = name;
2366 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2368 bool base_pointers_restrict = false;
2369 if (offloaded)
2371 create_omp_child_function (ctx, false);
2372 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2374 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2375 if (base_pointers_restrict
2376 && dump_file && (dump_flags & TDF_DETAILS))
2377 fprintf (dump_file,
2378 "Base pointers in offloaded function are restrict\n");
2381 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2382 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2384 if (TYPE_FIELDS (ctx->record_type) == NULL)
2385 ctx->record_type = ctx->receiver_decl = NULL;
2386 else
2388 TYPE_FIELDS (ctx->record_type)
2389 = nreverse (TYPE_FIELDS (ctx->record_type));
2390 if (flag_checking)
2392 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2393 for (tree field = TYPE_FIELDS (ctx->record_type);
2394 field;
2395 field = DECL_CHAIN (field))
2396 gcc_assert (DECL_ALIGN (field) == align);
2398 layout_type (ctx->record_type);
2399 if (offloaded)
2400 fixup_child_record_type (ctx);
2404 /* Scan an OpenMP teams directive. */
2406 static void
2407 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2409 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2410 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2411 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2414 /* Check nesting restrictions. */
2415 static bool
2416 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2418 tree c;
2420 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2421 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2422 the original copy of its contents. */
2423 return true;
2425 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2426 inside an OpenACC CTX. */
2427 if (!(is_gimple_omp (stmt)
2428 && is_gimple_omp_oacc (stmt))
2429 /* Except for atomic codes that we share with OpenMP. */
2430 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2431 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2433 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2435 error_at (gimple_location (stmt),
2436 "non-OpenACC construct inside of OpenACC routine");
2437 return false;
2439 else
2440 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2441 if (is_gimple_omp (octx->stmt)
2442 && is_gimple_omp_oacc (octx->stmt))
2444 error_at (gimple_location (stmt),
2445 "non-OpenACC construct inside of OpenACC region");
2446 return false;
2450 if (ctx != NULL)
2452 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2453 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2455 c = NULL_TREE;
2456 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2458 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2459 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2461 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2462 && (ctx->outer == NULL
2463 || !gimple_omp_for_combined_into_p (ctx->stmt)
2464 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2465 || (gimple_omp_for_kind (ctx->outer->stmt)
2466 != GF_OMP_FOR_KIND_FOR)
2467 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2469 error_at (gimple_location (stmt),
2470 "%<ordered simd threads%> must be closely "
2471 "nested inside of %<for simd%> region");
2472 return false;
2474 return true;
2477 error_at (gimple_location (stmt),
2478 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2479 " may not be nested inside %<simd%> region");
2480 return false;
2482 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2484 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2485 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2486 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2487 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2489 error_at (gimple_location (stmt),
2490 "only %<distribute%> or %<parallel%> regions are "
2491 "allowed to be strictly nested inside %<teams%> "
2492 "region");
2493 return false;
2497 switch (gimple_code (stmt))
2499 case GIMPLE_OMP_FOR:
2500 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2501 return true;
2502 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2504 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2506 error_at (gimple_location (stmt),
2507 "%<distribute%> region must be strictly nested "
2508 "inside %<teams%> construct");
2509 return false;
2511 return true;
2513 /* We split a taskloop into a task with a taskloop nested in it. */
2514 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2515 return true;
2516 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2518 bool ok = false;
2520 if (ctx)
2521 switch (gimple_code (ctx->stmt))
2523 case GIMPLE_OMP_FOR:
2524 ok = (gimple_omp_for_kind (ctx->stmt)
2525 == GF_OMP_FOR_KIND_OACC_LOOP);
2526 break;
2528 case GIMPLE_OMP_TARGET:
2529 switch (gimple_omp_target_kind (ctx->stmt))
2531 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2532 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2533 ok = true;
2534 break;
2536 default:
2537 break;
2540 default:
2541 break;
2543 else if (oacc_get_fn_attrib (current_function_decl))
2544 ok = true;
2545 if (!ok)
2547 error_at (gimple_location (stmt),
2548 "OpenACC loop directive must be associated with"
2549 " an OpenACC compute region");
2550 return false;
2553 /* FALLTHRU */
2554 case GIMPLE_CALL:
2555 if (is_gimple_call (stmt)
2556 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2557 == BUILT_IN_GOMP_CANCEL
2558 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2559 == BUILT_IN_GOMP_CANCELLATION_POINT))
2561 const char *bad = NULL;
2562 const char *kind = NULL;
2563 const char *construct
2564 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2565 == BUILT_IN_GOMP_CANCEL)
2566 ? "#pragma omp cancel"
2567 : "#pragma omp cancellation point";
2568 if (ctx == NULL)
2570 error_at (gimple_location (stmt), "orphaned %qs construct",
2571 construct);
2572 return false;
2574 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2575 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2576 : 0)
2578 case 1:
2579 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2580 bad = "#pragma omp parallel";
2581 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2582 == BUILT_IN_GOMP_CANCEL
2583 && !integer_zerop (gimple_call_arg (stmt, 1)))
2584 ctx->cancellable = true;
2585 kind = "parallel";
2586 break;
2587 case 2:
2588 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2589 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2590 bad = "#pragma omp for";
2591 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2592 == BUILT_IN_GOMP_CANCEL
2593 && !integer_zerop (gimple_call_arg (stmt, 1)))
2595 ctx->cancellable = true;
2596 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2597 OMP_CLAUSE_NOWAIT))
2598 warning_at (gimple_location (stmt), 0,
2599 "%<#pragma omp cancel for%> inside "
2600 "%<nowait%> for construct");
2601 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2602 OMP_CLAUSE_ORDERED))
2603 warning_at (gimple_location (stmt), 0,
2604 "%<#pragma omp cancel for%> inside "
2605 "%<ordered%> for construct");
2607 kind = "for";
2608 break;
2609 case 4:
2610 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2611 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2612 bad = "#pragma omp sections";
2613 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2614 == BUILT_IN_GOMP_CANCEL
2615 && !integer_zerop (gimple_call_arg (stmt, 1)))
2617 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2619 ctx->cancellable = true;
2620 if (omp_find_clause (gimple_omp_sections_clauses
2621 (ctx->stmt),
2622 OMP_CLAUSE_NOWAIT))
2623 warning_at (gimple_location (stmt), 0,
2624 "%<#pragma omp cancel sections%> inside "
2625 "%<nowait%> sections construct");
2627 else
2629 gcc_assert (ctx->outer
2630 && gimple_code (ctx->outer->stmt)
2631 == GIMPLE_OMP_SECTIONS);
2632 ctx->outer->cancellable = true;
2633 if (omp_find_clause (gimple_omp_sections_clauses
2634 (ctx->outer->stmt),
2635 OMP_CLAUSE_NOWAIT))
2636 warning_at (gimple_location (stmt), 0,
2637 "%<#pragma omp cancel sections%> inside "
2638 "%<nowait%> sections construct");
2641 kind = "sections";
2642 break;
2643 case 8:
2644 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2645 bad = "#pragma omp task";
2646 else
2648 for (omp_context *octx = ctx->outer;
2649 octx; octx = octx->outer)
2651 switch (gimple_code (octx->stmt))
2653 case GIMPLE_OMP_TASKGROUP:
2654 break;
2655 case GIMPLE_OMP_TARGET:
2656 if (gimple_omp_target_kind (octx->stmt)
2657 != GF_OMP_TARGET_KIND_REGION)
2658 continue;
2659 /* FALLTHRU */
2660 case GIMPLE_OMP_PARALLEL:
2661 case GIMPLE_OMP_TEAMS:
2662 error_at (gimple_location (stmt),
2663 "%<%s taskgroup%> construct not closely "
2664 "nested inside of %<taskgroup%> region",
2665 construct);
2666 return false;
2667 default:
2668 continue;
2670 break;
2672 ctx->cancellable = true;
2674 kind = "taskgroup";
2675 break;
2676 default:
2677 error_at (gimple_location (stmt), "invalid arguments");
2678 return false;
2680 if (bad)
2682 error_at (gimple_location (stmt),
2683 "%<%s %s%> construct not closely nested inside of %qs",
2684 construct, kind, bad);
2685 return false;
2688 /* FALLTHRU */
2689 case GIMPLE_OMP_SECTIONS:
2690 case GIMPLE_OMP_SINGLE:
2691 for (; ctx != NULL; ctx = ctx->outer)
2692 switch (gimple_code (ctx->stmt))
2694 case GIMPLE_OMP_FOR:
2695 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2696 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2697 break;
2698 /* FALLTHRU */
2699 case GIMPLE_OMP_SECTIONS:
2700 case GIMPLE_OMP_SINGLE:
2701 case GIMPLE_OMP_ORDERED:
2702 case GIMPLE_OMP_MASTER:
2703 case GIMPLE_OMP_TASK:
2704 case GIMPLE_OMP_CRITICAL:
2705 if (is_gimple_call (stmt))
2707 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2708 != BUILT_IN_GOMP_BARRIER)
2709 return true;
2710 error_at (gimple_location (stmt),
2711 "barrier region may not be closely nested inside "
2712 "of work-sharing, %<critical%>, %<ordered%>, "
2713 "%<master%>, explicit %<task%> or %<taskloop%> "
2714 "region");
2715 return false;
2717 error_at (gimple_location (stmt),
2718 "work-sharing region may not be closely nested inside "
2719 "of work-sharing, %<critical%>, %<ordered%>, "
2720 "%<master%>, explicit %<task%> or %<taskloop%> region");
2721 return false;
2722 case GIMPLE_OMP_PARALLEL:
2723 case GIMPLE_OMP_TEAMS:
2724 return true;
2725 case GIMPLE_OMP_TARGET:
2726 if (gimple_omp_target_kind (ctx->stmt)
2727 == GF_OMP_TARGET_KIND_REGION)
2728 return true;
2729 break;
2730 default:
2731 break;
2733 break;
2734 case GIMPLE_OMP_MASTER:
2735 for (; ctx != NULL; ctx = ctx->outer)
2736 switch (gimple_code (ctx->stmt))
2738 case GIMPLE_OMP_FOR:
2739 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2740 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2741 break;
2742 /* FALLTHRU */
2743 case GIMPLE_OMP_SECTIONS:
2744 case GIMPLE_OMP_SINGLE:
2745 case GIMPLE_OMP_TASK:
2746 error_at (gimple_location (stmt),
2747 "%<master%> region may not be closely nested inside "
2748 "of work-sharing, explicit %<task%> or %<taskloop%> "
2749 "region");
2750 return false;
2751 case GIMPLE_OMP_PARALLEL:
2752 case GIMPLE_OMP_TEAMS:
2753 return true;
2754 case GIMPLE_OMP_TARGET:
2755 if (gimple_omp_target_kind (ctx->stmt)
2756 == GF_OMP_TARGET_KIND_REGION)
2757 return true;
2758 break;
2759 default:
2760 break;
2762 break;
2763 case GIMPLE_OMP_TASK:
2764 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2765 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2766 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2767 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2769 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2770 error_at (OMP_CLAUSE_LOCATION (c),
2771 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2772 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2773 return false;
2775 break;
2776 case GIMPLE_OMP_ORDERED:
2777 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2778 c; c = OMP_CLAUSE_CHAIN (c))
2780 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2782 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2783 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2784 continue;
2786 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2787 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2788 || kind == OMP_CLAUSE_DEPEND_SINK)
2790 tree oclause;
2791 /* Look for containing ordered(N) loop. */
2792 if (ctx == NULL
2793 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2794 || (oclause
2795 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2796 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2798 error_at (OMP_CLAUSE_LOCATION (c),
2799 "%<ordered%> construct with %<depend%> clause "
2800 "must be closely nested inside an %<ordered%> "
2801 "loop");
2802 return false;
2804 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2806 error_at (OMP_CLAUSE_LOCATION (c),
2807 "%<ordered%> construct with %<depend%> clause "
2808 "must be closely nested inside a loop with "
2809 "%<ordered%> clause with a parameter");
2810 return false;
2813 else
2815 error_at (OMP_CLAUSE_LOCATION (c),
2816 "invalid depend kind in omp %<ordered%> %<depend%>");
2817 return false;
2820 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2821 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2823 /* An ordered simd must be closely nested inside of a simd region,
2824 and a simd region must not encounter constructs other than
2825 ordered simd; therefore ordered simd may be either orphaned,
2826 or ctx->stmt must be simd. The latter case has already been
2827 handled earlier. */
2828 if (ctx != NULL)
2830 error_at (gimple_location (stmt),
2831 "%<ordered%> %<simd%> must be closely nested inside "
2832 "%<simd%> region");
2833 return false;
2836 for (; ctx != NULL; ctx = ctx->outer)
2837 switch (gimple_code (ctx->stmt))
2839 case GIMPLE_OMP_CRITICAL:
2840 case GIMPLE_OMP_TASK:
2841 case GIMPLE_OMP_ORDERED:
2842 ordered_in_taskloop:
2843 error_at (gimple_location (stmt),
2844 "%<ordered%> region may not be closely nested inside "
2845 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2846 "%<taskloop%> region");
2847 return false;
2848 case GIMPLE_OMP_FOR:
2849 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2850 goto ordered_in_taskloop;
2851 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2852 OMP_CLAUSE_ORDERED) == NULL)
2854 error_at (gimple_location (stmt),
2855 "%<ordered%> region must be closely nested inside "
2856 "a loop region with an %<ordered%> clause");
2857 return false;
2859 return true;
2860 case GIMPLE_OMP_TARGET:
2861 if (gimple_omp_target_kind (ctx->stmt)
2862 != GF_OMP_TARGET_KIND_REGION)
2863 break;
2864 /* FALLTHRU */
2865 case GIMPLE_OMP_PARALLEL:
2866 case GIMPLE_OMP_TEAMS:
2867 error_at (gimple_location (stmt),
2868 "%<ordered%> region must be closely nested inside "
2869 "a loop region with an %<ordered%> clause");
2870 return false;
2871 default:
2872 break;
2874 break;
2875 case GIMPLE_OMP_CRITICAL:
2877 tree this_stmt_name
2878 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2879 for (; ctx != NULL; ctx = ctx->outer)
2880 if (gomp_critical *other_crit
2881 = dyn_cast <gomp_critical *> (ctx->stmt))
2882 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2884 error_at (gimple_location (stmt),
2885 "%<critical%> region may not be nested inside "
2886 "a %<critical%> region with the same name");
2887 return false;
2890 break;
2891 case GIMPLE_OMP_TEAMS:
2892 if (ctx == NULL
2893 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2894 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2896 error_at (gimple_location (stmt),
2897 "%<teams%> construct not closely nested inside of "
2898 "%<target%> construct");
2899 return false;
2901 break;
2902 case GIMPLE_OMP_TARGET:
2903 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2904 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2905 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2906 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2908 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2909 error_at (OMP_CLAUSE_LOCATION (c),
2910 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2911 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2912 return false;
2914 if (is_gimple_omp_offloaded (stmt)
2915 && oacc_get_fn_attrib (cfun->decl) != NULL)
2917 error_at (gimple_location (stmt),
2918 "OpenACC region inside of OpenACC routine, nested "
2919 "parallelism not supported yet");
2920 return false;
2922 for (; ctx != NULL; ctx = ctx->outer)
2924 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2926 if (is_gimple_omp (stmt)
2927 && is_gimple_omp_oacc (stmt)
2928 && is_gimple_omp (ctx->stmt))
2930 error_at (gimple_location (stmt),
2931 "OpenACC construct inside of non-OpenACC region");
2932 return false;
2934 continue;
2937 const char *stmt_name, *ctx_stmt_name;
2938 switch (gimple_omp_target_kind (stmt))
2940 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2941 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2942 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2943 case GF_OMP_TARGET_KIND_ENTER_DATA:
2944 stmt_name = "target enter data"; break;
2945 case GF_OMP_TARGET_KIND_EXIT_DATA:
2946 stmt_name = "target exit data"; break;
2947 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2948 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2949 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2950 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2951 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2952 stmt_name = "enter/exit data"; break;
2953 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2954 break;
2955 default: gcc_unreachable ();
2957 switch (gimple_omp_target_kind (ctx->stmt))
2959 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2960 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2961 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2962 ctx_stmt_name = "parallel"; break;
2963 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2964 ctx_stmt_name = "kernels"; break;
2965 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2966 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2967 ctx_stmt_name = "host_data"; break;
2968 default: gcc_unreachable ();
2971 /* OpenACC/OpenMP mismatch? */
2972 if (is_gimple_omp_oacc (stmt)
2973 != is_gimple_omp_oacc (ctx->stmt))
2975 error_at (gimple_location (stmt),
2976 "%s %qs construct inside of %s %qs region",
2977 (is_gimple_omp_oacc (stmt)
2978 ? "OpenACC" : "OpenMP"), stmt_name,
2979 (is_gimple_omp_oacc (ctx->stmt)
2980 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2981 return false;
2983 if (is_gimple_omp_offloaded (ctx->stmt))
2985 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2986 if (is_gimple_omp_oacc (ctx->stmt))
2988 error_at (gimple_location (stmt),
2989 "%qs construct inside of %qs region",
2990 stmt_name, ctx_stmt_name);
2991 return false;
2993 else
2995 warning_at (gimple_location (stmt), 0,
2996 "%qs construct inside of %qs region",
2997 stmt_name, ctx_stmt_name);
3001 break;
3002 default:
3003 break;
3005 return true;
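/* Two examples of inputs rejected above (user code assumed):

       #pragma omp target
       #pragma omp teams
       #pragma omp single      <- only distribute or parallel may be
       ;                          strictly nested in teams

       #pragma omp parallel for
       for (i = 0; i < n; i++)
         {
           #pragma omp ordered <- needs a loop region with an
           ;                      ordered clause
         }

   Anything that passes all the checks falls through to the final
   return true.  */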
3009 /* Helper function for scan_omp.
3011 Callback for walk_tree, or for operands in walk_gimple_stmt, used to
3012 scan for OMP directives in TP. */
3014 static tree
3015 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3017 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3018 omp_context *ctx = (omp_context *) wi->info;
3019 tree t = *tp;
3021 switch (TREE_CODE (t))
3023 case VAR_DECL:
3024 case PARM_DECL:
3025 case LABEL_DECL:
3026 case RESULT_DECL:
3027 if (ctx)
3029 tree repl = remap_decl (t, &ctx->cb);
3030 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3031 *tp = repl;
3033 break;
3035 default:
3036 if (ctx && TYPE_P (t))
3037 *tp = remap_type (t, &ctx->cb);
3038 else if (!DECL_P (t))
3040 *walk_subtrees = 1;
3041 if (ctx)
3043 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3044 if (tem != TREE_TYPE (t))
3046 if (TREE_CODE (t) == INTEGER_CST)
3047 *tp = wide_int_to_tree (tem, t);
3048 else
3049 TREE_TYPE (t) = tem;
3053 break;
3056 return NULL_TREE;
3059 /* Return true if FNDECL is a setjmp or a longjmp. */
3061 static bool
3062 setjmp_or_longjmp_p (const_tree fndecl)
3064 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3065 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3066 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3067 return true;
3069 tree declname = DECL_NAME (fndecl);
3070 if (!declname)
3071 return false;
3072 const char *name = IDENTIFIER_POINTER (declname);
3073 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3077 /* Helper function for scan_omp.
3079 Callback for walk_gimple_stmt used to scan for OMP directives in
3080 the current statement in GSI. */
3082 static tree
3083 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3084 struct walk_stmt_info *wi)
3086 gimple *stmt = gsi_stmt (*gsi);
3087 omp_context *ctx = (omp_context *) wi->info;
3089 if (gimple_has_location (stmt))
3090 input_location = gimple_location (stmt);
3092 /* Check the nesting restrictions. */
3093 bool remove = false;
3094 if (is_gimple_omp (stmt))
3095 remove = !check_omp_nesting_restrictions (stmt, ctx);
3096 else if (is_gimple_call (stmt))
3098 tree fndecl = gimple_call_fndecl (stmt);
3099 if (fndecl)
3101 if (setjmp_or_longjmp_p (fndecl)
3102 && ctx
3103 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3104 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3106 remove = true;
3107 error_at (gimple_location (stmt),
3108 "setjmp/longjmp inside simd construct");
3110 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3111 switch (DECL_FUNCTION_CODE (fndecl))
3113 case BUILT_IN_GOMP_BARRIER:
3114 case BUILT_IN_GOMP_CANCEL:
3115 case BUILT_IN_GOMP_CANCELLATION_POINT:
3116 case BUILT_IN_GOMP_TASKYIELD:
3117 case BUILT_IN_GOMP_TASKWAIT:
3118 case BUILT_IN_GOMP_TASKGROUP_START:
3119 case BUILT_IN_GOMP_TASKGROUP_END:
3120 remove = !check_omp_nesting_restrictions (stmt, ctx);
3121 break;
3122 default:
3123 break;
3127 if (remove)
3129 stmt = gimple_build_nop ();
3130 gsi_replace (gsi, stmt, false);
3133 *handled_ops_p = true;
3135 switch (gimple_code (stmt))
3137 case GIMPLE_OMP_PARALLEL:
3138 taskreg_nesting_level++;
3139 scan_omp_parallel (gsi, ctx);
3140 taskreg_nesting_level--;
3141 break;
3143 case GIMPLE_OMP_TASK:
3144 taskreg_nesting_level++;
3145 scan_omp_task (gsi, ctx);
3146 taskreg_nesting_level--;
3147 break;
3149 case GIMPLE_OMP_FOR:
3150 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3151 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3152 && omp_maybe_offloaded_ctx (ctx)
3153 && omp_max_simt_vf ())
3154 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3155 else
3156 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3157 break;
3159 case GIMPLE_OMP_SECTIONS:
3160 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3161 break;
3163 case GIMPLE_OMP_SINGLE:
3164 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3165 break;
3167 case GIMPLE_OMP_SECTION:
3168 case GIMPLE_OMP_MASTER:
3169 case GIMPLE_OMP_TASKGROUP:
3170 case GIMPLE_OMP_ORDERED:
3171 case GIMPLE_OMP_CRITICAL:
3172 case GIMPLE_OMP_GRID_BODY:
3173 ctx = new_omp_context (stmt, ctx);
3174 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3175 break;
3177 case GIMPLE_OMP_TARGET:
3178 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3179 break;
3181 case GIMPLE_OMP_TEAMS:
3182 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3183 break;
3185 case GIMPLE_BIND:
3187 tree var;
3189 *handled_ops_p = false;
3190 if (ctx)
3191 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3192 var ;
3193 var = DECL_CHAIN (var))
3194 insert_decl_map (&ctx->cb, var, var);
3196 break;
3197 default:
3198 *handled_ops_p = false;
3199 break;
3202 return NULL_TREE;
3206 /* Scan all the statements starting at the current statement. CTX
3207 contains context information about the OMP directives and
3208 clauses found during the scan. */
3210 static void
3211 scan_omp (gimple_seq *body_p, omp_context *ctx)
3213 location_t saved_location;
3214 struct walk_stmt_info wi;
3216 memset (&wi, 0, sizeof (wi));
3217 wi.info = ctx;
3218 wi.want_locations = true;
3220 saved_location = input_location;
3221 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3222 input_location = saved_location;
3225 /* Re-gimplification and code generation routines. */
3227 /* If a context was created for STMT when it was scanned, return it. */
3229 static omp_context *
3230 maybe_lookup_ctx (gimple *stmt)
3232 splay_tree_node n;
3233 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3234 return n ? (omp_context *) n->value : NULL;
3238 /* Find the mapping for DECL in CTX or the immediately enclosing
3239 context that has a mapping for DECL.
3241 If CTX is a nested parallel directive, we may have to use the decl
3242 mappings created in CTX's parent context. Suppose that we have the
3243 following parallel nesting (variable UIDs shown for clarity):
3245 iD.1562 = 0;
3246 #omp parallel shared(iD.1562) -> outer parallel
3247 iD.1562 = iD.1562 + 1;
3249 #omp parallel shared (iD.1562) -> inner parallel
3250 iD.1562 = iD.1562 - 1;
3252 Each parallel structure will create a distinct .omp_data_s structure
3253 for copying iD.1562 in/out of the directive:
3255 outer parallel .omp_data_s.1.i -> iD.1562
3256 inner parallel .omp_data_s.2.i -> iD.1562
3258 A shared variable mapping will produce a copy-out operation before
3259 the parallel directive and a copy-in operation after it. So, in
3260 this case we would have:
3262 iD.1562 = 0;
3263 .omp_data_o.1.i = iD.1562;
3264 #omp parallel shared(iD.1562) -> outer parallel
3265 .omp_data_i.1 = &.omp_data_o.1
3266 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3268 .omp_data_o.2.i = iD.1562; -> **
3269 #omp parallel shared(iD.1562) -> inner parallel
3270 .omp_data_i.2 = &.omp_data_o.2
3271 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3274 ** This is a problem. The symbol iD.1562 cannot be referenced
3275 inside the body of the outer parallel region. But since we are
3276 emitting this copy operation while expanding the inner parallel
3277 directive, we need to access the CTX structure of the outer
3278 parallel directive to get the correct mapping:
3280 .omp_data_o.2.i = .omp_data_i.1->i
3282 Since there may be other workshare or parallel directives enclosing
3283 the parallel directive, it may be necessary to walk up the context
3284 parent chain. This is not a problem in general because nested
3285 parallelism happens only rarely. */
3287 static tree
3288 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3290 tree t;
3291 omp_context *up;
3293 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3294 t = maybe_lookup_decl (decl, up);
3296 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3298 return t ? t : decl;
3302 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3303 in outer contexts. */
3305 static tree
3306 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3308 tree t = NULL;
3309 omp_context *up;
3311 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3312 t = maybe_lookup_decl (decl, up);
3314 return t ? t : decl;
3318 /* Construct the initialization value for reduction operation OP. */
3320 tree
3321 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3323 switch (op)
3325 case PLUS_EXPR:
3326 case MINUS_EXPR:
3327 case BIT_IOR_EXPR:
3328 case BIT_XOR_EXPR:
3329 case TRUTH_OR_EXPR:
3330 case TRUTH_ORIF_EXPR:
3331 case TRUTH_XOR_EXPR:
3332 case NE_EXPR:
3333 return build_zero_cst (type);
3335 case MULT_EXPR:
3336 case TRUTH_AND_EXPR:
3337 case TRUTH_ANDIF_EXPR:
3338 case EQ_EXPR:
3339 return fold_convert_loc (loc, type, integer_one_node);
3341 case BIT_AND_EXPR:
3342 return fold_convert_loc (loc, type, integer_minus_one_node);
3344 case MAX_EXPR:
3345 if (SCALAR_FLOAT_TYPE_P (type))
3347 REAL_VALUE_TYPE max, min;
3348 if (HONOR_INFINITIES (type))
3350 real_inf (&max);
3351 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3353 else
3354 real_maxval (&min, 1, TYPE_MODE (type));
3355 return build_real (type, min);
3357 else if (POINTER_TYPE_P (type))
3359 wide_int min
3360 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3361 return wide_int_to_tree (type, min);
3363 else
3365 gcc_assert (INTEGRAL_TYPE_P (type));
3366 return TYPE_MIN_VALUE (type);
3369 case MIN_EXPR:
3370 if (SCALAR_FLOAT_TYPE_P (type))
3372 REAL_VALUE_TYPE max;
3373 if (HONOR_INFINITIES (type))
3374 real_inf (&max);
3375 else
3376 real_maxval (&max, 0, TYPE_MODE (type));
3377 return build_real (type, max);
3379 else if (POINTER_TYPE_P (type))
3381 wide_int max
3382 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3383 return wide_int_to_tree (type, max);
3385 else
3387 gcc_assert (INTEGRAL_TYPE_P (type));
3388 return TYPE_MAX_VALUE (type);
3391 default:
3392 gcc_unreachable ();
3396 /* Construct the initialization value for reduction CLAUSE. */
3398 tree
3399 omp_reduction_init (tree clause, tree type)
3401 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3402 OMP_CLAUSE_REDUCTION_CODE (clause), type);
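/* For illustration, the identity values produced above, assuming C
   types (a summary, not an exhaustive table):

       reduction(+:x)    int    ->  0
       reduction(*:x)    int    ->  1
       reduction(&:x)    int    ->  ~0      (all bits set)
       reduction(min:x)  int    ->  INT_MAX
       reduction(max:x)  float  ->  -inf    (or the most negative
                                             finite value when the
                                             type has no infinities)

   so that combining a private copy that saw no iterations leaves the
   result unchanged.  */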
3405 /* Return alignment to be assumed for var in CLAUSE, which should be
3406 OMP_CLAUSE_ALIGNED. */
3408 static tree
3409 omp_clause_aligned_alignment (tree clause)
3411 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3412 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3414 /* Otherwise return the implementation-defined alignment. */
3415 unsigned int al = 1;
3416 machine_mode mode, vmode;
3417 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3418 if (vs)
3419 vs = 1 << floor_log2 (vs);
3420 static enum mode_class classes[]
3421 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3422 for (int i = 0; i < 4; i += 2)
3423 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3424 mode != VOIDmode;
3425 mode = GET_MODE_WIDER_MODE (mode))
3427 vmode = targetm.vectorize.preferred_simd_mode (mode);
3428 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3429 continue;
3430 while (vs
3431 && GET_MODE_SIZE (vmode) < vs
3432 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3433 vmode = GET_MODE_2XWIDER_MODE (vmode);
3435 tree type = lang_hooks.types.type_for_mode (mode, 1);
3436 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3437 continue;
3438 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3439 / GET_MODE_SIZE (mode));
3440 if (TYPE_MODE (type) != vmode)
3441 continue;
3442 if (TYPE_ALIGN_UNIT (type) > al)
3443 al = TYPE_ALIGN_UNIT (type);
3445 return build_int_cst (integer_type_node, al);
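/* For illustration, with target-dependent numbers assumed: on
   x86_64 with -mavx2 the widest vector of int is 32 bytes, so

       #pragma omp simd aligned (p)

   without an explicit alignment lets the caller emit
   __builtin_assume_aligned (p, 32) for the privatized pointer.  */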
3448 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3449 privatization. */
3451 static bool
3452 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx, int &max_vf,
3453 tree &idx, tree &lane, tree &ivar, tree &lvar)
3455 if (max_vf == 0)
3457 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3458 OMP_CLAUSE__SIMT_))
3459 max_vf = omp_max_simt_vf ();
3460 else
3461 max_vf = omp_max_vf ();
3462 if (max_vf > 1)
3464 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3465 OMP_CLAUSE_SAFELEN);
3466 if (c
3467 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3468 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3469 max_vf = 1;
3470 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3471 max_vf) == -1)
3472 max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3474 if (max_vf > 1)
3476 idx = create_tmp_var (unsigned_type_node);
3477 lane = create_tmp_var (unsigned_type_node);
3480 if (max_vf == 1)
3481 return false;
3483 tree atype = build_array_type_nelts (TREE_TYPE (new_var), max_vf);
3484 tree avar = create_tmp_var_raw (atype);
3485 if (TREE_ADDRESSABLE (new_var))
3486 TREE_ADDRESSABLE (avar) = 1;
3487 DECL_ATTRIBUTES (avar)
3488 = tree_cons (get_identifier ("omp simd array"), NULL,
3489 DECL_ATTRIBUTES (avar));
3490 gimple_add_tmp_var (avar);
3491 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, idx,
3492 NULL_TREE, NULL_TREE);
3493 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, lane,
3494 NULL_TREE, NULL_TREE);
3495 if (DECL_P (new_var))
3497 SET_DECL_VALUE_EXPR (new_var, lvar);
3498 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3500 return true;
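/* A sketch of the effect; the backing array name is invented:

       #pragma omp simd private (x)       with  int x;

   and a max_vf of, say, 8 gives x an internal backing array

       int x.simdarr[8];                  carries "omp simd array"

   References to x in the loop body become x.simdarr[lane] via the
   DECL_VALUE_EXPR set above; the vectorizer later gives each SIMD
   lane its own element.  */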
3503 /* Helper function of lower_rec_input_clauses. For a reference
3504 in simd reduction, add an underlying variable it will reference. */
3506 static void
3507 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3509 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3510 if (TREE_CONSTANT (z))
3512 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3513 get_name (new_vard));
3514 gimple_add_tmp_var (z);
3515 TREE_ADDRESSABLE (z) = 1;
3516 z = build_fold_addr_expr_loc (loc, z);
3517 gimplify_assign (new_vard, z, ilist);
3521 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3522 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3523 private variables. Initialization statements go in ILIST, while calls
3524 to destructors go in DLIST. */
3526 static void
3527 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3528 omp_context *ctx, struct omp_for_data *fd)
3530 tree c, dtor, copyin_seq, x, ptr;
3531 bool copyin_by_ref = false;
3532 bool lastprivate_firstprivate = false;
3533 bool reduction_omp_orig_ref = false;
3534 int pass;
3535 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3536 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3537 bool maybe_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3538 int max_vf = 0;
3539 tree lane = NULL_TREE, idx = NULL_TREE;
3540 tree simt_lane = NULL_TREE;
3541 tree ivar = NULL_TREE, lvar = NULL_TREE;
3542 gimple_seq llist[3] = { };
3544 copyin_seq = NULL;
3546 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3547 with data sharing clauses referencing variable sized vars. That
3548 is unnecessarily hard to support and very unlikely to result in
3549 vectorized code anyway. */
3550 if (is_simd)
3551 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3552 switch (OMP_CLAUSE_CODE (c))
3554 case OMP_CLAUSE_LINEAR:
3555 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3556 max_vf = 1;
3557 /* FALLTHRU */
3558 case OMP_CLAUSE_PRIVATE:
3559 case OMP_CLAUSE_FIRSTPRIVATE:
3560 case OMP_CLAUSE_LASTPRIVATE:
3561 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3562 max_vf = 1;
3563 break;
3564 case OMP_CLAUSE_REDUCTION:
3565 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3566 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3567 max_vf = 1;
3568 break;
3569 default:
3570 continue;
3573 /* Do all the fixed sized types in the first pass, and the variable sized
3574 types in the second pass. This makes sure that the scalar arguments to
3575 the variable sized types are processed before we use them in the
3576 variable sized operations. */
3577 for (pass = 0; pass < 2; ++pass)
3579 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3581 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3582 tree var, new_var;
3583 bool by_ref;
3584 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3586 switch (c_kind)
3588 case OMP_CLAUSE_PRIVATE:
3589 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3590 continue;
3591 break;
3592 case OMP_CLAUSE_SHARED:
3593 /* Ignore shared directives in teams construct. */
3594 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3595 continue;
3596 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3598 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3599 || is_global_var (OMP_CLAUSE_DECL (c)));
3600 continue;
3602 case OMP_CLAUSE_FIRSTPRIVATE:
3603 case OMP_CLAUSE_COPYIN:
3604 break;
3605 case OMP_CLAUSE_LINEAR:
3606 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3607 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3608 lastprivate_firstprivate = true;
3609 break;
3610 case OMP_CLAUSE_REDUCTION:
3611 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3612 reduction_omp_orig_ref = true;
3613 break;
3614 case OMP_CLAUSE__LOOPTEMP_:
3615 /* Handle _looptemp_ clauses only on parallel/task. */
3616 if (fd)
3617 continue;
3618 break;
3619 case OMP_CLAUSE_LASTPRIVATE:
3620 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3622 lastprivate_firstprivate = true;
3623 if (pass != 0 || is_taskloop_ctx (ctx))
3624 continue;
3626 /* Even without corresponding firstprivate, if
3627 decl is Fortran allocatable, it needs outer var
3628 reference. */
3629 else if (pass == 0
3630 && lang_hooks.decls.omp_private_outer_ref
3631 (OMP_CLAUSE_DECL (c)))
3632 lastprivate_firstprivate = true;
3633 break;
3634 case OMP_CLAUSE_ALIGNED:
3635 if (pass == 0)
3636 continue;
3637 var = OMP_CLAUSE_DECL (c);
3638 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3639 && !is_global_var (var))
3641 new_var = maybe_lookup_decl (var, ctx);
3642 if (new_var == NULL_TREE)
3643 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3644 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3645 tree alarg = omp_clause_aligned_alignment (c);
3646 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3647 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3648 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3649 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3650 gimplify_and_add (x, ilist);
3652 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3653 && is_global_var (var))
3655 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3656 new_var = lookup_decl (var, ctx);
3657 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3658 t = build_fold_addr_expr_loc (clause_loc, t);
3659 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3660 tree alarg = omp_clause_aligned_alignment (c);
3661 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3662 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3663 t = fold_convert_loc (clause_loc, ptype, t);
3664 x = create_tmp_var (ptype);
3665 t = build2 (MODIFY_EXPR, ptype, x, t);
3666 gimplify_and_add (t, ilist);
3667 t = build_simple_mem_ref_loc (clause_loc, x);
3668 SET_DECL_VALUE_EXPR (new_var, t);
3669 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3671 continue;
3672 default:
3673 continue;
3676 new_var = var = OMP_CLAUSE_DECL (c);
3677 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3679 var = TREE_OPERAND (var, 0);
3680 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3681 var = TREE_OPERAND (var, 0);
3682 if (TREE_CODE (var) == INDIRECT_REF
3683 || TREE_CODE (var) == ADDR_EXPR)
3684 var = TREE_OPERAND (var, 0);
3685 if (is_variable_sized (var))
3687 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3688 var = DECL_VALUE_EXPR (var);
3689 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3690 var = TREE_OPERAND (var, 0);
3691 gcc_assert (DECL_P (var));
3693 new_var = var;
3695 if (c_kind != OMP_CLAUSE_COPYIN)
3696 new_var = lookup_decl (var, ctx);
3698 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3700 if (pass != 0)
3701 continue;
3703 /* C/C++ array section reductions. */
3704 else if (c_kind == OMP_CLAUSE_REDUCTION
3705 && var != OMP_CLAUSE_DECL (c))
3707 if (pass == 0)
3708 continue;
3710 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3711 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3712 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3714 tree b = TREE_OPERAND (orig_var, 1);
3715 b = maybe_lookup_decl (b, ctx);
3716 if (b == NULL)
3718 b = TREE_OPERAND (orig_var, 1);
3719 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3721 if (integer_zerop (bias))
3722 bias = b;
3723 else
3725 bias = fold_convert_loc (clause_loc,
3726 TREE_TYPE (b), bias);
3727 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3728 TREE_TYPE (b), b, bias);
3730 orig_var = TREE_OPERAND (orig_var, 0);
3732 if (TREE_CODE (orig_var) == INDIRECT_REF
3733 || TREE_CODE (orig_var) == ADDR_EXPR)
3734 orig_var = TREE_OPERAND (orig_var, 0);
3735 tree d = OMP_CLAUSE_DECL (c);
3736 tree type = TREE_TYPE (d);
3737 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3738 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3739 const char *name = get_name (orig_var);
3740 if (TREE_CONSTANT (v))
3742 x = create_tmp_var_raw (type, name);
3743 gimple_add_tmp_var (x);
3744 TREE_ADDRESSABLE (x) = 1;
3745 x = build_fold_addr_expr_loc (clause_loc, x);
3747 else
3749 tree atmp
3750 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3751 tree t = maybe_lookup_decl (v, ctx);
3752 if (t)
3753 v = t;
3754 else
3755 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3756 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3757 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3758 TREE_TYPE (v), v,
3759 build_int_cst (TREE_TYPE (v), 1));
3760 t = fold_build2_loc (clause_loc, MULT_EXPR,
3761 TREE_TYPE (v), t,
3762 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3763 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3764 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3767 tree ptype = build_pointer_type (TREE_TYPE (type));
3768 x = fold_convert_loc (clause_loc, ptype, x);
3769 tree y = create_tmp_var (ptype, name);
3770 gimplify_assign (y, x, ilist);
3771 x = y;
3772 tree yb = y;
3774 if (!integer_zerop (bias))
3776 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3777 bias);
3778 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3780 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3781 pointer_sized_int_node, yb, bias);
3782 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3783 yb = create_tmp_var (ptype, name);
3784 gimplify_assign (yb, x, ilist);
3785 x = yb;
3788 d = TREE_OPERAND (d, 0);
3789 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3790 d = TREE_OPERAND (d, 0);
3791 if (TREE_CODE (d) == ADDR_EXPR)
3793 if (orig_var != var)
3795 gcc_assert (is_variable_sized (orig_var));
3796 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3798 gimplify_assign (new_var, x, ilist);
3799 tree new_orig_var = lookup_decl (orig_var, ctx);
3800 tree t = build_fold_indirect_ref (new_var);
3801 DECL_IGNORED_P (new_var) = 0;
3802 TREE_THIS_NOTRAP (t);
3803 SET_DECL_VALUE_EXPR (new_orig_var, t);
3804 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3806 else
3808 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3809 build_int_cst (ptype, 0));
3810 SET_DECL_VALUE_EXPR (new_var, x);
3811 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3814 else
3816 gcc_assert (orig_var == var);
3817 if (TREE_CODE (d) == INDIRECT_REF)
3819 x = create_tmp_var (ptype, name);
3820 TREE_ADDRESSABLE (x) = 1;
3821 gimplify_assign (x, yb, ilist);
3822 x = build_fold_addr_expr_loc (clause_loc, x);
3824 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3825 gimplify_assign (new_var, x, ilist);
3827 tree y1 = create_tmp_var (ptype, NULL);
3828 gimplify_assign (y1, y, ilist);
3829 tree i2 = NULL_TREE, y2 = NULL_TREE;
3830 tree body2 = NULL_TREE, end2 = NULL_TREE;
3831 tree y3 = NULL_TREE, y4 = NULL_TREE;
3832 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3834 y2 = create_tmp_var (ptype, NULL);
3835 gimplify_assign (y2, y, ilist);
3836 tree ref = build_outer_var_ref (var, ctx);
3837 /* For references, build_outer_var_ref already performs this. */
3838 if (TREE_CODE (d) == INDIRECT_REF)
3839 gcc_assert (omp_is_reference (var));
3840 else if (TREE_CODE (d) == ADDR_EXPR)
3841 ref = build_fold_addr_expr (ref);
3842 else if (omp_is_reference (var))
3843 ref = build_fold_addr_expr (ref);
3844 ref = fold_convert_loc (clause_loc, ptype, ref);
3845 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3846 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3848 y3 = create_tmp_var (ptype, NULL);
3849 gimplify_assign (y3, unshare_expr (ref), ilist);
3851 if (is_simd)
3853 y4 = create_tmp_var (ptype, NULL);
3854 gimplify_assign (y4, ref, dlist);
3857 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3858 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3859 tree body = create_artificial_label (UNKNOWN_LOCATION);
3860 tree end = create_artificial_label (UNKNOWN_LOCATION);
3861 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3862 if (y2)
3864 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3865 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3866 body2 = create_artificial_label (UNKNOWN_LOCATION);
3867 end2 = create_artificial_label (UNKNOWN_LOCATION);
3868 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3870 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3872 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3873 tree decl_placeholder
3874 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3875 SET_DECL_VALUE_EXPR (decl_placeholder,
3876 build_simple_mem_ref (y1));
3877 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3878 SET_DECL_VALUE_EXPR (placeholder,
3879 y3 ? build_simple_mem_ref (y3)
3880 : error_mark_node);
3881 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3882 x = lang_hooks.decls.omp_clause_default_ctor
3883 (c, build_simple_mem_ref (y1),
3884 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3885 if (x)
3886 gimplify_and_add (x, ilist);
3887 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3889 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3890 lower_omp (&tseq, ctx);
3891 gimple_seq_add_seq (ilist, tseq);
3893 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3894 if (is_simd)
3896 SET_DECL_VALUE_EXPR (decl_placeholder,
3897 build_simple_mem_ref (y2));
3898 SET_DECL_VALUE_EXPR (placeholder,
3899 build_simple_mem_ref (y4));
3900 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3901 lower_omp (&tseq, ctx);
3902 gimple_seq_add_seq (dlist, tseq);
3903 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3905 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3906 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3907 x = lang_hooks.decls.omp_clause_dtor
3908 (c, build_simple_mem_ref (y2));
3909 if (x)
3911 gimple_seq tseq = NULL;
3912 dtor = x;
3913 gimplify_stmt (&dtor, &tseq);
3914 gimple_seq_add_seq (dlist, tseq);
3917 else
3919 x = omp_reduction_init (c, TREE_TYPE (type));
3920 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3922 /* reduction(-:var) sums up the partial results, so it
3923 acts identically to reduction(+:var). */
3924 if (code == MINUS_EXPR)
3925 code = PLUS_EXPR;
3927 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3928 if (is_simd)
3930 x = build2 (code, TREE_TYPE (type),
3931 build_simple_mem_ref (y4),
3932 build_simple_mem_ref (y2));
3933 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3936 gimple *g
3937 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3938 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3939 gimple_seq_add_stmt (ilist, g);
3940 if (y3)
3942 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3943 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3944 gimple_seq_add_stmt (ilist, g);
3946 g = gimple_build_assign (i, PLUS_EXPR, i,
3947 build_int_cst (TREE_TYPE (i), 1));
3948 gimple_seq_add_stmt (ilist, g);
3949 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3950 gimple_seq_add_stmt (ilist, g);
3951 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3952 if (y2)
3954 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3955 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3956 gimple_seq_add_stmt (dlist, g);
3957 if (y4)
3959 g = gimple_build_assign
3960 (y4, POINTER_PLUS_EXPR, y4,
3961 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3962 gimple_seq_add_stmt (dlist, g);
3964 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3965 build_int_cst (TREE_TYPE (i2), 1));
3966 gimple_seq_add_stmt (dlist, g);
3967 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3968 gimple_seq_add_stmt (dlist, g);
3969 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3971 continue;
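/* Hedged sketch of what the code above queued (y1..y4, i, i2 are the
   temporaries created in this block): ILIST gains an initialization
   loop of the form
     i = 0;
   body:
     *y1 = <init>;  y1 += elt_size;  [y3 += elt_size;]
     i += 1;  if (i <= v) goto body;
   end:
   and DLIST gains the matching merge (and destructor) loop walking
   y2 (and y4 for SIMD).  */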
3973 else if (is_variable_sized (var))
3975 /* For variable sized types, we need to allocate the
3976 actual storage here. Call alloca and store the
3977 result in the pointer decl that we created elsewhere. */
3978 if (pass == 0)
3979 continue;
3981 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3983 gcall *stmt;
3984 tree tmp, atmp;
3986 ptr = DECL_VALUE_EXPR (new_var);
3987 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3988 ptr = TREE_OPERAND (ptr, 0);
3989 gcc_assert (DECL_P (ptr));
3990 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3992 /* void *tmp = __builtin_alloca_with_align (size, align); */
3993 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3994 stmt = gimple_build_call (atmp, 2, x,
3995 size_int (DECL_ALIGN (var)));
3996 tmp = create_tmp_var_raw (ptr_type_node);
3997 gimple_add_tmp_var (tmp);
3998 gimple_call_set_lhs (stmt, tmp);
4000 gimple_seq_add_stmt (ilist, stmt);
4002 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4003 gimplify_assign (ptr, x, ilist);
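/* Schematically (added sketch, illustration only), for a VLA
   'T a[n]' this emits roughly:
     tmp = __builtin_alloca_with_align (sizeof (T) * n, DECL_ALIGN (a));
     ptr = (T *) tmp;
   so later uses that go through PTR's DECL_VALUE_EXPR see the fresh
   per-thread storage.  */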
4006 else if (omp_is_reference (var))
4008 /* For references that are being privatized for Fortran,
4009 allocate new backing storage for the new pointer
4010 variable. This allows us to avoid changing all the
4011 code that expects a pointer to something that expects
4012 a direct variable. */
4013 if (pass == 0)
4014 continue;
4016 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4017 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4019 x = build_receiver_ref (var, false, ctx);
4020 x = build_fold_addr_expr_loc (clause_loc, x);
4022 else if (TREE_CONSTANT (x))
4024 /* For reduction in SIMD loop, defer adding the
4025 initialization of the reference, because if we decide
4026 to use a SIMD array for it, the initialization could cause
4027 an ICE during expansion. */
4028 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4029 x = NULL_TREE;
4030 else
4032 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4033 get_name (var));
4034 gimple_add_tmp_var (x);
4035 TREE_ADDRESSABLE (x) = 1;
4036 x = build_fold_addr_expr_loc (clause_loc, x);
4039 else
4041 tree atmp
4042 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4043 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4044 tree al = size_int (TYPE_ALIGN (rtype));
4045 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4048 if (x)
4050 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4051 gimplify_assign (new_var, x, ilist);
4054 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
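/* E.g. (added note): for a Fortran dummy argument privatized here,
   NEW_VAR is really a pointer; the code above points it at fresh
   backing storage and then replaces NEW_VAR by its dereference, so
   the clause handling below can treat it like an ordinary
   variable.  */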
4056 else if (c_kind == OMP_CLAUSE_REDUCTION
4057 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4059 if (pass == 0)
4060 continue;
4062 else if (pass != 0)
4063 continue;
4065 switch (OMP_CLAUSE_CODE (c))
4067 case OMP_CLAUSE_SHARED:
4068 /* Ignore shared directives in teams construct. */
4069 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4070 continue;
4071 /* Shared global vars are just accessed directly. */
4072 if (is_global_var (new_var))
4073 break;
4074 /* For taskloop firstprivate/lastprivate (represented as a
4075 firstprivate together with a shared clause on the task),
4076 new_var is the firstprivate var. */
4077 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4078 break;
4079 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4080 needs to be delayed until after fixup_child_record_type so
4081 that we get the correct type during the dereference. */
4082 by_ref = use_pointer_for_field (var, ctx);
4083 x = build_receiver_ref (var, by_ref, ctx);
4084 SET_DECL_VALUE_EXPR (new_var, x);
4085 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4087 /* ??? If VAR is not passed by reference, and the variable
4088 hasn't been initialized yet, then we'll get a warning for
4089 the store into the omp_data_s structure. Ideally, we'd be
4090 able to notice this and not store anything at all, but
4091 we're generating code too early. Suppress the warning. */
4092 if (!by_ref)
4093 TREE_NO_WARNING (var) = 1;
4094 break;
4096 case OMP_CLAUSE_LASTPRIVATE:
4097 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4098 break;
4099 /* FALLTHRU */
4101 case OMP_CLAUSE_PRIVATE:
4102 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4103 x = build_outer_var_ref (var, ctx);
4104 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4106 if (is_task_ctx (ctx))
4107 x = build_receiver_ref (var, false, ctx);
4108 else
4109 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4111 else
4112 x = NULL;
4113 do_private:
4114 tree nx;
4115 nx = lang_hooks.decls.omp_clause_default_ctor
4116 (c, unshare_expr (new_var), x);
4117 if (is_simd)
4119 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4120 if ((TREE_ADDRESSABLE (new_var) || nx || y
4121 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4122 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
4123 idx, lane, ivar, lvar))
4125 if (nx)
4126 x = lang_hooks.decls.omp_clause_default_ctor
4127 (c, unshare_expr (ivar), x);
4128 if (nx && x)
4129 gimplify_and_add (x, &llist[0]);
4130 if (y)
4132 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4133 if (y)
4135 gimple_seq tseq = NULL;
4137 dtor = y;
4138 gimplify_stmt (&dtor, &tseq);
4139 gimple_seq_add_seq (&llist[1], tseq);
4142 break;
4145 if (nx)
4146 gimplify_and_add (nx, ilist);
4147 /* FALLTHRU */
4149 do_dtor:
4150 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4151 if (x)
4153 gimple_seq tseq = NULL;
4155 dtor = x;
4156 gimplify_stmt (&dtor, &tseq);
4157 gimple_seq_add_seq (dlist, tseq);
4159 break;
4161 case OMP_CLAUSE_LINEAR:
4162 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4163 goto do_firstprivate;
4164 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4165 x = NULL;
4166 else
4167 x = build_outer_var_ref (var, ctx);
4168 goto do_private;
4170 case OMP_CLAUSE_FIRSTPRIVATE:
4171 if (is_task_ctx (ctx))
4173 if (omp_is_reference (var) || is_variable_sized (var))
4174 goto do_dtor;
4175 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4176 ctx))
4177 || use_pointer_for_field (var, NULL))
4179 x = build_receiver_ref (var, false, ctx);
4180 SET_DECL_VALUE_EXPR (new_var, x);
4181 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4182 goto do_dtor;
4185 do_firstprivate:
4186 x = build_outer_var_ref (var, ctx);
4187 if (is_simd)
4189 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4190 && gimple_omp_for_combined_into_p (ctx->stmt))
4192 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4193 tree stept = TREE_TYPE (t);
4194 tree ct = omp_find_clause (clauses,
4195 OMP_CLAUSE__LOOPTEMP_);
4196 gcc_assert (ct);
4197 tree l = OMP_CLAUSE_DECL (ct);
4198 tree n1 = fd->loop.n1;
4199 tree step = fd->loop.step;
4200 tree itype = TREE_TYPE (l);
4201 if (POINTER_TYPE_P (itype))
4202 itype = signed_type_for (itype);
4203 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4204 if (TYPE_UNSIGNED (itype)
4205 && fd->loop.cond_code == GT_EXPR)
4206 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4207 fold_build1 (NEGATE_EXPR, itype, l),
4208 fold_build1 (NEGATE_EXPR,
4209 itype, step));
4210 else
4211 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4212 t = fold_build2 (MULT_EXPR, stept,
4213 fold_convert (stept, l), t);
4215 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4217 x = lang_hooks.decls.omp_clause_linear_ctor
4218 (c, new_var, x, t);
4219 gimplify_and_add (x, ilist);
4220 goto do_dtor;
4223 if (POINTER_TYPE_P (TREE_TYPE (x)))
4224 x = fold_build2 (POINTER_PLUS_EXPR,
4225 TREE_TYPE (x), x, t);
4226 else
4227 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
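/* Added arithmetic note: for a combined construct the thread's chunk
   begins at L (the _looptemp_ value), so the privatized linear
   variable must start at
     outer_x + ((L - N1) / STEP) * LINEAR_STEP,
   which is exactly the T computed above and added to X here.  */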
4230 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4231 || TREE_ADDRESSABLE (new_var))
4232 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
4233 idx, lane, ivar, lvar))
4235 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4237 tree iv = create_tmp_var (TREE_TYPE (new_var));
4238 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4239 gimplify_and_add (x, ilist);
4240 gimple_stmt_iterator gsi
4241 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4242 gassign *g
4243 = gimple_build_assign (unshare_expr (lvar), iv);
4244 gsi_insert_before_without_update (&gsi, g,
4245 GSI_SAME_STMT);
4246 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4247 enum tree_code code = PLUS_EXPR;
4248 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4249 code = POINTER_PLUS_EXPR;
4250 g = gimple_build_assign (iv, code, iv, t);
4251 gsi_insert_before_without_update (&gsi, g,
4252 GSI_SAME_STMT);
4253 break;
4255 x = lang_hooks.decls.omp_clause_copy_ctor
4256 (c, unshare_expr (ivar), x);
4257 gimplify_and_add (x, &llist[0]);
4258 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4259 if (x)
4261 gimple_seq tseq = NULL;
4263 dtor = x;
4264 gimplify_stmt (&dtor, &tseq);
4265 gimple_seq_add_seq (&llist[1], tseq);
4267 break;
4270 x = lang_hooks.decls.omp_clause_copy_ctor
4271 (c, unshare_expr (new_var), x);
4272 gimplify_and_add (x, ilist);
4273 goto do_dtor;
4275 case OMP_CLAUSE__LOOPTEMP_:
4276 gcc_assert (is_taskreg_ctx (ctx));
4277 x = build_outer_var_ref (var, ctx);
4278 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4279 gimplify_and_add (x, ilist);
4280 break;
4282 case OMP_CLAUSE_COPYIN:
4283 by_ref = use_pointer_for_field (var, NULL);
4284 x = build_receiver_ref (var, by_ref, ctx);
4285 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4286 append_to_statement_list (x, &copyin_seq);
4287 copyin_by_ref |= by_ref;
4288 break;
4290 case OMP_CLAUSE_REDUCTION:
4291 /* OpenACC reductions are initialized using the
4292 GOACC_REDUCTION internal function. */
4293 if (is_gimple_omp_oacc (ctx->stmt))
4294 break;
4295 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4297 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4298 gimple *tseq;
4299 x = build_outer_var_ref (var, ctx);
4301 if (omp_is_reference (var)
4302 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4303 TREE_TYPE (x)))
4304 x = build_fold_addr_expr_loc (clause_loc, x);
4305 SET_DECL_VALUE_EXPR (placeholder, x);
4306 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4307 tree new_vard = new_var;
4308 if (omp_is_reference (var))
4310 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4311 new_vard = TREE_OPERAND (new_var, 0);
4312 gcc_assert (DECL_P (new_vard));
4314 if (is_simd
4315 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
4316 idx, lane, ivar, lvar))
4318 if (new_vard == new_var)
4320 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4321 SET_DECL_VALUE_EXPR (new_var, ivar);
4323 else
4325 SET_DECL_VALUE_EXPR (new_vard,
4326 build_fold_addr_expr (ivar));
4327 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4329 x = lang_hooks.decls.omp_clause_default_ctor
4330 (c, unshare_expr (ivar),
4331 build_outer_var_ref (var, ctx));
4332 if (x)
4333 gimplify_and_add (x, &llist[0]);
4334 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4336 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4337 lower_omp (&tseq, ctx);
4338 gimple_seq_add_seq (&llist[0], tseq);
4340 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4341 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4342 lower_omp (&tseq, ctx);
4343 gimple_seq_add_seq (&llist[1], tseq);
4344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4345 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4346 if (new_vard == new_var)
4347 SET_DECL_VALUE_EXPR (new_var, lvar);
4348 else
4349 SET_DECL_VALUE_EXPR (new_vard,
4350 build_fold_addr_expr (lvar));
4351 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4352 if (x)
4354 tseq = NULL;
4355 dtor = x;
4356 gimplify_stmt (&dtor, &tseq);
4357 gimple_seq_add_seq (&llist[1], tseq);
4359 break;
4361 /* If this is a reference to a constant-size reduction var
4362 with a placeholder, we haven't emitted the initializer
4363 for it yet, because doing so is undesirable if SIMD arrays
4364 end up being used. But if they aren't used, we need to emit
4365 the deferred initialization now. */
4366 else if (omp_is_reference (var) && is_simd)
4367 handle_simd_reference (clause_loc, new_vard, ilist);
4368 x = lang_hooks.decls.omp_clause_default_ctor
4369 (c, unshare_expr (new_var),
4370 build_outer_var_ref (var, ctx));
4371 if (x)
4372 gimplify_and_add (x, ilist);
4373 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4375 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4376 lower_omp (&tseq, ctx);
4377 gimple_seq_add_seq (ilist, tseq);
4379 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4380 if (is_simd)
4382 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4383 lower_omp (&tseq, ctx);
4384 gimple_seq_add_seq (dlist, tseq);
4385 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4387 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4388 goto do_dtor;
4390 else
4392 x = omp_reduction_init (c, TREE_TYPE (new_var));
4393 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4394 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4396 /* reduction(-:var) sums up the partial results, so it
4397 acts identically to reduction(+:var). */
4398 if (code == MINUS_EXPR)
4399 code = PLUS_EXPR;
4401 tree new_vard = new_var;
4402 if (is_simd && omp_is_reference (var))
4404 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4405 new_vard = TREE_OPERAND (new_var, 0);
4406 gcc_assert (DECL_P (new_vard));
4408 if (is_simd
4409 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
4410 idx, lane, ivar, lvar))
4412 tree ref = build_outer_var_ref (var, ctx);
4414 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4416 if (maybe_simt)
4418 if (!simt_lane)
4419 simt_lane = create_tmp_var (unsigned_type_node);
4420 x = build_call_expr_internal_loc
4421 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4422 TREE_TYPE (ivar), 2, ivar, simt_lane);
4423 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4424 gimplify_assign (ivar, x, &llist[2]);
4426 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4427 ref = build_outer_var_ref (var, ctx);
4428 gimplify_assign (ref, x, &llist[1]);
4430 if (new_vard != new_var)
4432 SET_DECL_VALUE_EXPR (new_vard,
4433 build_fold_addr_expr (lvar));
4434 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4437 else
4439 if (omp_is_reference (var) && is_simd)
4440 handle_simd_reference (clause_loc, new_vard, ilist);
4441 gimplify_assign (new_var, x, ilist);
4442 if (is_simd)
4444 tree ref = build_outer_var_ref (var, ctx);
4446 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4447 ref = build_outer_var_ref (var, ctx);
4448 gimplify_assign (ref, x, dlist);
4452 break;
4454 default:
4455 gcc_unreachable ();
4460 if (lane)
4462 tree uid = create_tmp_var (ptr_type_node, "simduid");
4463 /* We don't want uninitialized warnings on simduid; it is always
4464 uninitialized, since we use it only for its DECL_UID, never for its value. */
4465 TREE_NO_WARNING (uid) = 1;
4466 gimple *g
4467 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4468 gimple_call_set_lhs (g, lane);
4469 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4470 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4471 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4472 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4473 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4474 gimple_omp_for_set_clauses (ctx->stmt, c);
4475 g = gimple_build_assign (lane, INTEGER_CST,
4476 build_int_cst (unsigned_type_node, 0));
4477 gimple_seq_add_stmt (ilist, g);
4478 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4479 if (llist[2])
4481 tree simt_vf = create_tmp_var (unsigned_type_node);
4482 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4483 gimple_call_set_lhs (g, simt_vf);
4484 gimple_seq_add_stmt (dlist, g);
4486 tree t = build_int_cst (unsigned_type_node, 1);
4487 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4488 gimple_seq_add_stmt (dlist, g);
4490 t = build_int_cst (unsigned_type_node, 0);
4491 g = gimple_build_assign (idx, INTEGER_CST, t);
4492 gimple_seq_add_stmt (dlist, g);
4494 tree body = create_artificial_label (UNKNOWN_LOCATION);
4495 tree header = create_artificial_label (UNKNOWN_LOCATION);
4496 tree end = create_artificial_label (UNKNOWN_LOCATION);
4497 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4498 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4500 gimple_seq_add_seq (dlist, llist[2]);
4502 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4503 gimple_seq_add_stmt (dlist, g);
4505 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4506 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4507 gimple_seq_add_stmt (dlist, g);
4509 gimple_seq_add_stmt (dlist, gimple_build_label (end));
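/* The DLIST sequence built above is, schematically (added sketch):
     simt_vf = GOMP_SIMT_VF ();  simt_lane = 1;  idx = 0;  goto header;
   body:
     <llist[2]: ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)>
     simt_lane <<= 1;
   header:
     if (simt_lane < simt_vf) goto body;
   end:
   i.e. a log2(simt_vf)-step butterfly combining the lanes' partial
   reduction values.  */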
4511 for (int i = 0; i < 2; i++)
4512 if (llist[i])
4514 tree vf = create_tmp_var (unsigned_type_node);
4515 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4516 gimple_call_set_lhs (g, vf);
4517 gimple_seq *seq = i == 0 ? ilist : dlist;
4518 gimple_seq_add_stmt (seq, g);
4519 tree t = build_int_cst (unsigned_type_node, 0);
4520 g = gimple_build_assign (idx, INTEGER_CST, t);
4521 gimple_seq_add_stmt (seq, g);
4522 tree body = create_artificial_label (UNKNOWN_LOCATION);
4523 tree header = create_artificial_label (UNKNOWN_LOCATION);
4524 tree end = create_artificial_label (UNKNOWN_LOCATION);
4525 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4526 gimple_seq_add_stmt (seq, gimple_build_label (body));
4527 gimple_seq_add_seq (seq, llist[i]);
4528 t = build_int_cst (unsigned_type_node, 1);
4529 g = gimple_build_assign (idx, PLUS_EXPR, idx, t);
4530 gimple_seq_add_stmt (seq, g);
4531 gimple_seq_add_stmt (seq, gimple_build_label (header));
4532 g = gimple_build_cond (LT_EXPR, idx, vf, body, end);
4533 gimple_seq_add_stmt (seq, g);
4534 gimple_seq_add_stmt (seq, gimple_build_label (end));
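/* Similarly (added sketch), each remaining per-lane sequence is
   wrapped in a loop over the simd vectorization factor:
     vf = GOMP_SIMD_VF (uid);  idx = 0;  goto header;
   body:
     <llist[i]>  idx += 1;
   header:
     if (idx < vf) goto body;
   with the constructor loop (i == 0) placed in ILIST and the
   destructor/reduction loop (i == 1) in DLIST.  */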
4538 /* The copyin sequence is not to be executed by the main thread, since
4539 that would result in self-copies. Perhaps not visible to scalars,
4540 but it certainly is to C++ operator=. */
4541 if (copyin_seq)
4543 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4544 0);
4545 x = build2 (NE_EXPR, boolean_type_node, x,
4546 build_int_cst (TREE_TYPE (x), 0));
4547 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4548 gimplify_and_add (x, ilist);
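/* I.e., roughly (added note):
     if (omp_get_thread_num () != 0) { <copyin_seq> }
   so only the non-master threads copy the master's threadprivate
   values in.  */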
4551 /* If any copyin variable is passed by reference, we must ensure the
4552 master thread doesn't modify it before it is copied over in all
4553 threads. Similarly for variables in both firstprivate and
4554 lastprivate clauses we need to ensure the lastprivate copying
4555 happens after firstprivate copying in all threads. And similarly
4556 for UDRs if the initializer expression refers to omp_orig. */
4557 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4559 /* Don't add any barrier for #pragma omp simd or
4560 #pragma omp distribute. */
4561 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4562 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4563 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4566 /* If max_vf is non-zero, then we can use only a vectorization factor
4567 up to the max_vf we chose. So stick it into the safelen clause. */
4568 if (max_vf)
4570 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4571 OMP_CLAUSE_SAFELEN);
4572 if (c == NULL_TREE
4573 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4574 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4575 max_vf) == 1))
4577 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4578 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4579 max_vf);
4580 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4581 gimple_omp_for_set_clauses (ctx->stmt, c);
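/* Example (added): if max_vf ended up capped at 16 and the loop had
   no safelen clause (or one larger than 16), a safelen(16) clause is
   prepended here so the vectorizer never exceeds the SIMD array
   size chosen above.  */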
4587 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4588 both parallel and workshare constructs. PREDICATE may be NULL if it's
4589 always true. */
4591 static void
4592 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4593 omp_context *ctx)
4595 tree x, c, label = NULL, orig_clauses = clauses;
4596 bool par_clauses = false;
4597 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4599 /* Early exit if there are no lastprivate or linear clauses. */
4600 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4601 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4602 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4603 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4604 break;
4605 if (clauses == NULL)
4607 /* If this was a workshare clause, see if it had been combined
4608 with its parallel. In that case, look for the clauses on the
4609 parallel statement itself. */
4610 if (is_parallel_ctx (ctx))
4611 return;
4613 ctx = ctx->outer;
4614 if (ctx == NULL || !is_parallel_ctx (ctx))
4615 return;
4617 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4618 OMP_CLAUSE_LASTPRIVATE);
4619 if (clauses == NULL)
4620 return;
4621 par_clauses = true;
4624 bool maybe_simt = false;
4625 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4626 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4628 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4629 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4630 if (simduid)
4631 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4634 if (predicate)
4636 gcond *stmt;
4637 tree label_true, arm1, arm2;
4638 enum tree_code pred_code = TREE_CODE (predicate);
4640 label = create_artificial_label (UNKNOWN_LOCATION);
4641 label_true = create_artificial_label (UNKNOWN_LOCATION);
4642 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4644 arm1 = TREE_OPERAND (predicate, 0);
4645 arm2 = TREE_OPERAND (predicate, 1);
4646 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4647 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4649 else
4651 arm1 = predicate;
4652 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4653 arm2 = boolean_false_node;
4654 pred_code = NE_EXPR;
4656 if (maybe_simt)
4658 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4659 c = fold_convert (integer_type_node, c);
4660 simtcond = create_tmp_var (integer_type_node);
4661 gimplify_assign (simtcond, c, stmt_list);
4662 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4663 1, simtcond);
4664 c = create_tmp_var (integer_type_node);
4665 gimple_call_set_lhs (g, c);
4666 gimple_seq_add_stmt (stmt_list, g);
4667 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4668 label_true, label);
4670 else
4671 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4672 gimple_seq_add_stmt (stmt_list, stmt);
4673 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
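/* The guard emitted above is, schematically (added sketch):
     if (<predicate>) goto label_true; else goto label;
   label_true:
     <lastprivate copy-out, appended below>
   label:
   where under SIMT the plain test is replaced by a
   GOMP_SIMT_VOTE_ANY vote across the lanes.  */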
4676 for (c = clauses; c ;)
4678 tree var, new_var;
4679 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4681 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4682 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4683 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4685 var = OMP_CLAUSE_DECL (c);
4686 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4687 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4688 && is_taskloop_ctx (ctx))
4690 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4691 new_var = lookup_decl (var, ctx->outer);
4693 else
4695 new_var = lookup_decl (var, ctx);
4696 /* Avoid uninitialized warnings for lastprivate and
4697 for linear iterators. */
4698 if (predicate
4699 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4700 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4701 TREE_NO_WARNING (new_var) = 1;
4704 if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4706 tree val = DECL_VALUE_EXPR (new_var);
4707 if (TREE_CODE (val) == ARRAY_REF
4708 && VAR_P (TREE_OPERAND (val, 0))
4709 && lookup_attribute ("omp simd array",
4710 DECL_ATTRIBUTES (TREE_OPERAND (val,
4711 0))))
4713 if (lastlane == NULL)
4715 lastlane = create_tmp_var (unsigned_type_node);
4716 gcall *g
4717 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4718 2, simduid,
4719 TREE_OPERAND (val, 1));
4720 gimple_call_set_lhs (g, lastlane);
4721 gimple_seq_add_stmt (stmt_list, g);
4723 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4724 TREE_OPERAND (val, 0), lastlane,
4725 NULL_TREE, NULL_TREE);
4726 if (maybe_simt)
4728 gcall *g;
4729 if (simtlast == NULL)
4731 simtlast = create_tmp_var (unsigned_type_node);
4732 g = gimple_build_call_internal
4733 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4734 gimple_call_set_lhs (g, simtlast);
4735 gimple_seq_add_stmt (stmt_list, g);
4737 x = build_call_expr_internal_loc
4738 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4739 TREE_TYPE (new_var), 2, new_var, simtlast);
4740 new_var = unshare_expr (new_var);
4741 gimplify_assign (new_var, x, stmt_list);
4742 new_var = unshare_expr (new_var);
4747 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4748 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4750 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4751 gimple_seq_add_seq (stmt_list,
4752 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4753 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4755 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4756 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4758 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4759 gimple_seq_add_seq (stmt_list,
4760 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4761 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4764 x = NULL_TREE;
4765 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4766 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4768 gcc_checking_assert (is_taskloop_ctx (ctx));
4769 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4770 ctx->outer->outer);
4771 if (is_global_var (ovar))
4772 x = ovar;
4774 if (!x)
4775 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4776 if (omp_is_reference (var))
4777 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4778 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4779 gimplify_and_add (x, stmt_list);
4781 c = OMP_CLAUSE_CHAIN (c);
4782 if (c == NULL && !par_clauses)
4784 /* If this was a workshare clause, see if it had been combined
4785 with its parallel. In that case, continue looking for the
4786 clauses also on the parallel statement itself. */
4787 if (is_parallel_ctx (ctx))
4788 break;
4790 ctx = ctx->outer;
4791 if (ctx == NULL || !is_parallel_ctx (ctx))
4792 break;
4794 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4795 OMP_CLAUSE_LASTPRIVATE);
4796 par_clauses = true;
4800 if (label)
4801 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4804 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4805 (which might be a placeholder). INNER is true if this is an inner
4806 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4807 join markers. Generate the before-loop forking sequence in
4808 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4809 general form of these sequences is
4811 GOACC_REDUCTION_SETUP
4812 GOACC_FORK
4813 GOACC_REDUCTION_INIT
4814 ...
4815 GOACC_REDUCTION_FINI
4816 GOACC_JOIN
4817 GOACC_REDUCTION_TEARDOWN. */
4819 static void
4820 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4821 gcall *fork, gcall *join, gimple_seq *fork_seq,
4822 gimple_seq *join_seq, omp_context *ctx)
4824 gimple_seq before_fork = NULL;
4825 gimple_seq after_fork = NULL;
4826 gimple_seq before_join = NULL;
4827 gimple_seq after_join = NULL;
4828 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4829 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4830 unsigned offset = 0;
4832 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4833 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4835 tree orig = OMP_CLAUSE_DECL (c);
4836 tree var = maybe_lookup_decl (orig, ctx);
4837 tree ref_to_res = NULL_TREE;
4838 tree incoming, outgoing, v1, v2, v3;
4839 bool is_private = false;
4841 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4842 if (rcode == MINUS_EXPR)
4843 rcode = PLUS_EXPR;
4844 else if (rcode == TRUTH_ANDIF_EXPR)
4845 rcode = BIT_AND_EXPR;
4846 else if (rcode == TRUTH_ORIF_EXPR)
4847 rcode = BIT_IOR_EXPR;
4848 tree op = build_int_cst (unsigned_type_node, rcode);
4850 if (!var)
4851 var = orig;
4853 incoming = outgoing = var;
4855 if (!inner)
4857 /* See if an outer construct also reduces this variable. */
4858 omp_context *outer = ctx;
4860 while (omp_context *probe = outer->outer)
4862 enum gimple_code type = gimple_code (probe->stmt);
4863 tree cls;
4865 switch (type)
4867 case GIMPLE_OMP_FOR:
4868 cls = gimple_omp_for_clauses (probe->stmt);
4869 break;
4871 case GIMPLE_OMP_TARGET:
4872 if (gimple_omp_target_kind (probe->stmt)
4873 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4874 goto do_lookup;
4876 cls = gimple_omp_target_clauses (probe->stmt);
4877 break;
4879 default:
4880 goto do_lookup;
4883 outer = probe;
4884 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4885 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4886 && orig == OMP_CLAUSE_DECL (cls))
4888 incoming = outgoing = lookup_decl (orig, probe);
4889 goto has_outer_reduction;
4891 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4892 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4893 && orig == OMP_CLAUSE_DECL (cls))
4895 is_private = true;
4896 goto do_lookup;
4900 do_lookup:
4901 /* This is the outermost construct with this reduction,
4902 see if there's a mapping for it. */
4903 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4904 && maybe_lookup_field (orig, outer) && !is_private)
4906 ref_to_res = build_receiver_ref (orig, false, outer);
4907 if (omp_is_reference (orig))
4908 ref_to_res = build_simple_mem_ref (ref_to_res);
4910 tree type = TREE_TYPE (var);
4911 if (POINTER_TYPE_P (type))
4912 type = TREE_TYPE (type);
4914 outgoing = var;
4915 incoming = omp_reduction_init_op (loc, rcode, type);
4917 else
4919 /* Try to look at enclosing contexts for reduction var,
4920 use original if no mapping found. */
4921 tree t = NULL_TREE;
4922 omp_context *c = ctx->outer;
4923 while (c && !t)
4925 t = maybe_lookup_decl (orig, c);
4926 c = c->outer;
4928 incoming = outgoing = (t ? t : orig);
4931 has_outer_reduction:;
4934 if (!ref_to_res)
4935 ref_to_res = integer_zero_node;
4937 if (omp_is_reference (orig))
4939 tree type = TREE_TYPE (var);
4940 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4942 if (!inner)
4944 tree x = create_tmp_var (TREE_TYPE (type), id);
4945 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4948 v1 = create_tmp_var (type, id);
4949 v2 = create_tmp_var (type, id);
4950 v3 = create_tmp_var (type, id);
4952 gimplify_assign (v1, var, fork_seq);
4953 gimplify_assign (v2, var, fork_seq);
4954 gimplify_assign (v3, var, fork_seq);
4956 var = build_simple_mem_ref (var);
4957 v1 = build_simple_mem_ref (v1);
4958 v2 = build_simple_mem_ref (v2);
4959 v3 = build_simple_mem_ref (v3);
4960 outgoing = build_simple_mem_ref (outgoing);
4962 if (!TREE_CONSTANT (incoming))
4963 incoming = build_simple_mem_ref (incoming);
4965 else
4966 v1 = v2 = v3 = var;
4968 /* Determine position in reduction buffer, which may be used
4969 by target. */
4970 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
4971 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
4972 offset = (offset + align - 1) & ~(align - 1);
4973 tree off = build_int_cst (sizetype, offset);
4974 offset += GET_MODE_SIZE (mode);
4976 if (!init_code)
4978 init_code = build_int_cst (integer_type_node,
4979 IFN_GOACC_REDUCTION_INIT);
4980 fini_code = build_int_cst (integer_type_node,
4981 IFN_GOACC_REDUCTION_FINI);
4982 setup_code = build_int_cst (integer_type_node,
4983 IFN_GOACC_REDUCTION_SETUP);
4984 teardown_code = build_int_cst (integer_type_node,
4985 IFN_GOACC_REDUCTION_TEARDOWN);
4988 tree setup_call
4989 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
4990 TREE_TYPE (var), 6, setup_code,
4991 unshare_expr (ref_to_res),
4992 incoming, level, op, off);
4993 tree init_call
4994 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
4995 TREE_TYPE (var), 6, init_code,
4996 unshare_expr (ref_to_res),
4997 v1, level, op, off);
4998 tree fini_call
4999 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5000 TREE_TYPE (var), 6, fini_code,
5001 unshare_expr (ref_to_res),
5002 v2, level, op, off);
5003 tree teardown_call
5004 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5005 TREE_TYPE (var), 6, teardown_code,
5006 ref_to_res, v3, level, op, off);
5008 gimplify_assign (v1, setup_call, &before_fork);
5009 gimplify_assign (v2, init_call, &after_fork);
5010 gimplify_assign (v3, fini_call, &before_join);
5011 gimplify_assign (outgoing, teardown_call, &after_join);
5014 /* Now stitch things together. */
5015 gimple_seq_add_seq (fork_seq, before_fork);
5016 if (fork)
5017 gimple_seq_add_stmt (fork_seq, fork);
5018 gimple_seq_add_seq (fork_seq, after_fork);
5020 gimple_seq_add_seq (join_seq, before_join);
5021 if (join)
5022 gimple_seq_add_stmt (join_seq, join);
5023 gimple_seq_add_seq (join_seq, after_join);
5026 /* Generate code to implement the REDUCTION clauses. */
5028 static void
5029 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5031 gimple_seq sub_seq = NULL;
5032 gimple *stmt;
5033 tree x, c;
5034 int count = 0;
5036 /* OpenACC loop reductions are handled elsewhere. */
5037 if (is_gimple_omp_oacc (ctx->stmt))
5038 return;
5040 /* SIMD reductions are handled in lower_rec_input_clauses. */
5041 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5042 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5043 return;
5045 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5046 update in that case, otherwise use a lock. */
5047 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5048 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5050 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5051 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5053 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5054 count = -1;
5055 break;
5057 count++;
5060 if (count == 0)
5061 return;
5063 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5065 tree var, ref, new_var, orig_var;
5066 enum tree_code code;
5067 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5069 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5070 continue;
5072 orig_var = var = OMP_CLAUSE_DECL (c);
5073 if (TREE_CODE (var) == MEM_REF)
5075 var = TREE_OPERAND (var, 0);
5076 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5077 var = TREE_OPERAND (var, 0);
5078 if (TREE_CODE (var) == INDIRECT_REF
5079 || TREE_CODE (var) == ADDR_EXPR)
5080 var = TREE_OPERAND (var, 0);
5081 orig_var = var;
5082 if (is_variable_sized (var))
5084 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5085 var = DECL_VALUE_EXPR (var);
5086 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5087 var = TREE_OPERAND (var, 0);
5088 gcc_assert (DECL_P (var));
5091 new_var = lookup_decl (var, ctx);
5092 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5093 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5094 ref = build_outer_var_ref (var, ctx);
5095 code = OMP_CLAUSE_REDUCTION_CODE (c);
5097 /* reduction(-:var) sums up the partial results, so it acts
5098 identically to reduction(+:var). */
5099 if (code == MINUS_EXPR)
5100 code = PLUS_EXPR;
5102 if (count == 1)
5104 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5106 addr = save_expr (addr);
5107 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5108 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5109 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5110 gimplify_and_add (x, stmt_seqp);
5111 return;
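/* I.e. with a single scalar reduction clause this degenerates to an
   atomic update (added sketch):
     #pragma omp atomic
     *addr = *addr OP new_var;
   and no GOMP_atomic_start/end lock pair is needed.  */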
5113 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5115 tree d = OMP_CLAUSE_DECL (c);
5116 tree type = TREE_TYPE (d);
5117 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5118 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5119 tree ptype = build_pointer_type (TREE_TYPE (type));
5120 tree bias = TREE_OPERAND (d, 1);
5121 d = TREE_OPERAND (d, 0);
5122 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5124 tree b = TREE_OPERAND (d, 1);
5125 b = maybe_lookup_decl (b, ctx);
5126 if (b == NULL)
5128 b = TREE_OPERAND (d, 1);
5129 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5131 if (integer_zerop (bias))
5132 bias = b;
5133 else
5135 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5136 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5137 TREE_TYPE (b), b, bias);
5139 d = TREE_OPERAND (d, 0);
5141 /* For references, build_outer_var_ref already performs this, so
5142 only new_var needs a dereference. */
5143 if (TREE_CODE (d) == INDIRECT_REF)
5145 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5146 gcc_assert (omp_is_reference (var) && var == orig_var);
5148 else if (TREE_CODE (d) == ADDR_EXPR)
5150 if (orig_var == var)
5152 new_var = build_fold_addr_expr (new_var);
5153 ref = build_fold_addr_expr (ref);
5156 else
5158 gcc_assert (orig_var == var);
5159 if (omp_is_reference (var))
5160 ref = build_fold_addr_expr (ref);
5162 if (DECL_P (v))
5164 tree t = maybe_lookup_decl (v, ctx);
5165 if (t)
5166 v = t;
5167 else
5168 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5169 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5171 if (!integer_zerop (bias))
5173 bias = fold_convert_loc (clause_loc, sizetype, bias);
5174 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5175 TREE_TYPE (new_var), new_var,
5176 unshare_expr (bias));
5177 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5178 TREE_TYPE (ref), ref, bias);
5180 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5181 ref = fold_convert_loc (clause_loc, ptype, ref);
5182 tree m = create_tmp_var (ptype, NULL);
5183 gimplify_assign (m, new_var, stmt_seqp);
5184 new_var = m;
5185 m = create_tmp_var (ptype, NULL);
5186 gimplify_assign (m, ref, stmt_seqp);
5187 ref = m;
5188 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5189 tree body = create_artificial_label (UNKNOWN_LOCATION);
5190 tree end = create_artificial_label (UNKNOWN_LOCATION);
5191 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5192 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5193 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5194 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5196 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5197 tree decl_placeholder
5198 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5199 SET_DECL_VALUE_EXPR (placeholder, out);
5200 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5201 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5202 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5203 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5204 gimple_seq_add_seq (&sub_seq,
5205 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5206 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5207 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5208 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5210 else
5212 x = build2 (code, TREE_TYPE (out), out, priv);
5213 out = unshare_expr (out);
5214 gimplify_assign (out, x, &sub_seq);
5216 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5217 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5218 gimple_seq_add_stmt (&sub_seq, g);
5219 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5220 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5221 gimple_seq_add_stmt (&sub_seq, g);
5222 g = gimple_build_assign (i, PLUS_EXPR, i,
5223 build_int_cst (TREE_TYPE (i), 1));
5224 gimple_seq_add_stmt (&sub_seq, g);
5225 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5226 gimple_seq_add_stmt (&sub_seq, g);
5227 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5229 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5231 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5233 if (omp_is_reference (var)
5234 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5235 TREE_TYPE (ref)))
5236 ref = build_fold_addr_expr_loc (clause_loc, ref);
5237 SET_DECL_VALUE_EXPR (placeholder, ref);
5238 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5239 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5240 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5241 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5242 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5244 else
5246 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5247 ref = build_outer_var_ref (var, ctx);
5248 gimplify_assign (ref, x, &sub_seq);
5252 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5253 0);
5254 gimple_seq_add_stmt (stmt_seqp, stmt);
5256 gimple_seq_add_seq (stmt_seqp, sub_seq);
5258 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5259 0);
5260 gimple_seq_add_stmt (stmt_seqp, stmt);
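/* Resulting shape (added note):
     GOMP_atomic_start ();
     <sub_seq: one 'ref = ref OP private-copy' merge per reduction>
     GOMP_atomic_end ();
   i.e. a single global lock guards all the non-atomic merges.  */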
5264 /* Generate code to implement the COPYPRIVATE clauses. */
5266 static void
5267 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5268 omp_context *ctx)
5270 tree c;
5272 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5274 tree var, new_var, ref, x;
5275 bool by_ref;
5276 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5278 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5279 continue;
5281 var = OMP_CLAUSE_DECL (c);
5282 by_ref = use_pointer_for_field (var, NULL);
5284 ref = build_sender_ref (var, ctx);
5285 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5286 if (by_ref)
5288 x = build_fold_addr_expr_loc (clause_loc, new_var);
5289 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5291 gimplify_assign (ref, x, slist);
5293 ref = build_receiver_ref (var, false, ctx);
5294 if (by_ref)
5296 ref = fold_convert_loc (clause_loc,
5297 build_pointer_type (TREE_TYPE (new_var)),
5298 ref);
5299 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5301 if (omp_is_reference (var))
5303 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5304 ref = build_simple_mem_ref_loc (clause_loc, ref);
5305 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5307 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5308 gimplify_and_add (x, rlist);
5313 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5314 and REDUCTION from the sender (aka parent) side. */
5316 static void
5317 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5318 omp_context *ctx)
5320 tree c, t;
5321 int ignored_looptemp = 0;
5322 bool is_taskloop = false;
5324 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5325 by GOMP_taskloop. */
5326 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5328 ignored_looptemp = 2;
5329 is_taskloop = true;
5332 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5334 tree val, ref, x, var;
5335 bool by_ref, do_in = false, do_out = false;
5336 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5338 switch (OMP_CLAUSE_CODE (c))
5340 case OMP_CLAUSE_PRIVATE:
5341 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5342 break;
5343 continue;
5344 case OMP_CLAUSE_FIRSTPRIVATE:
5345 case OMP_CLAUSE_COPYIN:
5346 case OMP_CLAUSE_LASTPRIVATE:
5347 case OMP_CLAUSE_REDUCTION:
5348 break;
5349 case OMP_CLAUSE_SHARED:
5350 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5351 break;
5352 continue;
5353 case OMP_CLAUSE__LOOPTEMP_:
5354 if (ignored_looptemp)
5356 ignored_looptemp--;
5357 continue;
5359 break;
5360 default:
5361 continue;
5364 val = OMP_CLAUSE_DECL (c);
5365 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5366 && TREE_CODE (val) == MEM_REF)
5368 val = TREE_OPERAND (val, 0);
5369 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5370 val = TREE_OPERAND (val, 0);
5371 if (TREE_CODE (val) == INDIRECT_REF
5372 || TREE_CODE (val) == ADDR_EXPR)
5373 val = TREE_OPERAND (val, 0);
5374 if (is_variable_sized (val))
5375 continue;
5378 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5379 outer taskloop region. */
5380 omp_context *ctx_for_o = ctx;
5381 if (is_taskloop
5382 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5383 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5384 ctx_for_o = ctx->outer;
5386 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5388 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5389 && is_global_var (var))
5390 continue;
5392 t = omp_member_access_dummy_var (var);
5393 if (t)
5395 var = DECL_VALUE_EXPR (var);
5396 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5397 if (o != t)
5398 var = unshare_and_remap (var, t, o);
5399 else
5400 var = unshare_expr (var);
5403 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5405 /* Handle taskloop firstprivate/lastprivate, where the
5406 lastprivate on GIMPLE_OMP_TASK is represented as
5407 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5408 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5409 x = omp_build_component_ref (ctx->sender_decl, f);
5410 if (use_pointer_for_field (val, ctx))
5411 var = build_fold_addr_expr (var);
5412 gimplify_assign (x, var, ilist);
5413 DECL_ABSTRACT_ORIGIN (f) = NULL;
5414 continue;
5417 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5418 || val == OMP_CLAUSE_DECL (c))
5419 && is_variable_sized (val))
5420 continue;
5421 by_ref = use_pointer_for_field (val, NULL);
5423 switch (OMP_CLAUSE_CODE (c))
5425 case OMP_CLAUSE_FIRSTPRIVATE:
5426 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5427 && !by_ref
5428 && is_task_ctx (ctx))
5429 TREE_NO_WARNING (var) = 1;
5430 do_in = true;
5431 break;
5433 case OMP_CLAUSE_PRIVATE:
5434 case OMP_CLAUSE_COPYIN:
5435 case OMP_CLAUSE__LOOPTEMP_:
5436 do_in = true;
5437 break;
5439 case OMP_CLAUSE_LASTPRIVATE:
5440 if (by_ref || omp_is_reference (val))
5442 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5443 continue;
5444 do_in = true;
5446 else
5448 do_out = true;
5449 if (lang_hooks.decls.omp_private_outer_ref (val))
5450 do_in = true;
5452 break;
5454 case OMP_CLAUSE_REDUCTION:
5455 do_in = true;
5456 if (val == OMP_CLAUSE_DECL (c))
5457 do_out = !(by_ref || omp_is_reference (val));
5458 else
5459 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5460 break;
5462 default:
5463 gcc_unreachable ();
5466 if (do_in)
5468 ref = build_sender_ref (val, ctx);
5469 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5470 gimplify_assign (ref, x, ilist);
5471 if (is_task_ctx (ctx))
5472 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5475 if (do_out)
5477 ref = build_sender_ref (val, ctx);
5478 gimplify_assign (var, ref, olist);
5483 /* Generate code to implement SHARED from the sender (aka parent)
5484 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5485 list things that got automatically shared. */
5487 static void
5488 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5490 tree var, ovar, nvar, t, f, x, record_type;
5492 if (ctx->record_type == NULL)
5493 return;
5495 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5496 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5498 ovar = DECL_ABSTRACT_ORIGIN (f);
5499 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5500 continue;
5502 nvar = maybe_lookup_decl (ovar, ctx);
5503 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5504 continue;
5506 /* If CTX is a nested parallel directive, find the immediately
5507 enclosing parallel or workshare construct that contains a
5508 mapping for OVAR. */
5509 var = lookup_decl_in_outer_ctx (ovar, ctx);
5511 t = omp_member_access_dummy_var (var);
5512 if (t)
5514 var = DECL_VALUE_EXPR (var);
5515 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5516 if (o != t)
5517 var = unshare_and_remap (var, t, o);
5518 else
5519 var = unshare_expr (var);
5522 if (use_pointer_for_field (ovar, ctx))
5524 x = build_sender_ref (ovar, ctx);
5525 var = build_fold_addr_expr (var);
5526 gimplify_assign (x, var, ilist);
5528 else
5530 x = build_sender_ref (ovar, ctx);
5531 gimplify_assign (x, var, ilist);
5533 if (!TREE_READONLY (var)
5534 /* We don't need to receive a new reference to a result
5535 or parm decl. In fact we may not store to it as we will
5536 invalidate any pending RSO and generate wrong gimple
5537 during inlining. */
5538 && !((TREE_CODE (var) == RESULT_DECL
5539 || TREE_CODE (var) == PARM_DECL)
5540 && DECL_BY_REFERENCE (var)))
5542 x = build_sender_ref (ovar, ctx);
5543 gimplify_assign (var, x, olist);
5549 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5550 other information that must be processed by the target compiler.
5551 Return the maximum number of dimensions the associated loop might
5552 be partitioned over. */
5554 static unsigned
5555 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5556 gimple_seq *seq, omp_context *ctx)
5558 unsigned levels = 0;
5559 unsigned tag = 0;
5560 tree gang_static = NULL_TREE;
5561 auto_vec<tree, 5> args;
5563 args.quick_push (build_int_cst
5564 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5565 args.quick_push (ddvar);
5566 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5568 switch (OMP_CLAUSE_CODE (c))
5570 case OMP_CLAUSE_GANG:
5571 tag |= OLF_DIM_GANG;
5572 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5573 /* static:* is represented by -1, and we can ignore it, as
5574 scheduling is always static. */
5575 if (gang_static && integer_minus_onep (gang_static))
5576 gang_static = NULL_TREE;
5577 levels++;
5578 break;
5580 case OMP_CLAUSE_WORKER:
5581 tag |= OLF_DIM_WORKER;
5582 levels++;
5583 break;
5585 case OMP_CLAUSE_VECTOR:
5586 tag |= OLF_DIM_VECTOR;
5587 levels++;
5588 break;
5590 case OMP_CLAUSE_SEQ:
5591 tag |= OLF_SEQ;
5592 break;
5594 case OMP_CLAUSE_AUTO:
5595 tag |= OLF_AUTO;
5596 break;
5598 case OMP_CLAUSE_INDEPENDENT:
5599 tag |= OLF_INDEPENDENT;
5600 break;
5602 default:
5603 continue;
5607 if (gang_static)
5609 if (DECL_P (gang_static))
5610 gang_static = build_outer_var_ref (gang_static, ctx);
5611 tag |= OLF_GANG_STATIC;
5614 /* In a parallel region, loops are implicitly INDEPENDENT. */
5615 omp_context *tgt = enclosing_target_ctx (ctx);
5616 if (!tgt || is_oacc_parallel (tgt))
5617 tag |= OLF_INDEPENDENT;
5619 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR is implicitly AUTO. */
5620 if (!(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1) << OLF_DIM_BASE)
5621 | OLF_SEQ)))
5622 tag |= OLF_AUTO;
5624 /* Ensure at least one level. */
5625 if (!levels)
5626 levels++;
5628 args.quick_push (build_int_cst (integer_type_node, levels));
5629 args.quick_push (build_int_cst (integer_type_node, tag));
5630 if (gang_static)
5631 args.quick_push (gang_static);
5633 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5634 gimple_set_location (call, loc);
5635 gimple_set_lhs (call, ddvar);
5636 gimple_seq_add_stmt (seq, call);
5638 return levels;
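/* For example (added sketch), '#pragma acc loop gang vector' would
   produce something like
     ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, 2,
                         OLF_DIM_GANG | OLF_DIM_VECTOR | ...);
   returning LEVELS == 2 to the caller.  */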
5641 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5642 partitioning level of the enclosed region. */
5644 static void
5645 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5646 tree tofollow, gimple_seq *seq)
5648 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5649 : IFN_UNIQUE_OACC_TAIL_MARK);
5650 tree marker = build_int_cst (integer_type_node, marker_kind);
5651 int nargs = 2 + (tofollow != NULL_TREE);
5652 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5653 marker, ddvar, tofollow);
5654 gimple_set_location (call, loc);
5655 gimple_set_lhs (call, ddvar);
5656 gimple_seq_add_stmt (seq, call);
5659 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5660 the loop clauses, from which we extract reductions. Initialize
5661 HEAD and TAIL. */
5663 static void
5664 lower_oacc_head_tail (location_t loc, tree clauses,
5665 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5667 bool inner = false;
5668 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5669 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5671 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5672 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5673 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5675 gcc_assert (count);
5676 for (unsigned done = 1; count; count--, done++)
5678 gimple_seq fork_seq = NULL;
5679 gimple_seq join_seq = NULL;
5681 tree place = build_int_cst (integer_type_node, -1);
5682 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5683 fork_kind, ddvar, place);
5684 gimple_set_location (fork, loc);
5685 gimple_set_lhs (fork, ddvar);
5687 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5688 join_kind, ddvar, place);
5689 gimple_set_location (join, loc);
5690 gimple_set_lhs (join, ddvar);
5692 /* Mark the beginning of this level sequence. */
5693 if (inner)
5694 lower_oacc_loop_marker (loc, ddvar, true,
5695 build_int_cst (integer_type_node, count),
5696 &fork_seq);
5697 lower_oacc_loop_marker (loc, ddvar, false,
5698 build_int_cst (integer_type_node, done),
5699 &join_seq);
5701 lower_oacc_reductions (loc, clauses, place, inner,
5702 fork, join, &fork_seq, &join_seq, ctx);
5704 /* Append this level to head. */
5705 gimple_seq_add_seq (head, fork_seq);
5706 /* Prepend it to tail. */
5707 gimple_seq_add_seq (&join_seq, *tail);
5708 *tail = join_seq;
5710 inner = true;
5713 /* Mark the end of the sequence. */
5714 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5715 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
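/* Added sketch: for a two-level (e.g. gang+vector) loop the result
   nests as
     HEAD: ... fork(gang) ... fork(vector) ... <loop body>
     TAIL: ... join(vector) ... join(gang) ...
   with IFN_UNIQUE head/tail markers delimiting each level and each
   level's reduction setup/init (resp. fini/teardown) wrapped around
   its fork (resp. join).  */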
5718 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5719 catch handler and return it. This prevents programs from violating the
5720 structured block semantics with throws. */
5722 static gimple_seq
5723 maybe_catch_exception (gimple_seq body)
5725 gimple *g;
5726 tree decl;
5728 if (!flag_exceptions)
5729 return body;
5731 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5732 decl = lang_hooks.eh_protect_cleanup_actions ();
5733 else
5734 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5736 g = gimple_build_eh_must_not_throw (decl);
5737 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5738 GIMPLE_TRY_CATCH);
5740 return gimple_seq_alloc_with_stmt (g);
5744 /* Routines to lower OMP directives into OMP-GIMPLE. */
5746 /* If ctx is a worksharing context inside of a cancellable parallel
5747 region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
5748 and a conditional branch to the parallel's cancel_label to handle
5749 cancellation in the implicit barrier. */
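/* A sketch of what is appended (the GIMPLE_OMP_RETURN acquires an lhs
   here, which at expansion time becomes the result of a cancellable
   barrier such as GOMP_barrier_cancel ()):

	.res = GIMPLE_OMP_RETURN;
	if (.res != false) goto <parallel's cancel_label>;
	<fallthru_label>:  */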
5751 static void
5752 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5754 gimple *omp_return = gimple_seq_last_stmt (*body);
5755 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5756 if (gimple_omp_return_nowait_p (omp_return))
5757 return;
5758 if (ctx->outer
5759 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5760 && ctx->outer->cancellable)
5762 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5763 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5764 tree lhs = create_tmp_var (c_bool_type);
5765 gimple_omp_return_set_lhs (omp_return, lhs);
5766 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5767 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5768 fold_convert (c_bool_type,
5769 boolean_false_node),
5770 ctx->outer->cancel_label, fallthru_label);
5771 gimple_seq_add_stmt (body, g);
5772 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5776 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5777 CTX is the enclosing OMP context for the current statement. */
5779 static void
5780 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5782 tree block, control;
5783 gimple_stmt_iterator tgsi;
5784 gomp_sections *stmt;
5785 gimple *t;
5786 gbind *new_stmt, *bind;
5787 gimple_seq ilist, dlist, olist, new_body;
5789 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5791 push_gimplify_context ();
5793 dlist = NULL;
5794 ilist = NULL;
5795 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5796 &ilist, &dlist, ctx, NULL);
5798 new_body = gimple_omp_body (stmt);
5799 gimple_omp_set_body (stmt, NULL);
5800 tgsi = gsi_start (new_body);
5801 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5803 omp_context *sctx;
5804 gimple *sec_start;
5806 sec_start = gsi_stmt (tgsi);
5807 sctx = maybe_lookup_ctx (sec_start);
5808 gcc_assert (sctx);
5810 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5811 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5812 GSI_CONTINUE_LINKING);
5813 gimple_omp_set_body (sec_start, NULL);
5815 if (gsi_one_before_end_p (tgsi))
5817 gimple_seq l = NULL;
5818 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5819 &l, ctx);
5820 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5821 gimple_omp_section_set_last (sec_start);
5824 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5825 GSI_CONTINUE_LINKING);
5828 block = make_node (BLOCK);
5829 bind = gimple_build_bind (NULL, new_body, block);
5831 olist = NULL;
5832 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5834 block = make_node (BLOCK);
5835 new_stmt = gimple_build_bind (NULL, NULL, block);
5836 gsi_replace (gsi_p, new_stmt, true);
5838 pop_gimplify_context (new_stmt);
5839 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5840 BLOCK_VARS (block) = gimple_bind_vars (bind);
5841 if (BLOCK_VARS (block))
5842 TREE_USED (block) = 1;
5844 new_body = NULL;
5845 gimple_seq_add_seq (&new_body, ilist);
5846 gimple_seq_add_stmt (&new_body, stmt);
5847 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5848 gimple_seq_add_stmt (&new_body, bind);
5850 control = create_tmp_var (unsigned_type_node, ".section");
5851 t = gimple_build_omp_continue (control, control);
5852 gimple_omp_sections_set_control (stmt, control);
5853 gimple_seq_add_stmt (&new_body, t);
5855 gimple_seq_add_seq (&new_body, olist);
5856 if (ctx->cancellable)
5857 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5858 gimple_seq_add_seq (&new_body, dlist);
5860 new_body = maybe_catch_exception (new_body);
5862 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5863 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5864 t = gimple_build_omp_return (nowait);
5865 gimple_seq_add_stmt (&new_body, t);
5866 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5868 gimple_bind_set_body (new_stmt, new_body);
5872 /* A subroutine of lower_omp_single. Expand the simple form of
5873 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5875 if (GOMP_single_start ())
5876 BODY;
5877 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5879 FIXME. It may be better to delay expanding the logic of this until
5880 pass_expand_omp. The expanded logic may make the job more difficult
5881 for a synchronization analysis pass. */
5883 static void
5884 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5886 location_t loc = gimple_location (single_stmt);
5887 tree tlabel = create_artificial_label (loc);
5888 tree flabel = create_artificial_label (loc);
5889 gimple *call, *cond;
5890 tree lhs, decl;
5892 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5893 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5894 call = gimple_build_call (decl, 0);
5895 gimple_call_set_lhs (call, lhs);
5896 gimple_seq_add_stmt (pre_p, call);
5898 cond = gimple_build_cond (EQ_EXPR, lhs,
5899 fold_convert_loc (loc, TREE_TYPE (lhs),
5900 boolean_true_node),
5901 tlabel, flabel);
5902 gimple_seq_add_stmt (pre_p, cond);
5903 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5904 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5905 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5909 /* A subroutine of lower_omp_single. Expand the copyprivate form of
5910 a GIMPLE_OMP_SINGLE, i.e. one with a copyprivate clause:
5912 #pragma omp single copyprivate (a, b, c)
5914 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5917 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5918 {
5919 BODY;
5920 copyout.a = a;
5921 copyout.b = b;
5922 copyout.c = c;
5923 GOMP_single_copy_end (&copyout);
5924 }
5925 else
5926 {
5927 a = copyout_p->a;
5928 b = copyout_p->b;
5929 c = copyout_p->c;
5930 }
5931 GOMP_barrier ();
5934 FIXME. It may be better to delay expanding the logic of this until
5935 pass_expand_omp. The expanded logic may make the job more difficult
5936 for a synchronization analysis pass. */
5938 static void
5939 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5940 omp_context *ctx)
5942 tree ptr_type, t, l0, l1, l2, bfn_decl;
5943 gimple_seq copyin_seq;
5944 location_t loc = gimple_location (single_stmt);
5946 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
5948 ptr_type = build_pointer_type (ctx->record_type);
5949 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
5951 l0 = create_artificial_label (loc);
5952 l1 = create_artificial_label (loc);
5953 l2 = create_artificial_label (loc);
5955 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
5956 t = build_call_expr_loc (loc, bfn_decl, 0);
5957 t = fold_convert_loc (loc, ptr_type, t);
5958 gimplify_assign (ctx->receiver_decl, t, pre_p);
5960 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
5961 build_int_cst (ptr_type, 0));
5962 t = build3 (COND_EXPR, void_type_node, t,
5963 build_and_jump (&l0), build_and_jump (&l1));
5964 gimplify_and_add (t, pre_p);
5966 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
5968 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5970 copyin_seq = NULL;
5971 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
5972 &copyin_seq, ctx);
5974 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
5975 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
5976 t = build_call_expr_loc (loc, bfn_decl, 1, t);
5977 gimplify_and_add (t, pre_p);
5979 t = build_and_jump (&l2);
5980 gimplify_and_add (t, pre_p);
5982 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
5984 gimple_seq_add_seq (pre_p, copyin_seq);
5986 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
5990 /* Expand code for an OpenMP single directive. */
5992 static void
5993 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5995 tree block;
5996 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
5997 gbind *bind;
5998 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6000 push_gimplify_context ();
6002 block = make_node (BLOCK);
6003 bind = gimple_build_bind (NULL, NULL, block);
6004 gsi_replace (gsi_p, bind, true);
6005 bind_body = NULL;
6006 dlist = NULL;
6007 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6008 &bind_body, &dlist, ctx, NULL);
6009 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6011 gimple_seq_add_stmt (&bind_body, single_stmt);
6013 if (ctx->record_type)
6014 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6015 else
6016 lower_omp_single_simple (single_stmt, &bind_body);
6018 gimple_omp_set_body (single_stmt, NULL);
6020 gimple_seq_add_seq (&bind_body, dlist);
6022 bind_body = maybe_catch_exception (bind_body);
6024 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6025 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6026 gimple *g = gimple_build_omp_return (nowait);
6027 gimple_seq_add_stmt (&bind_body_tail, g);
6028 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6029 if (ctx->record_type)
6031 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6032 tree clobber = build_constructor (ctx->record_type, NULL);
6033 TREE_THIS_VOLATILE (clobber) = 1;
6034 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6035 clobber), GSI_SAME_STMT);
6037 gimple_seq_add_seq (&bind_body, bind_body_tail);
6038 gimple_bind_set_body (bind, bind_body);
6040 pop_gimplify_context (bind);
6042 gimple_bind_append_vars (bind, ctx->block_vars);
6043 BLOCK_VARS (block) = ctx->block_vars;
6044 if (BLOCK_VARS (block))
6045 TREE_USED (block) = 1;
6049 /* Expand code for an OpenMP master directive. */
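/* Roughly (master has no implied barrier, hence the nowait return):

	if (omp_get_thread_num () != 0) goto <lab>;
	BODY;
	<lab>:
	GIMPLE_OMP_RETURN (nowait);  */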
6051 static void
6052 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6054 tree block, lab = NULL, x, bfn_decl;
6055 gimple *stmt = gsi_stmt (*gsi_p);
6056 gbind *bind;
6057 location_t loc = gimple_location (stmt);
6058 gimple_seq tseq;
6060 push_gimplify_context ();
6062 block = make_node (BLOCK);
6063 bind = gimple_build_bind (NULL, NULL, block);
6064 gsi_replace (gsi_p, bind, true);
6065 gimple_bind_add_stmt (bind, stmt);
6067 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6068 x = build_call_expr_loc (loc, bfn_decl, 0);
6069 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6070 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6071 tseq = NULL;
6072 gimplify_and_add (x, &tseq);
6073 gimple_bind_add_seq (bind, tseq);
6075 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6076 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6077 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6078 gimple_omp_set_body (stmt, NULL);
6080 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6082 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6084 pop_gimplify_context (bind);
6086 gimple_bind_append_vars (bind, ctx->block_vars);
6087 BLOCK_VARS (block) = ctx->block_vars;
6091 /* Expand code for an OpenMP taskgroup directive. */
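/* Roughly:

	GOMP_taskgroup_start ();
	BODY;
	GIMPLE_OMP_RETURN;

   (the matching GOMP_taskgroup_end () is presumably emitted when the
   GIMPLE_OMP_RETURN is expanded, in pass_expand_omp).  */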
6093 static void
6094 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6096 gimple *stmt = gsi_stmt (*gsi_p);
6097 gcall *x;
6098 gbind *bind;
6099 tree block = make_node (BLOCK);
6101 bind = gimple_build_bind (NULL, NULL, block);
6102 gsi_replace (gsi_p, bind, true);
6103 gimple_bind_add_stmt (bind, stmt);
6105 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6106 0);
6107 gimple_bind_add_stmt (bind, x);
6109 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6110 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6111 gimple_omp_set_body (stmt, NULL);
6113 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6115 gimple_bind_append_vars (bind, ctx->block_vars);
6116 BLOCK_VARS (block) = ctx->block_vars;
6120 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6122 static void
6123 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6124 omp_context *ctx)
6126 struct omp_for_data fd;
6127 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6128 return;
6130 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6131 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6132 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6133 if (!fd.ordered)
6134 return;
6136 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6137 tree c = gimple_omp_ordered_clauses (ord_stmt);
6138 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6139 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6141 /* Merge depend clauses from multiple adjacent
6142 #pragma omp ordered depend(sink:...) constructs
6143 into one #pragma omp ordered depend(sink:...), so that
6144 we can optimize them together. */
6145 gimple_stmt_iterator gsi = *gsi_p;
6146 gsi_next (&gsi);
6147 while (!gsi_end_p (gsi))
6149 gimple *stmt = gsi_stmt (gsi);
6150 if (is_gimple_debug (stmt)
6151 || gimple_code (stmt) == GIMPLE_NOP)
6153 gsi_next (&gsi);
6154 continue;
6156 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6157 break;
6158 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6159 c = gimple_omp_ordered_clauses (ord_stmt2);
6160 if (c == NULL_TREE
6161 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6162 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6163 break;
6164 while (*list_p)
6165 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6166 *list_p = c;
6167 gsi_remove (&gsi, true);
6171 /* Canonicalize sink dependence clauses into one folded clause if
6172 possible.
6174 The basic algorithm is to create a sink vector whose first
6175 element is the GCD of all the first elements, and whose remaining
6176 elements are the minimum of the subsequent columns.
6178 We ignore dependence vectors whose first element is zero because
6179 such dependencies are known to be executed by the same thread.
6181 We take into account the direction of the loop, so a minimum
6182 becomes a maximum if the loop is iterating forwards. We also
6183 ignore sink clauses where the loop direction is unknown, or where
6184 the offsets are clearly invalid because they are not a multiple
6185 of the loop increment.
6187 For example:
6189 #pragma omp for ordered(2)
6190 for (i=0; i < N; ++i)
6191 for (j=0; j < M; ++j)
6193 #pragma omp ordered \
6194 depend(sink:i-8,j-2) \
6195 depend(sink:i,j-1) \ // Completely ignored because i+0.
6196 depend(sink:i-4,j-3) \
6197 depend(sink:i-6,j-4)
6198 #pragma omp ordered depend(source)
6201 Folded clause is:
6203 depend(sink:-gcd(8,4,6),-min(2,3,4))
6204 -or-
6205 depend(sink:-2,-2)
6208 /* FIXME: Computing GCDs where the first element is zero is
6209 non-trivial in the presence of collapsed loops. Do this later. */
6210 if (fd.collapse > 1)
6211 return;
6213 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6214 memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6215 tree folded_dep = NULL_TREE;
6216 /* TRUE if the first dimension's offset is negative. */
6217 bool neg_offset_p = false;
6219 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6220 unsigned int i;
6221 while ((c = *list_p) != NULL)
6223 bool remove = false;
6225 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6226 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6227 goto next_ordered_clause;
6229 tree vec;
6230 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6231 vec && TREE_CODE (vec) == TREE_LIST;
6232 vec = TREE_CHAIN (vec), ++i)
6234 gcc_assert (i < len);
6236 /* omp_extract_for_data has canonicalized the condition. */
6237 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6238 || fd.loops[i].cond_code == GT_EXPR);
6239 bool forward = fd.loops[i].cond_code == LT_EXPR;
6240 bool maybe_lexically_later = true;
6242 /* While the committee makes up its mind, bail if we have any
6243 non-constant steps. */
6244 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6245 goto lower_omp_ordered_ret;
6247 tree itype = TREE_TYPE (TREE_VALUE (vec));
6248 if (POINTER_TYPE_P (itype))
6249 itype = sizetype;
6250 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6251 TYPE_PRECISION (itype),
6252 TYPE_SIGN (itype));
6254 /* Ignore invalid offsets that are not multiples of the step. */
6255 if (!wi::multiple_of_p
6256 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6257 UNSIGNED))
6259 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6260 "ignoring sink clause with offset that is not "
6261 "a multiple of the loop step");
6262 remove = true;
6263 goto next_ordered_clause;
6266 /* Calculate the first dimension. The first dimension of
6267 the folded dependency vector is the GCD of the first
6268 elements, while ignoring any first elements whose offset
6269 is 0. */
6270 if (i == 0)
6272 /* Ignore dependence vectors whose first dimension is 0. */
6273 if (offset == 0)
6275 remove = true;
6276 goto next_ordered_clause;
6278 else
6280 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6282 error_at (OMP_CLAUSE_LOCATION (c),
6283 "first offset must be in opposite direction "
6284 "of loop iterations");
6285 goto lower_omp_ordered_ret;
6287 if (forward)
6288 offset = -offset;
6289 neg_offset_p = forward;
6290 /* Initialize the first time around. */
6291 if (folded_dep == NULL_TREE)
6293 folded_dep = c;
6294 folded_deps[0] = offset;
6296 else
6297 folded_deps[0] = wi::gcd (folded_deps[0],
6298 offset, UNSIGNED);
6301 /* Calculate minimum for the remaining dimensions. */
6302 else
6304 folded_deps[len + i - 1] = offset;
6305 if (folded_dep == c)
6306 folded_deps[i] = offset;
6307 else if (maybe_lexically_later
6308 && !wi::eq_p (folded_deps[i], offset))
6310 if (forward ^ wi::gts_p (folded_deps[i], offset))
6312 unsigned int j;
6313 folded_dep = c;
6314 for (j = 1; j <= i; j++)
6315 folded_deps[j] = folded_deps[len + j - 1];
6317 else
6318 maybe_lexically_later = false;
6322 gcc_assert (i == len);
6324 remove = true;
6326 next_ordered_clause:
6327 if (remove)
6328 *list_p = OMP_CLAUSE_CHAIN (c);
6329 else
6330 list_p = &OMP_CLAUSE_CHAIN (c);
6333 if (folded_dep)
6335 if (neg_offset_p)
6336 folded_deps[0] = -folded_deps[0];
6338 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6339 if (POINTER_TYPE_P (itype))
6340 itype = sizetype;
6342 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6343 = wide_int_to_tree (itype, folded_deps[0]);
6344 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6345 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6348 lower_omp_ordered_ret:
6350 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
6351 while we want a nop instead if we remove all clauses. */
6352 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6353 gsi_replace (gsi_p, gimple_build_nop (), true);
6357 /* Expand code for an OpenMP ordered directive. */
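/* Besides wrapping the body in GOMP_ordered_start ()/GOMP_ordered_end ()
   (or the GOMP_SIMD_ORDERED_START/END internal calls for simd), the
   maybe_simt case below additionally serializes the body across SIMT
   lanes, roughly:

	counter = GOMP_SIMT_LANE ();
	<body_label>:
	if (GOMP_SIMT_ORDERED_PRED (counter) != 0) goto <test_label>;
	BODY;
	<test_label>:
	counter = counter - 1;
	if (GOMP_SIMT_VOTE_ANY (counter >= 0) != 0) goto <body_label>;
	<end_label>:  */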
6359 static void
6360 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6362 tree block;
6363 gimple *stmt = gsi_stmt (*gsi_p), *g;
6364 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6365 gcall *x;
6366 gbind *bind;
6367 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6368 OMP_CLAUSE_SIMD);
6369 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6370 loop. */
6371 bool maybe_simt
6372 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6373 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6374 OMP_CLAUSE_THREADS);
6376 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6377 OMP_CLAUSE_DEPEND))
6379 /* FIXME: This needs to be moved to the expansion, to verify various
6380 conditions only testable on cfg with dominators computed, and also
6381 all the depend clauses to be merged still might need to be available
6382 for the runtime checks. */
6383 if (0)
6384 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6385 return;
6388 push_gimplify_context ();
6390 block = make_node (BLOCK);
6391 bind = gimple_build_bind (NULL, NULL, block);
6392 gsi_replace (gsi_p, bind, true);
6393 gimple_bind_add_stmt (bind, stmt);
6395 if (simd)
6397 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6398 build_int_cst (NULL_TREE, threads));
6399 cfun->has_simduid_loops = true;
6401 else
6402 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6404 gimple_bind_add_stmt (bind, x);
6406 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6407 if (maybe_simt)
6409 counter = create_tmp_var (integer_type_node);
6410 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6411 gimple_call_set_lhs (g, counter);
6412 gimple_bind_add_stmt (bind, g);
6414 body = create_artificial_label (UNKNOWN_LOCATION);
6415 test = create_artificial_label (UNKNOWN_LOCATION);
6416 gimple_bind_add_stmt (bind, gimple_build_label (body));
6418 tree simt_pred = create_tmp_var (integer_type_node);
6419 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6420 gimple_call_set_lhs (g, simt_pred);
6421 gimple_bind_add_stmt (bind, g);
6423 tree t = create_artificial_label (UNKNOWN_LOCATION);
6424 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6425 gimple_bind_add_stmt (bind, g);
6427 gimple_bind_add_stmt (bind, gimple_build_label (t));
6429 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6430 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6431 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6432 gimple_omp_set_body (stmt, NULL);
6434 if (maybe_simt)
6436 gimple_bind_add_stmt (bind, gimple_build_label (test));
6437 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6438 gimple_bind_add_stmt (bind, g);
6440 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6441 tree nonneg = create_tmp_var (integer_type_node);
6442 gimple_seq tseq = NULL;
6443 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6444 gimple_bind_add_seq (bind, tseq);
6446 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6447 gimple_call_set_lhs (g, nonneg);
6448 gimple_bind_add_stmt (bind, g);
6450 tree end = create_artificial_label (UNKNOWN_LOCATION);
6451 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6452 gimple_bind_add_stmt (bind, g);
6454 gimple_bind_add_stmt (bind, gimple_build_label (end));
6456 if (simd)
6457 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6458 build_int_cst (NULL_TREE, threads));
6459 else
6460 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6461 0);
6462 gimple_bind_add_stmt (bind, x);
6464 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6466 pop_gimplify_context (bind);
6468 gimple_bind_append_vars (bind, ctx->block_vars);
6469 BLOCK_VARS (block) = gimple_bind_vars (bind);
6473 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6474 substitution of a couple of function calls. But the NAMED case
6475 requires that languages coordinate a symbol name. It is therefore
6476 best put here, in common code. */
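/* E.g. (a sketch; the actual calls are built below):

	#pragma omp critical (foo)
	  BODY;

   becomes

	GOMP_critical_name_start (&.gomp_critical_user_foo);
	BODY;
	GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form uses GOMP_critical_start ()
   and GOMP_critical_end ().  */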
6478 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6480 static void
6481 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6483 tree block;
6484 tree name, lock, unlock;
6485 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6486 gbind *bind;
6487 location_t loc = gimple_location (stmt);
6488 gimple_seq tbody;
6490 name = gimple_omp_critical_name (stmt);
6491 if (name)
6493 tree decl;
6495 if (!critical_name_mutexes)
6496 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6498 tree *n = critical_name_mutexes->get (name);
6499 if (n == NULL)
6501 char *new_str;
6503 decl = create_tmp_var_raw (ptr_type_node);
6505 new_str = ACONCAT ((".gomp_critical_user_",
6506 IDENTIFIER_POINTER (name), NULL));
6507 DECL_NAME (decl) = get_identifier (new_str);
6508 TREE_PUBLIC (decl) = 1;
6509 TREE_STATIC (decl) = 1;
6510 DECL_COMMON (decl) = 1;
6511 DECL_ARTIFICIAL (decl) = 1;
6512 DECL_IGNORED_P (decl) = 1;
6514 varpool_node::finalize_decl (decl);
6516 critical_name_mutexes->put (name, decl);
6518 else
6519 decl = *n;
6521 /* If '#pragma omp critical' is inside an offloaded region or
6522 inside a function marked as offloadable, the symbol must be
6523 marked as offloadable too. */
6524 omp_context *octx;
6525 if (cgraph_node::get (current_function_decl)->offloadable)
6526 varpool_node::get_create (decl)->offloadable = 1;
6527 else
6528 for (octx = ctx->outer; octx; octx = octx->outer)
6529 if (is_gimple_omp_offloaded (octx->stmt))
6531 varpool_node::get_create (decl)->offloadable = 1;
6532 break;
6535 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6536 lock = build_call_expr_loc (loc, lock, 1,
6537 build_fold_addr_expr_loc (loc, decl));
6539 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6540 unlock = build_call_expr_loc (loc, unlock, 1,
6541 build_fold_addr_expr_loc (loc, decl));
6543 else
6545 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6546 lock = build_call_expr_loc (loc, lock, 0);
6548 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6549 unlock = build_call_expr_loc (loc, unlock, 0);
6552 push_gimplify_context ();
6554 block = make_node (BLOCK);
6555 bind = gimple_build_bind (NULL, NULL, block);
6556 gsi_replace (gsi_p, bind, true);
6557 gimple_bind_add_stmt (bind, stmt);
6559 tbody = gimple_bind_body (bind);
6560 gimplify_and_add (lock, &tbody);
6561 gimple_bind_set_body (bind, tbody);
6563 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6564 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6565 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6566 gimple_omp_set_body (stmt, NULL);
6568 tbody = gimple_bind_body (bind);
6569 gimplify_and_add (unlock, &tbody);
6570 gimple_bind_set_body (bind, tbody);
6572 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6574 pop_gimplify_context (bind);
6575 gimple_bind_append_vars (bind, ctx->block_vars);
6576 BLOCK_VARS (block) = gimple_bind_vars (bind);
6579 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6580 for a lastprivate clause. Given a loop control predicate of (V
6581 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6582 is appended to *DLIST; the iterator initialization is appended to
6583 *BODY_P. */
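/* For instance (a sketch), given

	#pragma omp for lastprivate (x)
	for (i = 0; i < N; i++) ...

   the copy-back code in *DLIST is gated on (i >= N), or on (i == N)
   in the strict-equality case handled below, so that only the thread
   which ran the final iteration performs the lastprivate
   assignments.  */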
6585 static void
6586 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6587 gimple_seq *dlist, struct omp_context *ctx)
6589 tree clauses, cond, vinit;
6590 enum tree_code cond_code;
6591 gimple_seq stmts;
6593 cond_code = fd->loop.cond_code;
6594 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6596 /* When possible, use a strict equality expression. This can let
6597 VRP-type optimizations deduce the value and remove a copy. */
6598 if (tree_fits_shwi_p (fd->loop.step))
6600 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6601 if (step == 1 || step == -1)
6602 cond_code = EQ_EXPR;
6605 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6606 || gimple_omp_for_grid_phony (fd->for_stmt))
6607 cond = omp_grid_lastprivate_predicate (fd);
6608 else
6610 tree n2 = fd->loop.n2;
6611 if (fd->collapse > 1
6612 && TREE_CODE (n2) != INTEGER_CST
6613 && gimple_omp_for_combined_into_p (fd->for_stmt))
6615 struct omp_context *taskreg_ctx = NULL;
6616 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6618 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6619 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6620 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6622 if (gimple_omp_for_combined_into_p (gfor))
6624 gcc_assert (ctx->outer->outer
6625 && is_parallel_ctx (ctx->outer->outer));
6626 taskreg_ctx = ctx->outer->outer;
6628 else
6630 struct omp_for_data outer_fd;
6631 omp_extract_for_data (gfor, &outer_fd, NULL);
6632 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6635 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6636 taskreg_ctx = ctx->outer->outer;
6638 else if (is_taskreg_ctx (ctx->outer))
6639 taskreg_ctx = ctx->outer;
6640 if (taskreg_ctx)
6642 int i;
6643 tree taskreg_clauses
6644 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6645 tree innerc = omp_find_clause (taskreg_clauses,
6646 OMP_CLAUSE__LOOPTEMP_);
6647 gcc_assert (innerc);
6648 for (i = 0; i < fd->collapse; i++)
6650 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6651 OMP_CLAUSE__LOOPTEMP_);
6652 gcc_assert (innerc);
6654 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6655 OMP_CLAUSE__LOOPTEMP_);
6656 if (innerc)
6657 n2 = fold_convert (TREE_TYPE (n2),
6658 lookup_decl (OMP_CLAUSE_DECL (innerc),
6659 taskreg_ctx));
6662 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6665 clauses = gimple_omp_for_clauses (fd->for_stmt);
6666 stmts = NULL;
6667 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6668 if (!gimple_seq_empty_p (stmts))
6670 gimple_seq_add_seq (&stmts, *dlist);
6671 *dlist = stmts;
6673 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6674 vinit = fd->loop.n1;
6675 if (cond_code == EQ_EXPR
6676 && tree_fits_shwi_p (fd->loop.n2)
6677 && ! integer_zerop (fd->loop.n2))
6678 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6679 else
6680 vinit = unshare_expr (vinit);
6682 /* Initialize the iterator variable, so that threads that don't execute
6683 any iterations don't execute the lastprivate clauses by accident. */
6684 gimplify_assign (fd->loop.v, vinit, body_p);
6689 /* Lower code for an OMP loop directive. */
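/* The result is assembled below, roughly in this shape (a sketch):

	<ilist: data sharing clause setup>
	<pre-body; VAL1/VAL2/VAL3 temporaries>
	GIMPLE_OMP_FOR;
	<loop body>
	GIMPLE_OMP_CONTINUE (V, V);
	<reduction epilogue>  <dlist: lastprivate etc.>
	GIMPLE_OMP_RETURN;

   all wrapped in a new GIMPLE_BIND.  */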
6691 static void
6692 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6694 tree *rhs_p, block;
6695 struct omp_for_data fd, *fdp = NULL;
6696 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6697 gbind *new_stmt;
6698 gimple_seq omp_for_body, body, dlist;
6699 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6700 size_t i;
6702 push_gimplify_context ();
6704 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6706 block = make_node (BLOCK);
6707 new_stmt = gimple_build_bind (NULL, NULL, block);
6708 /* Replace at gsi right away, so that 'stmt' is no longer a member
6709 of a sequence, as we're going to add it to a different
6710 one below. */
6711 gsi_replace (gsi_p, new_stmt, true);
6713 /* Move declaration of temporaries in the loop body before we make
6714 it go away. */
6715 omp_for_body = gimple_omp_body (stmt);
6716 if (!gimple_seq_empty_p (omp_for_body)
6717 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6719 gbind *inner_bind
6720 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6721 tree vars = gimple_bind_vars (inner_bind);
6722 gimple_bind_append_vars (new_stmt, vars);
6723 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6724 keep them on the inner_bind and its block. */
6725 gimple_bind_set_vars (inner_bind, NULL_TREE);
6726 if (gimple_bind_block (inner_bind))
6727 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6730 if (gimple_omp_for_combined_into_p (stmt))
6732 omp_extract_for_data (stmt, &fd, NULL);
6733 fdp = &fd;
6735 /* We need two temporaries with fd.loop.v type (istart/iend)
6736 and then (fd.collapse - 1) temporaries with the same
6737 type for count2 ... countN-1 vars if not constant. */
6738 size_t count = 2;
6739 tree type = fd.iter_type;
6740 if (fd.collapse > 1
6741 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6742 count += fd.collapse - 1;
6743 bool taskreg_for
6744 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6745 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6746 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6747 tree clauses = *pc;
6748 if (taskreg_for)
6749 outerc
6750 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6751 OMP_CLAUSE__LOOPTEMP_);
6752 for (i = 0; i < count; i++)
6754 tree temp;
6755 if (taskreg_for)
6757 gcc_assert (outerc);
6758 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6759 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6760 OMP_CLAUSE__LOOPTEMP_);
6762 else
6764 temp = create_tmp_var (type);
6765 insert_decl_map (&ctx->outer->cb, temp, temp);
6767 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6768 OMP_CLAUSE_DECL (*pc) = temp;
6769 pc = &OMP_CLAUSE_CHAIN (*pc);
6771 *pc = clauses;
6774 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6775 dlist = NULL;
6776 body = NULL;
6777 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6778 fdp);
6779 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6781 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6783 /* Lower the header expressions. At this point, we can assume that
6784 the header is of the form:
6786 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6788 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6789 using the .omp_data_s mapping, if needed. */
6790 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6792 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6793 if (!is_gimple_min_invariant (*rhs_p))
6794 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6796 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6797 if (!is_gimple_min_invariant (*rhs_p))
6798 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6800 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6801 if (!is_gimple_min_invariant (*rhs_p))
6802 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6805 /* Once lowered, extract the bounds and clauses. */
6806 omp_extract_for_data (stmt, &fd, NULL);
6808 if (is_gimple_omp_oacc (ctx->stmt)
6809 && !ctx_in_oacc_kernels_region (ctx))
6810 lower_oacc_head_tail (gimple_location (stmt),
6811 gimple_omp_for_clauses (stmt),
6812 &oacc_head, &oacc_tail, ctx);
6814 /* Add OpenACC partitioning and reduction markers just before the loop. */
6815 if (oacc_head)
6816 gimple_seq_add_seq (&body, oacc_head);
6818 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6820 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6821 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6822 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6823 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6825 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6826 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6827 OMP_CLAUSE_LINEAR_STEP (c)
6828 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6829 ctx);
6832 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6833 && gimple_omp_for_grid_phony (stmt));
6834 if (!phony_loop)
6835 gimple_seq_add_stmt (&body, stmt);
6836 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6838 if (!phony_loop)
6839 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6840 fd.loop.v));
6842 /* After the loop, add exit clauses. */
6843 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6845 if (ctx->cancellable)
6846 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6848 gimple_seq_add_seq (&body, dlist);
6850 body = maybe_catch_exception (body);
6852 if (!phony_loop)
6854 /* Region exit marker goes at the end of the loop body. */
6855 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6856 maybe_add_implicit_barrier_cancel (ctx, &body);
6859 /* Add OpenACC joining and reduction markers just after the loop. */
6860 if (oacc_tail)
6861 gimple_seq_add_seq (&body, oacc_tail);
6863 pop_gimplify_context (new_stmt);
6865 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6866 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6867 if (BLOCK_VARS (block))
6868 TREE_USED (block) = 1;
6870 gimple_bind_set_body (new_stmt, body);
6871 gimple_omp_set_body (stmt, NULL);
6872 gimple_omp_for_set_pre_body (stmt, NULL);
6875 /* Callback for walk_stmts. Check whether the walked statement sequence
6876 contains exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6878 static tree
6879 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6880 bool *handled_ops_p,
6881 struct walk_stmt_info *wi)
6883 int *info = (int *) wi->info;
6884 gimple *stmt = gsi_stmt (*gsi_p);
6886 *handled_ops_p = true;
6887 switch (gimple_code (stmt))
6889 WALK_SUBSTMTS;
6891 case GIMPLE_OMP_FOR:
6892 case GIMPLE_OMP_SECTIONS:
6893 *info = *info == 0 ? 1 : -1;
6894 break;
6895 default:
6896 *info = -1;
6897 break;
6899 return NULL;
6902 struct omp_taskcopy_context
6904 /* This field must be at the beginning, as we do "inheritance": Some
6905 callback functions for tree-inline.c (e.g., omp_copy_decl)
6906 receive a copy_body_data pointer that is up-casted to an
6907 omp_context pointer. */
6908 copy_body_data cb;
6909 omp_context *ctx;
6912 static tree
6913 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6915 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6917 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6918 return create_tmp_var (TREE_TYPE (var));
6920 return var;
6923 static tree
6924 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6926 tree name, new_fields = NULL, type, f;
6928 type = lang_hooks.types.make_type (RECORD_TYPE);
6929 name = DECL_NAME (TYPE_NAME (orig_type));
6930 name = build_decl (gimple_location (tcctx->ctx->stmt),
6931 TYPE_DECL, name, type);
6932 TYPE_NAME (type) = name;
6934 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
6936 tree new_f = copy_node (f);
6937 DECL_CONTEXT (new_f) = type;
6938 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
6939 TREE_CHAIN (new_f) = new_fields;
6940 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6941 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6942 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
6943 &tcctx->cb, NULL);
6944 new_fields = new_f;
6945 tcctx->cb.decl_map->put (f, new_f);
6947 TYPE_FIELDS (type) = nreverse (new_fields);
6948 layout_type (type);
6949 return type;
6952 /* Create task copyfn. */
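/* Conceptually, the generated function has the shape

	void copyfn (struct record *dst, struct srecord *src)

   copying shared variable pointers, copy-constructing firstprivate
   variables, and fixing up VLA sizes and pointers, as done by the
   passes below (the names here are only illustrative).  */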
6954 static void
6955 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
6957 struct function *child_cfun;
6958 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
6959 tree record_type, srecord_type, bind, list;
6960 bool record_needs_remap = false, srecord_needs_remap = false;
6961 splay_tree_node n;
6962 struct omp_taskcopy_context tcctx;
6963 location_t loc = gimple_location (task_stmt);
6965 child_fn = gimple_omp_task_copy_fn (task_stmt);
6966 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
6967 gcc_assert (child_cfun->cfg == NULL);
6968 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
6970 /* Reset DECL_CONTEXT on function arguments. */
6971 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
6972 DECL_CONTEXT (t) = child_fn;
6974 /* Populate the function. */
6975 push_gimplify_context ();
6976 push_cfun (child_cfun);
6978 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6979 TREE_SIDE_EFFECTS (bind) = 1;
6980 list = NULL;
6981 DECL_SAVED_TREE (child_fn) = bind;
6982 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
6984 /* Remap src and dst argument types if needed. */
6985 record_type = ctx->record_type;
6986 srecord_type = ctx->srecord_type;
6987 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
6988 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
6990 record_needs_remap = true;
6991 break;
6993 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
6994 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
6996 srecord_needs_remap = true;
6997 break;
7000 if (record_needs_remap || srecord_needs_remap)
7002 memset (&tcctx, '\0', sizeof (tcctx));
7003 tcctx.cb.src_fn = ctx->cb.src_fn;
7004 tcctx.cb.dst_fn = child_fn;
7005 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7006 gcc_checking_assert (tcctx.cb.src_node);
7007 tcctx.cb.dst_node = tcctx.cb.src_node;
7008 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7009 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7010 tcctx.cb.eh_lp_nr = 0;
7011 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7012 tcctx.cb.decl_map = new hash_map<tree, tree>;
7013 tcctx.ctx = ctx;
7015 if (record_needs_remap)
7016 record_type = task_copyfn_remap_type (&tcctx, record_type);
7017 if (srecord_needs_remap)
7018 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7020 else
7021 tcctx.cb.decl_map = NULL;
7023 arg = DECL_ARGUMENTS (child_fn);
7024 TREE_TYPE (arg) = build_pointer_type (record_type);
7025 sarg = DECL_CHAIN (arg);
7026 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7028 /* First pass: initialize temporaries used in record_type and srecord_type
7029 sizes and field offsets. */
7030 if (tcctx.cb.decl_map)
7031 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7032 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7034 tree *p;
7036 decl = OMP_CLAUSE_DECL (c);
7037 p = tcctx.cb.decl_map->get (decl);
7038 if (p == NULL)
7039 continue;
7040 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7041 sf = (tree) n->value;
7042 sf = *tcctx.cb.decl_map->get (sf);
7043 src = build_simple_mem_ref_loc (loc, sarg);
7044 src = omp_build_component_ref (src, sf);
7045 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7046 append_to_statement_list (t, &list);
7049 /* Second pass: copy shared var pointers and copy construct non-VLA
7050 firstprivate vars. */
7051 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7052 switch (OMP_CLAUSE_CODE (c))
7054 splay_tree_key key;
7055 case OMP_CLAUSE_SHARED:
7056 decl = OMP_CLAUSE_DECL (c);
7057 key = (splay_tree_key) decl;
7058 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7059 key = (splay_tree_key) &DECL_UID (decl);
7060 n = splay_tree_lookup (ctx->field_map, key);
7061 if (n == NULL)
7062 break;
7063 f = (tree) n->value;
7064 if (tcctx.cb.decl_map)
7065 f = *tcctx.cb.decl_map->get (f);
7066 n = splay_tree_lookup (ctx->sfield_map, key);
7067 sf = (tree) n->value;
7068 if (tcctx.cb.decl_map)
7069 sf = *tcctx.cb.decl_map->get (sf);
7070 src = build_simple_mem_ref_loc (loc, sarg);
7071 src = omp_build_component_ref (src, sf);
7072 dst = build_simple_mem_ref_loc (loc, arg);
7073 dst = omp_build_component_ref (dst, f);
7074 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7075 append_to_statement_list (t, &list);
7076 break;
7077 case OMP_CLAUSE_FIRSTPRIVATE:
7078 decl = OMP_CLAUSE_DECL (c);
7079 if (is_variable_sized (decl))
7080 break;
7081 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7082 if (n == NULL)
7083 break;
7084 f = (tree) n->value;
7085 if (tcctx.cb.decl_map)
7086 f = *tcctx.cb.decl_map->get (f);
7087 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7088 if (n != NULL)
7090 sf = (tree) n->value;
7091 if (tcctx.cb.decl_map)
7092 sf = *tcctx.cb.decl_map->get (sf);
7093 src = build_simple_mem_ref_loc (loc, sarg);
7094 src = omp_build_component_ref (src, sf);
7095 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7096 src = build_simple_mem_ref_loc (loc, src);
7098 else
7099 src = decl;
7100 dst = build_simple_mem_ref_loc (loc, arg);
7101 dst = omp_build_component_ref (dst, f);
7102 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7103 append_to_statement_list (t, &list);
7104 break;
7105 case OMP_CLAUSE_PRIVATE:
7106 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7107 break;
7108 decl = OMP_CLAUSE_DECL (c);
7109 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7110 f = (tree) n->value;
7111 if (tcctx.cb.decl_map)
7112 f = *tcctx.cb.decl_map->get (f);
7113 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7114 if (n != NULL)
7116 sf = (tree) n->value;
7117 if (tcctx.cb.decl_map)
7118 sf = *tcctx.cb.decl_map->get (sf);
7119 src = build_simple_mem_ref_loc (loc, sarg);
7120 src = omp_build_component_ref (src, sf);
7121 if (use_pointer_for_field (decl, NULL))
7122 src = build_simple_mem_ref_loc (loc, src);
7124 else
7125 src = decl;
7126 dst = build_simple_mem_ref_loc (loc, arg);
7127 dst = omp_build_component_ref (dst, f);
7128 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7129 append_to_statement_list (t, &list);
7130 break;
7131 default:
7132 break;
7135 /* Last pass: handle VLA firstprivates. */
7136 if (tcctx.cb.decl_map)
7137 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7138 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7140 tree ind, ptr, df;
7142 decl = OMP_CLAUSE_DECL (c);
7143 if (!is_variable_sized (decl))
7144 continue;
7145 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7146 if (n == NULL)
7147 continue;
7148 f = (tree) n->value;
7149 f = *tcctx.cb.decl_map->get (f);
7150 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7151 ind = DECL_VALUE_EXPR (decl);
7152 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7153 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7154 n = splay_tree_lookup (ctx->sfield_map,
7155 (splay_tree_key) TREE_OPERAND (ind, 0));
7156 sf = (tree) n->value;
7157 sf = *tcctx.cb.decl_map->get (sf);
7158 src = build_simple_mem_ref_loc (loc, sarg);
7159 src = omp_build_component_ref (src, sf);
7160 src = build_simple_mem_ref_loc (loc, src);
7161 dst = build_simple_mem_ref_loc (loc, arg);
7162 dst = omp_build_component_ref (dst, f);
7163 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7164 append_to_statement_list (t, &list);
7165 n = splay_tree_lookup (ctx->field_map,
7166 (splay_tree_key) TREE_OPERAND (ind, 0));
7167 df = (tree) n->value;
7168 df = *tcctx.cb.decl_map->get (df);
7169 ptr = build_simple_mem_ref_loc (loc, arg);
7170 ptr = omp_build_component_ref (ptr, df);
7171 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7172 build_fold_addr_expr_loc (loc, dst));
7173 append_to_statement_list (t, &list);
7176 t = build1 (RETURN_EXPR, void_type_node, NULL);
7177 append_to_statement_list (t, &list);
7179 if (tcctx.cb.decl_map)
7180 delete tcctx.cb.decl_map;
7181 pop_gimplify_context (NULL);
7182 BIND_EXPR_BODY (bind) = list;
7183 pop_cfun ();
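/* Lower the depend clauses in *PCLAUSES into a flat array of addresses
   passed to the runtime; initialization code goes to *ISEQ and a final
   clobber to *OSEQ.  Illustratively, for depend(out:a) depend(in:b,c)
   the array (call it .dep; the name is only illustrative) is laid out as

	.dep[0] = 3;	total number of depend addresses
	.dep[1] = 1;	number of out/inout addresses
	.dep[2] = &a;	out/inout operands first,
	.dep[3] = &b;	then the in operands
	.dep[4] = &c;

   and a new OMP_CLAUSE_DEPEND pointing at the array is prepended to
   *PCLAUSES.  */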
7186 static void
7187 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7189 tree c, clauses;
7190 gimple *g;
7191 size_t n_in = 0, n_out = 0, idx = 2, i;
7193 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7194 gcc_assert (clauses);
7195 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7196 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7197 switch (OMP_CLAUSE_DEPEND_KIND (c))
7199 case OMP_CLAUSE_DEPEND_IN:
7200 n_in++;
7201 break;
7202 case OMP_CLAUSE_DEPEND_OUT:
7203 case OMP_CLAUSE_DEPEND_INOUT:
7204 n_out++;
7205 break;
7206 case OMP_CLAUSE_DEPEND_SOURCE:
7207 case OMP_CLAUSE_DEPEND_SINK:
7208 /* FALLTHRU */
7209 default:
7210 gcc_unreachable ();
7212 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7213 tree array = create_tmp_var (type);
7214 TREE_ADDRESSABLE (array) = 1;
7215 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7216 NULL_TREE);
7217 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7218 gimple_seq_add_stmt (iseq, g);
7219 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7220 NULL_TREE);
7221 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7222 gimple_seq_add_stmt (iseq, g);
7223 for (i = 0; i < 2; i++)
7225 if ((i ? n_in : n_out) == 0)
7226 continue;
7227 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7228 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7229 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7231 tree t = OMP_CLAUSE_DECL (c);
7232 t = fold_convert (ptr_type_node, t);
7233 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7234 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7235 NULL_TREE, NULL_TREE);
7236 g = gimple_build_assign (r, t);
7237 gimple_seq_add_stmt (iseq, g);
7240 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7241 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7242 OMP_CLAUSE_CHAIN (c) = *pclauses;
7243 *pclauses = c;
7244 tree clobber = build_constructor (type, NULL);
7245 TREE_THIS_VOLATILE (clobber) = 1;
7246 g = gimple_build_assign (array, clobber);
7247 gimple_seq_add_stmt (oseq, g);
7250 /* Lower the OpenMP parallel or task directive in the current statement
7251 in GSI_P. CTX holds context information for the directive. */
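/* In outline, the lowered form looks roughly like this (a sketch;
   .omp_data_o and .omp_data_i are the sender and receiver objects,
   and for tasks a GIMPLE_OMP_CONTINUE precedes the return):

	<ilist: .omp_data_o.x = x; ...>
	GIMPLE_OMP_PARALLEL/TASK
	  .omp_data_i = &.omp_data_o;
	  <par_ilist: per-thread copies>
	  BODY;
	  <par_rlist: reductions>  <par_olist: copy-back>
	  GIMPLE_OMP_RETURN;
	<olist>;  .omp_data_o = CLOBBER;  */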
7253 static void
7254 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7256 tree clauses;
7257 tree child_fn, t;
7258 gimple *stmt = gsi_stmt (*gsi_p);
7259 gbind *par_bind, *bind, *dep_bind = NULL;
7260 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7261 location_t loc = gimple_location (stmt);
7263 clauses = gimple_omp_taskreg_clauses (stmt);
7264 par_bind
7265 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7266 par_body = gimple_bind_body (par_bind);
7267 child_fn = ctx->cb.dst_fn;
7268 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7269 && !gimple_omp_parallel_combined_p (stmt))
7271 struct walk_stmt_info wi;
7272 int ws_num = 0;
7274 memset (&wi, 0, sizeof (wi));
7275 wi.info = &ws_num;
7276 wi.val_only = true;
7277 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7278 if (ws_num == 1)
7279 gimple_omp_parallel_set_combined_p (stmt, true);
7281 gimple_seq dep_ilist = NULL;
7282 gimple_seq dep_olist = NULL;
7283 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7284 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7286 push_gimplify_context ();
7287 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7288 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7289 &dep_ilist, &dep_olist);
7292 if (ctx->srecord_type)
7293 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7295 push_gimplify_context ();
7297 par_olist = NULL;
7298 par_ilist = NULL;
7299 par_rlist = NULL;
7300 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7301 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7302 if (phony_construct && ctx->record_type)
7304 gcc_checking_assert (!ctx->receiver_decl);
7305 ctx->receiver_decl = create_tmp_var
7306 (build_reference_type (ctx->record_type), ".omp_rec");
7308 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7309 lower_omp (&par_body, ctx);
7310 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7311 lower_reduction_clauses (clauses, &par_rlist, ctx);
7313 /* Declare all the variables created by mapping and the variables
7314 declared in the scope of the parallel body. */
7315 record_vars_into (ctx->block_vars, child_fn);
7316 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7318 if (ctx->record_type)
7320 ctx->sender_decl
7321 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7322 : ctx->record_type, ".omp_data_o");
7323 DECL_NAMELESS (ctx->sender_decl) = 1;
7324 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7325 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7328 olist = NULL;
7329 ilist = NULL;
7330 lower_send_clauses (clauses, &ilist, &olist, ctx);
7331 lower_send_shared_vars (&ilist, &olist, ctx);
7333 if (ctx->record_type)
7335 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7336 TREE_THIS_VOLATILE (clobber) = 1;
7337 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7338 clobber));
7341 /* Once all the expansions are done, sequence all the different
7342 fragments inside gimple_omp_body. */
7344 new_body = NULL;
7346 if (ctx->record_type)
7348 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7349 /* fixup_child_record_type might have changed receiver_decl's type. */
7350 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7351 gimple_seq_add_stmt (&new_body,
7352 gimple_build_assign (ctx->receiver_decl, t));
7355 gimple_seq_add_seq (&new_body, par_ilist);
7356 gimple_seq_add_seq (&new_body, par_body);
7357 gimple_seq_add_seq (&new_body, par_rlist);
7358 if (ctx->cancellable)
7359 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7360 gimple_seq_add_seq (&new_body, par_olist);
7361 new_body = maybe_catch_exception (new_body);
7362 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7363 gimple_seq_add_stmt (&new_body,
7364 gimple_build_omp_continue (integer_zero_node,
7365 integer_zero_node));
7366 if (!phony_construct)
7368 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7369 gimple_omp_set_body (stmt, new_body);
7372 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7373 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7374 gimple_bind_add_seq (bind, ilist);
7375 if (!phony_construct)
7376 gimple_bind_add_stmt (bind, stmt);
7377 else
7378 gimple_bind_add_seq (bind, new_body);
7379 gimple_bind_add_seq (bind, olist);
7381 pop_gimplify_context (NULL);
7383 if (dep_bind)
7385 gimple_bind_add_seq (dep_bind, dep_ilist);
7386 gimple_bind_add_stmt (dep_bind, bind);
7387 gimple_bind_add_seq (dep_bind, dep_olist);
7388 pop_gimplify_context (dep_bind);
7392 /* Lower the GIMPLE_OMP_TARGET in the current statement
7393 in GSI_P. CTX holds context information for the directive. */
7395 static void
7396 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7398 tree clauses;
7399 tree child_fn, t, c;
7400 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7401 gbind *tgt_bind, *bind, *dep_bind = NULL;
7402 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7403 location_t loc = gimple_location (stmt);
7404 bool offloaded, data_region;
7405 unsigned int map_cnt = 0;
7407 offloaded = is_gimple_omp_offloaded (stmt);
7408 switch (gimple_omp_target_kind (stmt))
7410 case GF_OMP_TARGET_KIND_REGION:
7411 case GF_OMP_TARGET_KIND_UPDATE:
7412 case GF_OMP_TARGET_KIND_ENTER_DATA:
7413 case GF_OMP_TARGET_KIND_EXIT_DATA:
7414 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7415 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7416 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7417 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7418 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7419 data_region = false;
7420 break;
7421 case GF_OMP_TARGET_KIND_DATA:
7422 case GF_OMP_TARGET_KIND_OACC_DATA:
7423 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7424 data_region = true;
7425 break;
7426 default:
7427 gcc_unreachable ();
7430 clauses = gimple_omp_target_clauses (stmt);
7432 gimple_seq dep_ilist = NULL;
7433 gimple_seq dep_olist = NULL;
7434 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7436 push_gimplify_context ();
7437 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7438 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7439 &dep_ilist, &dep_olist);
7442 tgt_bind = NULL;
7443 tgt_body = NULL;
7444 if (offloaded)
7446 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7447 tgt_body = gimple_bind_body (tgt_bind);
7449 else if (data_region)
7450 tgt_body = gimple_omp_body (stmt);
7451 child_fn = ctx->cb.dst_fn;
7453 push_gimplify_context ();
7454 fplist = NULL;
7456 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7457 switch (OMP_CLAUSE_CODE (c))
7459 tree var, x;
7461 default:
7462 break;
7463 case OMP_CLAUSE_MAP:
7464 #if CHECKING_P
7465 /* First check what we're prepared to handle in the following. */
7466 switch (OMP_CLAUSE_MAP_KIND (c))
7468 case GOMP_MAP_ALLOC:
7469 case GOMP_MAP_TO:
7470 case GOMP_MAP_FROM:
7471 case GOMP_MAP_TOFROM:
7472 case GOMP_MAP_POINTER:
7473 case GOMP_MAP_TO_PSET:
7474 case GOMP_MAP_DELETE:
7475 case GOMP_MAP_RELEASE:
7476 case GOMP_MAP_ALWAYS_TO:
7477 case GOMP_MAP_ALWAYS_FROM:
7478 case GOMP_MAP_ALWAYS_TOFROM:
7479 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7480 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7481 case GOMP_MAP_STRUCT:
7482 case GOMP_MAP_ALWAYS_POINTER:
7483 break;
7484 case GOMP_MAP_FORCE_ALLOC:
7485 case GOMP_MAP_FORCE_TO:
7486 case GOMP_MAP_FORCE_FROM:
7487 case GOMP_MAP_FORCE_TOFROM:
7488 case GOMP_MAP_FORCE_PRESENT:
7489 case GOMP_MAP_FORCE_DEVICEPTR:
7490 case GOMP_MAP_DEVICE_RESIDENT:
7491 case GOMP_MAP_LINK:
7492 gcc_assert (is_gimple_omp_oacc (stmt));
7493 break;
7494 default:
7495 gcc_unreachable ();
7497 #endif
7498 /* FALLTHRU */
7499 case OMP_CLAUSE_TO:
7500 case OMP_CLAUSE_FROM:
7501 oacc_firstprivate:
7502 var = OMP_CLAUSE_DECL (c);
7503 if (!DECL_P (var))
7505 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7506 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7507 && (OMP_CLAUSE_MAP_KIND (c)
7508 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7509 map_cnt++;
7510 continue;
7513 if (DECL_SIZE (var)
7514 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7516 tree var2 = DECL_VALUE_EXPR (var);
7517 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7518 var2 = TREE_OPERAND (var2, 0);
7519 gcc_assert (DECL_P (var2));
7520 var = var2;
7523 if (offloaded
7524 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7525 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7526 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7528 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7530 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7531 && varpool_node::get_create (var)->offloadable)
7532 continue;
7534 tree type = build_pointer_type (TREE_TYPE (var));
7535 tree new_var = lookup_decl (var, ctx);
7536 x = create_tmp_var_raw (type, get_name (new_var));
7537 gimple_add_tmp_var (x);
7538 x = build_simple_mem_ref (x);
7539 SET_DECL_VALUE_EXPR (new_var, x);
7540 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7542 continue;
7545 if (!maybe_lookup_field (var, ctx))
7546 continue;
7548 /* Don't remap oacc parallel reduction variables, because the
7549 intermediate result must be local to each gang. */
7550 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7551 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7553 x = build_receiver_ref (var, true, ctx);
7554 tree new_var = lookup_decl (var, ctx);
7556 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7557 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7558 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7559 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7560 x = build_simple_mem_ref (x);
7561 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7563 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7564 if (omp_is_reference (new_var))
7566 /* Create a local object to hold the instance
7567 value. */
7568 tree type = TREE_TYPE (TREE_TYPE (new_var));
7569 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7570 tree inst = create_tmp_var (type, id);
7571 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7572 x = build_fold_addr_expr (inst);
7574 gimplify_assign (new_var, x, &fplist);
7576 else if (DECL_P (new_var))
7578 SET_DECL_VALUE_EXPR (new_var, x);
7579 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7581 else
7582 gcc_unreachable ();
7584 map_cnt++;
7585 break;
7587 case OMP_CLAUSE_FIRSTPRIVATE:
7588 if (is_oacc_parallel (ctx))
7589 goto oacc_firstprivate;
7590 map_cnt++;
7591 var = OMP_CLAUSE_DECL (c);
7592 if (!omp_is_reference (var)
7593 && !is_gimple_reg_type (TREE_TYPE (var)))
7595 tree new_var = lookup_decl (var, ctx);
7596 if (is_variable_sized (var))
7598 tree pvar = DECL_VALUE_EXPR (var);
7599 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7600 pvar = TREE_OPERAND (pvar, 0);
7601 gcc_assert (DECL_P (pvar));
7602 tree new_pvar = lookup_decl (pvar, ctx);
7603 x = build_fold_indirect_ref (new_pvar);
7604 TREE_THIS_NOTRAP (x) = 1;
7606 else
7607 x = build_receiver_ref (var, true, ctx);
7608 SET_DECL_VALUE_EXPR (new_var, x);
7609 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7611 break;
7613 case OMP_CLAUSE_PRIVATE:
7614 if (is_gimple_omp_oacc (ctx->stmt))
7615 break;
7616 var = OMP_CLAUSE_DECL (c);
7617 if (is_variable_sized (var))
7619 tree new_var = lookup_decl (var, ctx);
7620 tree pvar = DECL_VALUE_EXPR (var);
7621 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7622 pvar = TREE_OPERAND (pvar, 0);
7623 gcc_assert (DECL_P (pvar));
7624 tree new_pvar = lookup_decl (pvar, ctx);
7625 x = build_fold_indirect_ref (new_pvar);
7626 TREE_THIS_NOTRAP (x) = 1;
7627 SET_DECL_VALUE_EXPR (new_var, x);
7628 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7630 break;
7632 case OMP_CLAUSE_USE_DEVICE_PTR:
7633 case OMP_CLAUSE_IS_DEVICE_PTR:
7634 var = OMP_CLAUSE_DECL (c);
7635 map_cnt++;
7636 if (is_variable_sized (var))
7638 tree new_var = lookup_decl (var, ctx);
7639 tree pvar = DECL_VALUE_EXPR (var);
7640 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7641 pvar = TREE_OPERAND (pvar, 0);
7642 gcc_assert (DECL_P (pvar));
7643 tree new_pvar = lookup_decl (pvar, ctx);
7644 x = build_fold_indirect_ref (new_pvar);
7645 TREE_THIS_NOTRAP (x) = 1;
7646 SET_DECL_VALUE_EXPR (new_var, x);
7647 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7649 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7651 tree new_var = lookup_decl (var, ctx);
7652 tree type = build_pointer_type (TREE_TYPE (var));
7653 x = create_tmp_var_raw (type, get_name (new_var));
7654 gimple_add_tmp_var (x);
7655 x = build_simple_mem_ref (x);
7656 SET_DECL_VALUE_EXPR (new_var, x);
7657 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7659 else
7661 tree new_var = lookup_decl (var, ctx);
7662 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7663 gimple_add_tmp_var (x);
7664 SET_DECL_VALUE_EXPR (new_var, x);
7665 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7667 break;
7670 if (offloaded)
7672 target_nesting_level++;
7673 lower_omp (&tgt_body, ctx);
7674 target_nesting_level--;
7676 else if (data_region)
7677 lower_omp (&tgt_body, ctx);
7679 if (offloaded)
7681 /* Declare all the variables created by mapping and the variables
7682 declared in the scope of the target body. */
7683 record_vars_into (ctx->block_vars, child_fn);
7684 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7687 olist = NULL;
7688 ilist = NULL;
7689 if (ctx->record_type)
7691 ctx->sender_decl
7692 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7693 DECL_NAMELESS (ctx->sender_decl) = 1;
7694 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7695 t = make_tree_vec (3);
7696 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7697 TREE_VEC_ELT (t, 1)
7698 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7699 ".omp_data_sizes");
7700 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7701 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7702 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7703 tree tkind_type = short_unsigned_type_node;
7704 int talign_shift = 8;
7705 TREE_VEC_ELT (t, 2)
7706 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7707 ".omp_data_kinds");
7708 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7709 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7710 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7711 gimple_omp_target_set_data_arg (stmt, t);
7713 vec<constructor_elt, va_gc> *vsize;
7714 vec<constructor_elt, va_gc> *vkind;
7715 vec_alloc (vsize, map_cnt);
7716 vec_alloc (vkind, map_cnt);
7717 unsigned int map_idx = 0;
7719 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7720 switch (OMP_CLAUSE_CODE (c))
7722 tree ovar, nc, s, purpose, var, x, type;
7723 unsigned int talign;
7725 default:
7726 break;
7728 case OMP_CLAUSE_MAP:
7729 case OMP_CLAUSE_TO:
7730 case OMP_CLAUSE_FROM:
7731 oacc_firstprivate_map:
7732 nc = c;
7733 ovar = OMP_CLAUSE_DECL (c);
7734 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7735 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7736 || (OMP_CLAUSE_MAP_KIND (c)
7737 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7738 break;
7739 if (!DECL_P (ovar))
7741 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7742 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7744 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7745 == get_base_address (ovar));
7746 nc = OMP_CLAUSE_CHAIN (c);
7747 ovar = OMP_CLAUSE_DECL (nc);
7749 else
7751 tree x = build_sender_ref (ovar, ctx);
7752 tree v
7753 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7754 gimplify_assign (x, v, &ilist);
7755 nc = NULL_TREE;
7758 else
7760 if (DECL_SIZE (ovar)
7761 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7763 tree ovar2 = DECL_VALUE_EXPR (ovar);
7764 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7765 ovar2 = TREE_OPERAND (ovar2, 0);
7766 gcc_assert (DECL_P (ovar2));
7767 ovar = ovar2;
7769 if (!maybe_lookup_field (ovar, ctx))
7770 continue;
7773 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7774 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7775 talign = DECL_ALIGN_UNIT (ovar);
7776 if (nc)
7778 var = lookup_decl_in_outer_ctx (ovar, ctx);
7779 x = build_sender_ref (ovar, ctx);
7781 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7782 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7783 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7784 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7786 gcc_assert (offloaded);
7787 tree avar
7788 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7789 mark_addressable (avar);
7790 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7791 talign = DECL_ALIGN_UNIT (avar);
7792 avar = build_fold_addr_expr (avar);
7793 gimplify_assign (x, avar, &ilist);
7795 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7797 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7798 if (!omp_is_reference (var))
7800 if (is_gimple_reg (var)
7801 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7802 TREE_NO_WARNING (var) = 1;
7803 var = build_fold_addr_expr (var);
7805 else
7806 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7807 gimplify_assign (x, var, &ilist);
7809 else if (is_gimple_reg (var))
7811 gcc_assert (offloaded);
7812 tree avar = create_tmp_var (TREE_TYPE (var));
7813 mark_addressable (avar);
7814 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7815 if (GOMP_MAP_COPY_TO_P (map_kind)
7816 || map_kind == GOMP_MAP_POINTER
7817 || map_kind == GOMP_MAP_TO_PSET
7818 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7820 /* If we need to initialize a temporary
7821 with VAR because it is not addressable, and
7822 the variable hasn't been initialized yet, then
7823 we'll get a warning for the store to avar.
7824 Don't warn in that case; the mapping might
7825 be implicit. */
7826 TREE_NO_WARNING (var) = 1;
7827 gimplify_assign (avar, var, &ilist);
7829 avar = build_fold_addr_expr (avar);
7830 gimplify_assign (x, avar, &ilist);
7831 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7832 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7833 && !TYPE_READONLY (TREE_TYPE (var)))
7835 x = unshare_expr (x);
7836 x = build_simple_mem_ref (x);
7837 gimplify_assign (var, x, &olist);
7840 else
7842 var = build_fold_addr_expr (var);
7843 gimplify_assign (x, var, &ilist);
7846 s = NULL_TREE;
7847 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7849 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7850 s = TREE_TYPE (ovar);
7851 if (TREE_CODE (s) == REFERENCE_TYPE)
7852 s = TREE_TYPE (s);
7853 s = TYPE_SIZE_UNIT (s);
7855 else
7856 s = OMP_CLAUSE_SIZE (c);
7857 if (s == NULL_TREE)
7858 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7859 s = fold_convert (size_type_node, s);
7860 purpose = size_int (map_idx++);
7861 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7862 if (TREE_CODE (s) != INTEGER_CST)
7863 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7865 unsigned HOST_WIDE_INT tkind, tkind_zero;
7866 switch (OMP_CLAUSE_CODE (c))
7868 case OMP_CLAUSE_MAP:
7869 tkind = OMP_CLAUSE_MAP_KIND (c);
7870 tkind_zero = tkind;
7871 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7872 switch (tkind)
7874 case GOMP_MAP_ALLOC:
7875 case GOMP_MAP_TO:
7876 case GOMP_MAP_FROM:
7877 case GOMP_MAP_TOFROM:
7878 case GOMP_MAP_ALWAYS_TO:
7879 case GOMP_MAP_ALWAYS_FROM:
7880 case GOMP_MAP_ALWAYS_TOFROM:
7881 case GOMP_MAP_RELEASE:
7882 case GOMP_MAP_FORCE_TO:
7883 case GOMP_MAP_FORCE_FROM:
7884 case GOMP_MAP_FORCE_TOFROM:
7885 case GOMP_MAP_FORCE_PRESENT:
7886 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7887 break;
7888 case GOMP_MAP_DELETE:
7889 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7890 default:
7891 break;
7893 if (tkind_zero != tkind)
7895 if (integer_zerop (s))
7896 tkind = tkind_zero;
7897 else if (integer_nonzerop (s))
7898 tkind_zero = tkind;
7900 break;
7901 case OMP_CLAUSE_FIRSTPRIVATE:
7902 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7903 tkind = GOMP_MAP_TO;
7904 tkind_zero = tkind;
7905 break;
7906 case OMP_CLAUSE_TO:
7907 tkind = GOMP_MAP_TO;
7908 tkind_zero = tkind;
7909 break;
7910 case OMP_CLAUSE_FROM:
7911 tkind = GOMP_MAP_FROM;
7912 tkind_zero = tkind;
7913 break;
7914 default:
7915 gcc_unreachable ();
7917 gcc_checking_assert (tkind
7918 < (HOST_WIDE_INT_C (1U) << talign_shift));
7919 gcc_checking_assert (tkind_zero
7920 < (HOST_WIDE_INT_C (1U) << talign_shift));
7921 talign = ceil_log2 (talign);
7922 tkind |= talign << talign_shift;
7923 tkind_zero |= talign << talign_shift;
7924 gcc_checking_assert (tkind
7925 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
7926 gcc_checking_assert (tkind_zero
7927 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
7928 if (tkind == tkind_zero)
7929 x = build_int_cstu (tkind_type, tkind);
7930 else
7932 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
7933 x = build3 (COND_EXPR, tkind_type,
7934 fold_build2 (EQ_EXPR, boolean_type_node,
7935 unshare_expr (s), size_zero_node),
7936 build_int_cstu (tkind_type, tkind_zero),
7937 build_int_cstu (tkind_type, tkind));
7939 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
7940 if (nc && nc != c)
7941 c = nc;
7942 break;
7944 case OMP_CLAUSE_FIRSTPRIVATE:
7945 if (is_oacc_parallel (ctx))
7946 goto oacc_firstprivate_map;
7947 ovar = OMP_CLAUSE_DECL (c);
7948 if (omp_is_reference (ovar))
7949 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7950 else
7951 talign = DECL_ALIGN_UNIT (ovar);
7952 var = lookup_decl_in_outer_ctx (ovar, ctx);
7953 x = build_sender_ref (ovar, ctx);
7954 tkind = GOMP_MAP_FIRSTPRIVATE;
7955 type = TREE_TYPE (ovar);
7956 if (omp_is_reference (ovar))
7957 type = TREE_TYPE (type);
7958 if ((INTEGRAL_TYPE_P (type)
7959 && TYPE_PRECISION (type) <= POINTER_SIZE)
7960 || TREE_CODE (type) == POINTER_TYPE)
7962 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
7963 tree t = var;
7964 if (omp_is_reference (var))
7965 t = build_simple_mem_ref (var);
7966 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7967 TREE_NO_WARNING (var) = 1;
7968 if (TREE_CODE (type) != POINTER_TYPE)
7969 t = fold_convert (pointer_sized_int_node, t);
7970 t = fold_convert (TREE_TYPE (x), t);
7971 gimplify_assign (x, t, &ilist);
7973 else if (omp_is_reference (var))
7974 gimplify_assign (x, var, &ilist);
7975 else if (is_gimple_reg (var))
7977 tree avar = create_tmp_var (TREE_TYPE (var));
7978 mark_addressable (avar);
7979 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7980 TREE_NO_WARNING (var) = 1;
7981 gimplify_assign (avar, var, &ilist);
7982 avar = build_fold_addr_expr (avar);
7983 gimplify_assign (x, avar, &ilist);
7985 else
7987 var = build_fold_addr_expr (var);
7988 gimplify_assign (x, var, &ilist);
7990 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
7991 s = size_int (0);
7992 else if (omp_is_reference (ovar))
7993 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7994 else
7995 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7996 s = fold_convert (size_type_node, s);
7997 purpose = size_int (map_idx++);
7998 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7999 if (TREE_CODE (s) != INTEGER_CST)
8000 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8002 gcc_checking_assert (tkind
8003 < (HOST_WIDE_INT_C (1U) << talign_shift));
8004 talign = ceil_log2 (talign);
8005 tkind |= talign << talign_shift;
8006 gcc_checking_assert (tkind
8007 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8008 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8009 build_int_cstu (tkind_type, tkind));
8010 break;
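/* Illustrative sketch, not verbatim compiler output: for a scalar
   firstprivate such as

       int x;
       #pragma omp target firstprivate (x)

   the GOMP_MAP_FIRSTPRIVATE_INT path above passes the value of X itself,
   widened through pointer_sized_int_node, directly in the .omp_data_arr
   slot, with a size entry of 0, so no separate copy of X is mapped.
   Non-scalar or larger firstprivates fall back to passing the address of
   an addressable temporary.  */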
8012 case OMP_CLAUSE_USE_DEVICE_PTR:
8013 case OMP_CLAUSE_IS_DEVICE_PTR:
8014 ovar = OMP_CLAUSE_DECL (c);
8015 var = lookup_decl_in_outer_ctx (ovar, ctx);
8016 x = build_sender_ref (ovar, ctx);
8017 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8018 tkind = GOMP_MAP_USE_DEVICE_PTR;
8019 else
8020 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8021 type = TREE_TYPE (ovar);
8022 if (TREE_CODE (type) == ARRAY_TYPE)
8023 var = build_fold_addr_expr (var);
8024 else
8026 if (omp_is_reference (ovar))
8028 type = TREE_TYPE (type);
8029 if (TREE_CODE (type) != ARRAY_TYPE)
8030 var = build_simple_mem_ref (var);
8031 var = fold_convert (TREE_TYPE (x), var);
8034 gimplify_assign (x, var, &ilist);
8035 s = size_int (0);
8036 purpose = size_int (map_idx++);
8037 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8038 gcc_checking_assert (tkind
8039 < (HOST_WIDE_INT_C (1U) << talign_shift));
8040 gcc_checking_assert (tkind
8041 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8042 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8043 build_int_cstu (tkind_type, tkind));
8044 break;
8047 gcc_assert (map_idx == map_cnt);
8049 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8050 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8051 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8052 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8053 for (int i = 1; i <= 2; i++)
8054 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8056 gimple_seq initlist = NULL;
8057 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8058 TREE_VEC_ELT (t, i)),
8059 &initlist, true, NULL_TREE);
8060 gimple_seq_add_seq (&ilist, initlist);
8062 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8063 NULL);
8064 TREE_THIS_VOLATILE (clobber) = 1;
8065 gimple_seq_add_stmt (&olist,
8066 gimple_build_assign (TREE_VEC_ELT (t, i),
8067 clobber));
8070 tree clobber = build_constructor (ctx->record_type, NULL);
8071 TREE_THIS_VOLATILE (clobber) = 1;
8072 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8073 clobber));
8076 /* Once all the expansions are done, sequence all the different
8077 fragments inside gimple_omp_body. */
8079 new_body = NULL;
8081 if (offloaded
8082 && ctx->record_type)
8084 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8085 /* fixup_child_record_type might have changed receiver_decl's type. */
8086 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8087 gimple_seq_add_stmt (&new_body,
8088 gimple_build_assign (ctx->receiver_decl, t));
8090 gimple_seq_add_seq (&new_body, fplist);
8092 if (offloaded || data_region)
8094 tree prev = NULL_TREE;
8095 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8096 switch (OMP_CLAUSE_CODE (c))
8098 tree var, x;
8099 default:
8100 break;
8101 case OMP_CLAUSE_FIRSTPRIVATE:
8102 if (is_gimple_omp_oacc (ctx->stmt))
8103 break;
8104 var = OMP_CLAUSE_DECL (c);
8105 if (omp_is_reference (var)
8106 || is_gimple_reg_type (TREE_TYPE (var)))
8108 tree new_var = lookup_decl (var, ctx);
8109 tree type;
8110 type = TREE_TYPE (var);
8111 if (omp_is_reference (var))
8112 type = TREE_TYPE (type);
8113 if ((INTEGRAL_TYPE_P (type)
8114 && TYPE_PRECISION (type) <= POINTER_SIZE)
8115 || TREE_CODE (type) == POINTER_TYPE)
8117 x = build_receiver_ref (var, false, ctx);
8118 if (TREE_CODE (type) != POINTER_TYPE)
8119 x = fold_convert (pointer_sized_int_node, x);
8120 x = fold_convert (type, x);
8121 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8122 fb_rvalue);
8123 if (omp_is_reference (var))
8125 tree v = create_tmp_var_raw (type, get_name (var));
8126 gimple_add_tmp_var (v);
8127 TREE_ADDRESSABLE (v) = 1;
8128 gimple_seq_add_stmt (&new_body,
8129 gimple_build_assign (v, x));
8130 x = build_fold_addr_expr (v);
8132 gimple_seq_add_stmt (&new_body,
8133 gimple_build_assign (new_var, x));
8135 else
8137 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8138 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8139 fb_rvalue);
8140 gimple_seq_add_stmt (&new_body,
8141 gimple_build_assign (new_var, x));
8144 else if (is_variable_sized (var))
8146 tree pvar = DECL_VALUE_EXPR (var);
8147 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8148 pvar = TREE_OPERAND (pvar, 0);
8149 gcc_assert (DECL_P (pvar));
8150 tree new_var = lookup_decl (pvar, ctx);
8151 x = build_receiver_ref (var, false, ctx);
8152 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8153 gimple_seq_add_stmt (&new_body,
8154 gimple_build_assign (new_var, x));
8156 break;
8157 case OMP_CLAUSE_PRIVATE:
8158 if (is_gimple_omp_oacc (ctx->stmt))
8159 break;
8160 var = OMP_CLAUSE_DECL (c);
8161 if (omp_is_reference (var))
8163 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8164 tree new_var = lookup_decl (var, ctx);
8165 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8166 if (TREE_CONSTANT (x))
8168 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8169 get_name (var));
8170 gimple_add_tmp_var (x);
8171 TREE_ADDRESSABLE (x) = 1;
8172 x = build_fold_addr_expr_loc (clause_loc, x);
8174 else
8175 break;
8177 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8178 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8179 gimple_seq_add_stmt (&new_body,
8180 gimple_build_assign (new_var, x));
8182 break;
8183 case OMP_CLAUSE_USE_DEVICE_PTR:
8184 case OMP_CLAUSE_IS_DEVICE_PTR:
8185 var = OMP_CLAUSE_DECL (c);
8186 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8187 x = build_sender_ref (var, ctx);
8188 else
8189 x = build_receiver_ref (var, false, ctx);
8190 if (is_variable_sized (var))
8192 tree pvar = DECL_VALUE_EXPR (var);
8193 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8194 pvar = TREE_OPERAND (pvar, 0);
8195 gcc_assert (DECL_P (pvar));
8196 tree new_var = lookup_decl (pvar, ctx);
8197 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8198 gimple_seq_add_stmt (&new_body,
8199 gimple_build_assign (new_var, x));
8201 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8203 tree new_var = lookup_decl (var, ctx);
8204 new_var = DECL_VALUE_EXPR (new_var);
8205 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8206 new_var = TREE_OPERAND (new_var, 0);
8207 gcc_assert (DECL_P (new_var));
8208 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8209 gimple_seq_add_stmt (&new_body,
8210 gimple_build_assign (new_var, x));
8212 else
8214 tree type = TREE_TYPE (var);
8215 tree new_var = lookup_decl (var, ctx);
8216 if (omp_is_reference (var))
8218 type = TREE_TYPE (type);
8219 if (TREE_CODE (type) != ARRAY_TYPE)
8221 tree v = create_tmp_var_raw (type, get_name (var));
8222 gimple_add_tmp_var (v);
8223 TREE_ADDRESSABLE (v) = 1;
8224 x = fold_convert (type, x);
8225 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8226 fb_rvalue);
8227 gimple_seq_add_stmt (&new_body,
8228 gimple_build_assign (v, x));
8229 x = build_fold_addr_expr (v);
8232 new_var = DECL_VALUE_EXPR (new_var);
8233 x = fold_convert (TREE_TYPE (new_var), x);
8234 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8235 gimple_seq_add_stmt (&new_body,
8236 gimple_build_assign (new_var, x));
8238 break;
8240 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8241 so that any firstprivate vars that OMP_CLAUSE_SIZE might refer to
8242 have already been handled. Similarly for OMP_CLAUSE_PRIVATE on VLAs
8243 or references to VLAs. */
8244 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8245 switch (OMP_CLAUSE_CODE (c))
8247 tree var;
8248 default:
8249 break;
8250 case OMP_CLAUSE_MAP:
8251 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8252 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8254 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8255 HOST_WIDE_INT offset = 0;
8256 gcc_assert (prev);
8257 var = OMP_CLAUSE_DECL (c);
8258 if (DECL_P (var)
8259 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8260 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8261 ctx))
8262 && varpool_node::get_create (var)->offloadable)
8263 break;
8264 if (TREE_CODE (var) == INDIRECT_REF
8265 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8266 var = TREE_OPERAND (var, 0);
8267 if (TREE_CODE (var) == COMPONENT_REF)
8269 var = get_addr_base_and_unit_offset (var, &offset);
8270 gcc_assert (var != NULL_TREE && DECL_P (var));
8272 else if (DECL_SIZE (var)
8273 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8275 tree var2 = DECL_VALUE_EXPR (var);
8276 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8277 var2 = TREE_OPERAND (var2, 0);
8278 gcc_assert (DECL_P (var2));
8279 var = var2;
8281 tree new_var = lookup_decl (var, ctx), x;
8282 tree type = TREE_TYPE (new_var);
8283 bool is_ref;
8284 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8285 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8286 == COMPONENT_REF))
8288 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8289 is_ref = true;
8290 new_var = build2 (MEM_REF, type,
8291 build_fold_addr_expr (new_var),
8292 build_int_cst (build_pointer_type (type),
8293 offset));
8295 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8297 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8298 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8299 new_var = build2 (MEM_REF, type,
8300 build_fold_addr_expr (new_var),
8301 build_int_cst (build_pointer_type (type),
8302 offset));
8304 else
8305 is_ref = omp_is_reference (var);
8306 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8307 is_ref = false;
8308 bool ref_to_array = false;
8309 if (is_ref)
8311 type = TREE_TYPE (type);
8312 if (TREE_CODE (type) == ARRAY_TYPE)
8314 type = build_pointer_type (type);
8315 ref_to_array = true;
8318 else if (TREE_CODE (type) == ARRAY_TYPE)
8320 tree decl2 = DECL_VALUE_EXPR (new_var);
8321 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8322 decl2 = TREE_OPERAND (decl2, 0);
8323 gcc_assert (DECL_P (decl2));
8324 new_var = decl2;
8325 type = TREE_TYPE (new_var);
8327 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8328 x = fold_convert_loc (clause_loc, type, x);
8329 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8331 tree bias = OMP_CLAUSE_SIZE (c);
8332 if (DECL_P (bias))
8333 bias = lookup_decl (bias, ctx);
8334 bias = fold_convert_loc (clause_loc, sizetype, bias);
8335 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8336 bias);
8337 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8338 TREE_TYPE (x), x, bias);
8340 if (ref_to_array)
8341 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8342 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8343 if (is_ref && !ref_to_array)
8345 tree t = create_tmp_var_raw (type, get_name (var));
8346 gimple_add_tmp_var (t);
8347 TREE_ADDRESSABLE (t) = 1;
8348 gimple_seq_add_stmt (&new_body,
8349 gimple_build_assign (t, x));
8350 x = build_fold_addr_expr_loc (clause_loc, t);
8352 gimple_seq_add_stmt (&new_body,
8353 gimple_build_assign (new_var, x));
8354 prev = NULL_TREE;
8356 else if (OMP_CLAUSE_CHAIN (c)
8357 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8358 == OMP_CLAUSE_MAP
8359 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8360 == GOMP_MAP_FIRSTPRIVATE_POINTER
8361 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8362 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8363 prev = c;
8364 break;
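/* Illustrative sketch, not verbatim compiler output: for

       int *p;
       #pragma omp target map (p[0:n])

   the preceding map clause transfers the array section, and the trailing
   GOMP_MAP_FIRSTPRIVATE_POINTER clause handled above initializes the
   device-side copy of P from that map's receiver field, subtracting the
   OMP_CLAUSE_SIZE bias so that P again points at p[0] rather than at the
   start of the transferred section.  */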
8365 case OMP_CLAUSE_PRIVATE:
8366 var = OMP_CLAUSE_DECL (c);
8367 if (is_variable_sized (var))
8369 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8370 tree new_var = lookup_decl (var, ctx);
8371 tree pvar = DECL_VALUE_EXPR (var);
8372 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8373 pvar = TREE_OPERAND (pvar, 0);
8374 gcc_assert (DECL_P (pvar));
8375 tree new_pvar = lookup_decl (pvar, ctx);
8376 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8377 tree al = size_int (DECL_ALIGN (var));
8378 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8379 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8380 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8381 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8382 gimple_seq_add_stmt (&new_body,
8383 gimple_build_assign (new_pvar, x));
8385 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8387 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8388 tree new_var = lookup_decl (var, ctx);
8389 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8390 if (TREE_CONSTANT (x))
8391 break;
8392 else
8394 tree atmp
8395 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8396 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8397 tree al = size_int (TYPE_ALIGN (rtype));
8398 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8401 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8402 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8403 gimple_seq_add_stmt (&new_body,
8404 gimple_build_assign (new_var, x));
8406 break;
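/* Illustrative sketch, not verbatim compiler output: for a private VLA,
   the pointer behind its DECL_VALUE_EXPR is seeded with stack storage,
   conceptually

       vla.ptr = __builtin_alloca_with_align (TYPE_SIZE_UNIT (type),
					      DECL_ALIGN (vla));

   and, as handled just above, a private reference to a non-constant-size
   type gets fresh storage allocated the same way.  */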
8409 gimple_seq fork_seq = NULL;
8410 gimple_seq join_seq = NULL;
8412 if (is_oacc_parallel (ctx))
8414 /* If there are reductions on the offloaded region itself, treat
8415 them as a dummy GANG loop. */
8416 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8418 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8419 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8422 gimple_seq_add_seq (&new_body, fork_seq);
8423 gimple_seq_add_seq (&new_body, tgt_body);
8424 gimple_seq_add_seq (&new_body, join_seq);
8426 if (offloaded)
8427 new_body = maybe_catch_exception (new_body);
8429 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8430 gimple_omp_set_body (stmt, new_body);
8433 bind = gimple_build_bind (NULL, NULL,
8434 tgt_bind ? gimple_bind_block (tgt_bind)
8435 : NULL_TREE);
8436 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8437 gimple_bind_add_seq (bind, ilist);
8438 gimple_bind_add_stmt (bind, stmt);
8439 gimple_bind_add_seq (bind, olist);
8441 pop_gimplify_context (NULL);
8443 if (dep_bind)
8445 gimple_bind_add_seq (dep_bind, dep_ilist);
8446 gimple_bind_add_stmt (dep_bind, bind);
8447 gimple_bind_add_seq (dep_bind, dep_olist);
8448 pop_gimplify_context (dep_bind);
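/* Illustrative sketch of the overall result, an assumption about the
   typical shape rather than a quoted dump: for

       #pragma omp target map (tofrom: x)

   the lowering above produces roughly

       .omp_data_arr.x = &x;
       .omp_data_sizes = { sizeof (x) };
       .omp_data_kinds = { GOMP_MAP_TOFROM | log2 (align) << 8 };
       #pragma omp target ...   <- data arguments from the TREE_VEC
	 body reading/writing X through the receiver decl

   with the three arrays recorded via gimple_omp_target_set_data_arg
   above.  */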
8452 /* Expand code for an OpenMP teams directive. */
8454 static void
8455 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8457 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8458 push_gimplify_context ();
8460 tree block = make_node (BLOCK);
8461 gbind *bind = gimple_build_bind (NULL, NULL, block);
8462 gsi_replace (gsi_p, bind, true);
8463 gimple_seq bind_body = NULL;
8464 gimple_seq dlist = NULL;
8465 gimple_seq olist = NULL;
8467 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8468 OMP_CLAUSE_NUM_TEAMS);
8469 if (num_teams == NULL_TREE)
8470 num_teams = build_int_cst (unsigned_type_node, 0);
8471 else
8473 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8474 num_teams = fold_convert (unsigned_type_node, num_teams);
8475 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8477 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8478 OMP_CLAUSE_THREAD_LIMIT);
8479 if (thread_limit == NULL_TREE)
8480 thread_limit = build_int_cst (unsigned_type_node, 0);
8481 else
8483 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8484 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8485 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8486 fb_rvalue);
8489 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8490 &bind_body, &dlist, ctx, NULL);
8491 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8492 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8493 if (!gimple_omp_teams_grid_phony (teams_stmt))
8495 gimple_seq_add_stmt (&bind_body, teams_stmt);
8496 location_t loc = gimple_location (teams_stmt);
8497 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8498 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8499 gimple_set_location (call, loc);
8500 gimple_seq_add_stmt (&bind_body, call);
8503 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8504 gimple_omp_set_body (teams_stmt, NULL);
8505 gimple_seq_add_seq (&bind_body, olist);
8506 gimple_seq_add_seq (&bind_body, dlist);
8507 if (!gimple_omp_teams_grid_phony (teams_stmt))
8508 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8509 gimple_bind_set_body (bind, bind_body);
8511 pop_gimplify_context (bind);
8513 gimple_bind_append_vars (bind, ctx->block_vars);
8514 BLOCK_VARS (block) = ctx->block_vars;
8515 if (BLOCK_VARS (block))
8516 TREE_USED (block) = 1;
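/* Illustrative sketch, not verbatim compiler output: for

       #pragma omp teams num_teams (4) thread_limit (64)

   the lowering above emits, ahead of the teams body,

       GOMP_teams (4, 64);

   and an omitted clause contributes 0, leaving the choice to the
   runtime.  */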
8519 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8521 static void
8522 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8524 gimple *stmt = gsi_stmt (*gsi_p);
8525 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8526 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8527 gimple_build_omp_return (false));
8531 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8532 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8533 of OMP context, but with task_shared_vars set. */
8535 static tree
8536 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8537 void *data)
8539 tree t = *tp;
8541 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8542 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8543 return t;
8545 if (task_shared_vars
8546 && DECL_P (t)
8547 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8548 return t;
8550 /* If a global variable has been privatized, TREE_CONSTANT on
8551 ADDR_EXPR might be wrong. */
8552 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8553 recompute_tree_invariant_for_addr_expr (t);
8555 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8556 return NULL_TREE;
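/* For example, and as an assumption about the typical shape rather than
   a quoted dump: a variable I remapped via SET_DECL_VALUE_EXPR to
   something like .omp_data_i->i makes a GIMPLE_COND mentioning I no
   longer gimple-valid, so the walk above flags the statement for
   lower_omp_regimplify_operands.  */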
8559 /* Data to be communicated between lower_omp_regimplify_operands and
8560 lower_omp_regimplify_operands_p. */
8562 struct lower_omp_regimplify_operands_data
8564 omp_context *ctx;
8565 vec<tree> *decls;
8568 /* Helper function for lower_omp_regimplify_operands. Find
8569 omp_member_access_dummy_var vars and temporarily adjust their
8570 DECL_VALUE_EXPRs if needed. */
8572 static tree
8573 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8574 void *data)
8576 tree t = omp_member_access_dummy_var (*tp);
8577 if (t)
8579 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8580 lower_omp_regimplify_operands_data *ldata
8581 = (lower_omp_regimplify_operands_data *) wi->info;
8582 tree o = maybe_lookup_decl (t, ldata->ctx);
8583 if (o != t)
8585 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8586 ldata->decls->safe_push (*tp);
8587 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8588 SET_DECL_VALUE_EXPR (*tp, v);
8591 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8592 return NULL_TREE;
8595 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8596 of omp_member_access_dummy_var vars during regimplification. */
8598 static void
8599 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8600 gimple_stmt_iterator *gsi_p)
8602 auto_vec<tree, 10> decls;
8603 if (ctx)
8605 struct walk_stmt_info wi;
8606 memset (&wi, '\0', sizeof (wi));
8607 struct lower_omp_regimplify_operands_data data;
8608 data.ctx = ctx;
8609 data.decls = &decls;
8610 wi.info = &data;
8611 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8613 gimple_regimplify_operands (stmt, gsi_p);
8614 while (!decls.is_empty ())
8616 tree t = decls.pop ();
8617 tree v = decls.pop ();
8618 SET_DECL_VALUE_EXPR (t, v);
8622 static void
8623 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8625 gimple *stmt = gsi_stmt (*gsi_p);
8626 struct walk_stmt_info wi;
8627 gcall *call_stmt;
8629 if (gimple_has_location (stmt))
8630 input_location = gimple_location (stmt);
8632 if (task_shared_vars)
8633 memset (&wi, '\0', sizeof (wi));
8635 /* If we have issued syntax errors, avoid doing any heavy lifting.
8636 Just replace the OMP directives with a NOP to avoid
8637 confusing RTL expansion. */
8638 if (seen_error () && is_gimple_omp (stmt))
8640 gsi_replace (gsi_p, gimple_build_nop (), true);
8641 return;
8644 switch (gimple_code (stmt))
8646 case GIMPLE_COND:
8648 gcond *cond_stmt = as_a <gcond *> (stmt);
8649 if ((ctx || task_shared_vars)
8650 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8651 lower_omp_regimplify_p,
8652 ctx ? NULL : &wi, NULL)
8653 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8654 lower_omp_regimplify_p,
8655 ctx ? NULL : &wi, NULL)))
8656 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8658 break;
8659 case GIMPLE_CATCH:
8660 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8661 break;
8662 case GIMPLE_EH_FILTER:
8663 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8664 break;
8665 case GIMPLE_TRY:
8666 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8667 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8668 break;
8669 case GIMPLE_TRANSACTION:
8670 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8671 ctx);
8672 break;
8673 case GIMPLE_BIND:
8674 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8675 break;
8676 case GIMPLE_OMP_PARALLEL:
8677 case GIMPLE_OMP_TASK:
8678 ctx = maybe_lookup_ctx (stmt);
8679 gcc_assert (ctx);
8680 if (ctx->cancellable)
8681 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8682 lower_omp_taskreg (gsi_p, ctx);
8683 break;
8684 case GIMPLE_OMP_FOR:
8685 ctx = maybe_lookup_ctx (stmt);
8686 gcc_assert (ctx);
8687 if (ctx->cancellable)
8688 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8689 lower_omp_for (gsi_p, ctx);
8690 break;
8691 case GIMPLE_OMP_SECTIONS:
8692 ctx = maybe_lookup_ctx (stmt);
8693 gcc_assert (ctx);
8694 if (ctx->cancellable)
8695 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8696 lower_omp_sections (gsi_p, ctx);
8697 break;
8698 case GIMPLE_OMP_SINGLE:
8699 ctx = maybe_lookup_ctx (stmt);
8700 gcc_assert (ctx);
8701 lower_omp_single (gsi_p, ctx);
8702 break;
8703 case GIMPLE_OMP_MASTER:
8704 ctx = maybe_lookup_ctx (stmt);
8705 gcc_assert (ctx);
8706 lower_omp_master (gsi_p, ctx);
8707 break;
8708 case GIMPLE_OMP_TASKGROUP:
8709 ctx = maybe_lookup_ctx (stmt);
8710 gcc_assert (ctx);
8711 lower_omp_taskgroup (gsi_p, ctx);
8712 break;
8713 case GIMPLE_OMP_ORDERED:
8714 ctx = maybe_lookup_ctx (stmt);
8715 gcc_assert (ctx);
8716 lower_omp_ordered (gsi_p, ctx);
8717 break;
8718 case GIMPLE_OMP_CRITICAL:
8719 ctx = maybe_lookup_ctx (stmt);
8720 gcc_assert (ctx);
8721 lower_omp_critical (gsi_p, ctx);
8722 break;
8723 case GIMPLE_OMP_ATOMIC_LOAD:
8724 if ((ctx || task_shared_vars)
8725 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8726 as_a <gomp_atomic_load *> (stmt)),
8727 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8728 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8729 break;
8730 case GIMPLE_OMP_TARGET:
8731 ctx = maybe_lookup_ctx (stmt);
8732 gcc_assert (ctx);
8733 lower_omp_target (gsi_p, ctx);
8734 break;
8735 case GIMPLE_OMP_TEAMS:
8736 ctx = maybe_lookup_ctx (stmt);
8737 gcc_assert (ctx);
8738 lower_omp_teams (gsi_p, ctx);
8739 break;
8740 case GIMPLE_OMP_GRID_BODY:
8741 ctx = maybe_lookup_ctx (stmt);
8742 gcc_assert (ctx);
8743 lower_omp_grid_body (gsi_p, ctx);
8744 break;
8745 case GIMPLE_CALL:
8746 tree fndecl;
8747 call_stmt = as_a <gcall *> (stmt);
8748 fndecl = gimple_call_fndecl (call_stmt);
8749 if (fndecl
8750 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8751 switch (DECL_FUNCTION_CODE (fndecl))
8753 case BUILT_IN_GOMP_BARRIER:
8754 if (ctx == NULL)
8755 break;
8756 /* FALLTHRU */
8757 case BUILT_IN_GOMP_CANCEL:
8758 case BUILT_IN_GOMP_CANCELLATION_POINT:
8759 omp_context *cctx;
8760 cctx = ctx;
8761 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8762 cctx = cctx->outer;
8763 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8764 if (!cctx->cancellable)
8766 if (DECL_FUNCTION_CODE (fndecl)
8767 == BUILT_IN_GOMP_CANCELLATION_POINT)
8769 stmt = gimple_build_nop ();
8770 gsi_replace (gsi_p, stmt, false);
8772 break;
8774 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8776 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8777 gimple_call_set_fndecl (call_stmt, fndecl);
8778 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8780 tree lhs;
8781 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8782 gimple_call_set_lhs (call_stmt, lhs);
8783 tree fallthru_label;
8784 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8785 gimple *g;
8786 g = gimple_build_label (fallthru_label);
8787 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8788 g = gimple_build_cond (NE_EXPR, lhs,
8789 fold_convert (TREE_TYPE (lhs),
8790 boolean_false_node),
8791 cctx->cancel_label, fallthru_label);
8792 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8793 break;
8794 default:
8795 break;
8797 /* FALLTHRU */
8798 default:
8799 if ((ctx || task_shared_vars)
8800 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8801 ctx ? NULL : &wi))
8803 /* Just remove clobbers; this should happen only if we have
8804 "privatized" local addressable variables in SIMD regions. The
8805 clobber isn't needed in that case, and gimplifying the address
8806 of the ARRAY_REF into a pointer and creating a MEM_REF based
8807 clobber would create worse code than we get with the clobber
8808 dropped. */
8809 if (gimple_clobber_p (stmt))
8811 gsi_replace (gsi_p, gimple_build_nop (), true);
8812 break;
8814 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8816 break;
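/* Illustrative sketch of the cancellation rewrite above, not verbatim
   compiler output: inside a cancellable region,

       #pragma omp barrier

   becomes roughly

       tmp = GOMP_barrier_cancel ();
       if (tmp != 0) goto <cancel_label>; else goto <fallthru_label>;

   so a pending cancellation observed at the barrier transfers control
   to the region's cancellation label.  */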
8820 static void
8821 lower_omp (gimple_seq *body, omp_context *ctx)
8823 location_t saved_location = input_location;
8824 gimple_stmt_iterator gsi;
8825 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8826 lower_omp_1 (&gsi, ctx);
8827 /* During gimplification, we haven't folded statements inside offloading
8828 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8829 if (target_nesting_level || taskreg_nesting_level)
8830 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8831 fold_stmt (&gsi);
8832 input_location = saved_location;
8835 /* Main entry point. */
8837 static unsigned int
8838 execute_lower_omp (void)
8840 gimple_seq body;
8841 int i;
8842 omp_context *ctx;
8844 /* This pass always runs, to provide PROP_gimple_lomp.
8845 But often, there is nothing to do. */
8846 if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8847 && flag_openmp_simd == 0)
8848 return 0;
8850 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8851 delete_omp_context);
8853 body = gimple_body (current_function_decl);
8855 if (hsa_gen_requested_p ())
8856 omp_grid_gridify_all_targets (&body);
8858 scan_omp (&body, NULL);
8859 gcc_assert (taskreg_nesting_level == 0);
8860 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8861 finish_taskreg_scan (ctx);
8862 taskreg_contexts.release ();
8864 if (all_contexts->root)
8866 if (task_shared_vars)
8867 push_gimplify_context ();
8868 lower_omp (&body, NULL);
8869 if (task_shared_vars)
8870 pop_gimplify_context (NULL);
8873 if (all_contexts)
8875 splay_tree_delete (all_contexts);
8876 all_contexts = NULL;
8878 BITMAP_FREE (task_shared_vars);
8879 return 0;
8882 namespace {
8884 const pass_data pass_data_lower_omp =
8886 GIMPLE_PASS, /* type */
8887 "omplower", /* name */
8888 OPTGROUP_OPENMP, /* optinfo_flags */
8889 TV_NONE, /* tv_id */
8890 PROP_gimple_any, /* properties_required */
8891 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8892 0, /* properties_destroyed */
8893 0, /* todo_flags_start */
8894 0, /* todo_flags_finish */
8897 class pass_lower_omp : public gimple_opt_pass
8899 public:
8900 pass_lower_omp (gcc::context *ctxt)
8901 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8904 /* opt_pass methods: */
8905 virtual unsigned int execute (function *) { return execute_lower_omp (); }
8907 }; // class pass_lower_omp
8909 } // anon namespace
8911 gimple_opt_pass *
8912 make_pass_lower_omp (gcc::context *ctxt)
8914 return new pass_lower_omp (ctxt);
8917 /* The following is a utility to diagnose structured block violations.
8918 It is not part of the "omplower" pass, as that's invoked too late. It
8919 should be invoked by the respective front ends after gimplification. */
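/* For example, matching the errors issued below:

       #pragma omp parallel
	 { lab: ...; }
       goto lab;

   draws "invalid entry to OpenMP structured block", since the branch
   itself is outside any construct, while a goto out of (or across)
   such a region draws the vaguer "invalid branch to/from OpenMP
   structured block".  */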
8921 static splay_tree all_labels;
8923 /* Check for mismatched contexts and generate an error if needed. Return
8924 true if an error is detected. */
8926 static bool
8927 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
8928 gimple *branch_ctx, gimple *label_ctx)
8930 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
8931 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
8933 if (label_ctx == branch_ctx)
8934 return false;
8936 const char* kind = NULL;
8938 if (flag_cilkplus)
8940 if ((branch_ctx
8941 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
8942 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
8943 || (label_ctx
8944 && gimple_code (label_ctx) == GIMPLE_OMP_FOR
8945 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
8946 kind = "Cilk Plus";
8948 if (flag_openacc)
8950 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
8951 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
8953 gcc_checking_assert (kind == NULL);
8954 kind = "OpenACC";
8957 if (kind == NULL)
8959 gcc_checking_assert (flag_openmp);
8960 kind = "OpenMP";
8963 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
8964 so we could traverse it and issue a correct "exit" or "enter" error
8965 message upon a structured block violation.
8967 We built the context by building a list with tree_cons'ing, but there is
8968 no easy counterpart in gimple tuples. It seems like far too much work
8969 for issuing exit/enter error messages. If someone really misses the
8970 distinct error message... patches welcome. */
8972 #if 0
8973 /* Try to avoid confusing the user by producing an error message
8974 with correct "exit" or "enter" verbiage. We prefer "exit"
8975 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
8976 if (branch_ctx == NULL)
8977 exit_p = false;
8978 else
8980 while (label_ctx)
8982 if (TREE_VALUE (label_ctx) == branch_ctx)
8984 exit_p = false;
8985 break;
8987 label_ctx = TREE_CHAIN (label_ctx);
8991 if (exit_p)
8992 error ("invalid exit from %s structured block", kind);
8993 else
8994 error ("invalid entry to %s structured block", kind);
8995 #endif
8997 /* If it's obvious we have an invalid entry, be specific about the error. */
8998 if (branch_ctx == NULL)
8999 error ("invalid entry to %s structured block", kind);
9000 else
9002 /* Otherwise, be vague and lazy, but efficient. */
9003 error ("invalid branch to/from %s structured block", kind);
9006 gsi_replace (gsi_p, gimple_build_nop (), false);
9007 return true;
9010 /* Pass 1: Create a minimal tree of structured blocks, and record
9011 where each label is found. */
9013 static tree
9014 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9015 struct walk_stmt_info *wi)
9017 gimple *context = (gimple *) wi->info;
9018 gimple *inner_context;
9019 gimple *stmt = gsi_stmt (*gsi_p);
9021 *handled_ops_p = true;
9023 switch (gimple_code (stmt))
9025 WALK_SUBSTMTS;
9027 case GIMPLE_OMP_PARALLEL:
9028 case GIMPLE_OMP_TASK:
9029 case GIMPLE_OMP_SECTIONS:
9030 case GIMPLE_OMP_SINGLE:
9031 case GIMPLE_OMP_SECTION:
9032 case GIMPLE_OMP_MASTER:
9033 case GIMPLE_OMP_ORDERED:
9034 case GIMPLE_OMP_CRITICAL:
9035 case GIMPLE_OMP_TARGET:
9036 case GIMPLE_OMP_TEAMS:
9037 case GIMPLE_OMP_TASKGROUP:
9038 /* The minimal context here is just the current OMP construct. */
9039 inner_context = stmt;
9040 wi->info = inner_context;
9041 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9042 wi->info = context;
9043 break;
9045 case GIMPLE_OMP_FOR:
9046 inner_context = stmt;
9047 wi->info = inner_context;
9048 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9049 walk them. */
9050 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9051 diagnose_sb_1, NULL, wi);
9052 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9053 wi->info = context;
9054 break;
9056 case GIMPLE_LABEL:
9057 splay_tree_insert (all_labels,
9058 (splay_tree_key) gimple_label_label (
9059 as_a <glabel *> (stmt)),
9060 (splay_tree_value) context);
9061 break;
9063 default:
9064 break;
9067 return NULL_TREE;
9070 /* Pass 2: Check each branch and see if its context differs from that of
9071 the destination label's context. */
9073 static tree
9074 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9075 struct walk_stmt_info *wi)
9077 gimple *context = (gimple *) wi->info;
9078 splay_tree_node n;
9079 gimple *stmt = gsi_stmt (*gsi_p);
9081 *handled_ops_p = true;
9083 switch (gimple_code (stmt))
9085 WALK_SUBSTMTS;
9087 case GIMPLE_OMP_PARALLEL:
9088 case GIMPLE_OMP_TASK:
9089 case GIMPLE_OMP_SECTIONS:
9090 case GIMPLE_OMP_SINGLE:
9091 case GIMPLE_OMP_SECTION:
9092 case GIMPLE_OMP_MASTER:
9093 case GIMPLE_OMP_ORDERED:
9094 case GIMPLE_OMP_CRITICAL:
9095 case GIMPLE_OMP_TARGET:
9096 case GIMPLE_OMP_TEAMS:
9097 case GIMPLE_OMP_TASKGROUP:
9098 wi->info = stmt;
9099 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9100 wi->info = context;
9101 break;
9103 case GIMPLE_OMP_FOR:
9104 wi->info = stmt;
9105 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9106 walk them. */
9107 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9108 diagnose_sb_2, NULL, wi);
9109 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9110 wi->info = context;
9111 break;
9113 case GIMPLE_COND:
9115 gcond *cond_stmt = as_a <gcond *> (stmt);
9116 tree lab = gimple_cond_true_label (cond_stmt);
9117 if (lab)
9119 n = splay_tree_lookup (all_labels,
9120 (splay_tree_key) lab);
9121 diagnose_sb_0 (gsi_p, context,
9122 n ? (gimple *) n->value : NULL);
9124 lab = gimple_cond_false_label (cond_stmt);
9125 if (lab)
9127 n = splay_tree_lookup (all_labels,
9128 (splay_tree_key) lab);
9129 diagnose_sb_0 (gsi_p, context,
9130 n ? (gimple *) n->value : NULL);
9133 break;
9135 case GIMPLE_GOTO:
9137 tree lab = gimple_goto_dest (stmt);
9138 if (TREE_CODE (lab) != LABEL_DECL)
9139 break;
9141 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9142 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9144 break;
9146 case GIMPLE_SWITCH:
9148 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9149 unsigned int i;
9150 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9152 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9153 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9154 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9155 break;
9158 break;
9160 case GIMPLE_RETURN:
9161 diagnose_sb_0 (gsi_p, context, NULL);
9162 break;
9164 default:
9165 break;
9168 return NULL_TREE;
9171 static unsigned int
9172 diagnose_omp_structured_block_errors (void)
9174 struct walk_stmt_info wi;
9175 gimple_seq body = gimple_body (current_function_decl);
9177 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9179 memset (&wi, 0, sizeof (wi));
9180 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9182 memset (&wi, 0, sizeof (wi));
9183 wi.want_locations = true;
9184 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9186 gimple_set_body (current_function_decl, body);
9188 splay_tree_delete (all_labels);
9189 all_labels = NULL;
9191 return 0;
9194 namespace {
9196 const pass_data pass_data_diagnose_omp_blocks =
9198 GIMPLE_PASS, /* type */
9199 "*diagnose_omp_blocks", /* name */
9200 OPTGROUP_OPENMP, /* optinfo_flags */
9201 TV_NONE, /* tv_id */
9202 PROP_gimple_any, /* properties_required */
9203 0, /* properties_provided */
9204 0, /* properties_destroyed */
9205 0, /* todo_flags_start */
9206 0, /* todo_flags_finish */
9209 class pass_diagnose_omp_blocks : public gimple_opt_pass
9211 public:
9212 pass_diagnose_omp_blocks (gcc::context *ctxt)
9213 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9216 /* opt_pass methods: */
9217 virtual bool gate (function *)
9219 return flag_cilkplus || flag_openacc || flag_openmp;
9221 virtual unsigned int execute (function *)
9223 return diagnose_omp_structured_block_errors ();
9226 }; // class pass_diagnose_omp_blocks
9228 } // anon namespace
9230 gimple_opt_pass *
9231 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9233 return new pass_diagnose_omp_blocks (ctxt);
9237 #include "gt-omp-low.h"