/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

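/* As a rough illustration (the precise record layout, copy-in/out
   decisions and runtime entry points are only fixed during expansion,
   and the names below are made up for the example), a directive such as

       #pragma omp parallel shared (a)
	 body;

   where A must be shared by reference becomes an outlined child
   function plus a libgomp call in the parent:

       struct .omp_data_s { int *a; };

       void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       {
	 ... body, with references to A rewritten via .omp_data_i->a ...
       }

       struct .omp_data_s .omp_data_o;
       .omp_data_o.a = &a;
       GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);  */
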
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

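/* Because CB is the first member of omp_context, callbacks written
   against copy_body_data can recover the enclosing context with a plain
   cast, as omp_copy_decl below does:

       omp_context *ctx = (omp_context *) cb;

   This is the "inheritance" the comment inside the struct refers to.  */
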
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

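/* For example (illustrative C++; the trees the front end builds differ
   in detail), given

       struct S {
	 int n;
	 void f () {
	   #pragma omp parallel firstprivate (n)
	   ...
	 }
       };

   the front end privatizes the non-static member N through an artificial
   VAR_DECL whose DECL_VALUE_EXPR is this->n; the function above recovers
   the artificial "this" PARM_DECL from that expression.  */
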
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" forms allow the variable not to have
   been entered; the plain forms assert that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

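/* To illustrate the distinction (the actual decision follows the checks
   above): for

       int x = 0;
       #pragma omp parallel shared (x)
       x++;

   a non-addressable scalar X can use copy-in/copy-out through a field of
   type int, whereas an addressable X, an aggregate, or anything shared
   with a task must be passed as a field of type int * holding &x, so
   that every thread operates on the one shared object.  */
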
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is only so because the task needs to take
     its address.  But we don't need to take the address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

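/* The references built above have the shape .omp_data_i->var for a
   by-value field and *.omp_data_i->var when BY_REF is set and the field
   only holds a pointer to the variable (names illustrative).  */
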
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

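/* The MASK argument of install_var_field below is a bit set; as inferred
   from the assertions and the callers in this file:

     1 - install a field in CTX->RECORD_TYPE / CTX->FIELD_MAP
	 (the side the child function receives);
     2 - install a field in CTX->SRECORD_TYPE / CTX->SFIELD_MAP
	 (the sender side used by task constructs);
     4 - the variable is an array passed via a pointer to pointer;
     8 - key the maps by &DECL_UID (VAR) rather than by VAR itself, so
	 the same decl can have a second, distinct entry.  */
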
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  If
   IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}

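/* For a parent function foo this yields identifiers such as
   "foo._omp_fn.0" or "foo._omp_cpyfn.1"; the numeric suffix is assigned
   by clone_function_name (the indices here are illustrative).  */
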
/* Returns the type of the induction variable for the child function for
   _Cilk_for and the types for _high and _low variables based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls this
     function.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  if (cilk_for_count)
    DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}

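/* The decls built above therefore have one of these shapes (names and
   indices illustrative; see the parameter setup above for the exact
   argument order):

       void foo._omp_fn.N (void *.omp_data_i);
       void foo._omp_cpyfn.N (void *.omp_data_o, void *.omp_data_i);
       void foo._cilk_for_fn.N (void *.omp_data_i, T __low, T __high);  */
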
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}

/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}

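/* For instance, for a combined

       #pragma omp parallel for collapse (2) lastprivate (x)

   with non-constant loop bounds this adds four _looptemp_ clauses:
   istart and iend, one temporary for count2, and one for the total
   iteration count used by the lastprivate handling (counts follow the
   logic above).  */
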
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}

1907 /* Helper function for finish_taskreg_scan, called through walk_tree.
1908 If maybe_lookup_decl_in_outer_ctx returns a different decl for some
1909 tree, replace it in the expression. */
1911 static tree
1912 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1914 if (VAR_P (*tp))
1916 omp_context *ctx = (omp_context *) data;
1917 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1918 if (t != *tp)
1920 if (DECL_HAS_VALUE_EXPR_P (t))
1921 t = unshare_expr (DECL_VALUE_EXPR (t));
1922 *tp = t;
1924 *walk_subtrees = 0;
1926 else if (IS_TYPE_OR_DECL_P (*tp))
1927 *walk_subtrees = 0;
1928 return NULL_TREE;
1931 /* If any decls have been made addressable during scan_omp,
1932 adjust their fields if needed, and layout record types
1933 of parallel/task constructs. */
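/* For instance (an illustrative case, not from the testsuite): if
   scan_omp made a shared variable addressable, say because its address
   is taken inside the construct body, a field that was laid out by
   value may now have to be passed by pointer; the loop below retypes
   such fields before the records are laid out.  */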
1935 static void
1936 finish_taskreg_scan (omp_context *ctx)
1938 if (ctx->record_type == NULL_TREE)
1939 return;
1941 /* If any task_shared_vars were needed, check all
1942 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1943 statements to see whether use_pointer_for_field has changed
1944 because of that, and update the field types now if it has. */
1945 if (task_shared_vars)
1947 tree c;
1949 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1950 c; c = OMP_CLAUSE_CHAIN (c))
1951 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1952 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1954 tree decl = OMP_CLAUSE_DECL (c);
1956 /* Global variables don't need to be copied,
1957 the receiver side will use them directly. */
1958 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1959 continue;
1960 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1961 || !use_pointer_for_field (decl, ctx))
1962 continue;
1963 tree field = lookup_field (decl, ctx);
1964 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1965 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1966 continue;
1967 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1968 TREE_THIS_VOLATILE (field) = 0;
1969 DECL_USER_ALIGN (field) = 0;
1970 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1971 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1972 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1973 if (ctx->srecord_type)
1975 tree sfield = lookup_sfield (decl, ctx);
1976 TREE_TYPE (sfield) = TREE_TYPE (field);
1977 TREE_THIS_VOLATILE (sfield) = 0;
1978 DECL_USER_ALIGN (sfield) = 0;
1979 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1980 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1981 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1986 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1988 layout_type (ctx->record_type);
1989 fixup_child_record_type (ctx);
1991 else
1993 location_t loc = gimple_location (ctx->stmt);
1994 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1995 /* Move VLA fields to the end. */
1996 p = &TYPE_FIELDS (ctx->record_type);
1997 while (*p)
1998 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1999 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2001 *q = *p;
2002 *p = TREE_CHAIN (*p);
2003 TREE_CHAIN (*q) = NULL_TREE;
2004 q = &TREE_CHAIN (*q);
2006 else
2007 p = &DECL_CHAIN (*p);
2008 *p = vla_fields;
2009 if (gimple_omp_task_taskloop_p (ctx->stmt))
2011 /* Move fields corresponding to the first and second _looptemp_
2012 clauses first. These are filled in by GOMP_taskloop
2013 and thus need to be in specific positions. */
2014 tree c1 = gimple_omp_task_clauses (ctx->stmt);
2015 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2016 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2017 OMP_CLAUSE__LOOPTEMP_);
2018 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2019 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2020 p = &TYPE_FIELDS (ctx->record_type);
2021 while (*p)
2022 if (*p == f1 || *p == f2)
2023 *p = DECL_CHAIN (*p);
2024 else
2025 p = &DECL_CHAIN (*p);
2026 DECL_CHAIN (f1) = f2;
2027 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2028 TYPE_FIELDS (ctx->record_type) = f1;
2029 if (ctx->srecord_type)
2031 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2032 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2033 p = &TYPE_FIELDS (ctx->srecord_type);
2034 while (*p)
2035 if (*p == f1 || *p == f2)
2036 *p = DECL_CHAIN (*p);
2037 else
2038 p = &DECL_CHAIN (*p);
2039 DECL_CHAIN (f1) = f2;
2040 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2041 TYPE_FIELDS (ctx->srecord_type) = f1;
2044 layout_type (ctx->record_type);
2045 fixup_child_record_type (ctx);
2046 if (ctx->srecord_type)
2047 layout_type (ctx->srecord_type);
2048 tree t = fold_convert_loc (loc, long_integer_type_node,
2049 TYPE_SIZE_UNIT (ctx->record_type));
2050 if (TREE_CODE (t) != INTEGER_CST)
2052 t = unshare_expr (t);
2053 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2055 gimple_omp_task_set_arg_size (ctx->stmt, t);
2056 t = build_int_cst (long_integer_type_node,
2057 TYPE_ALIGN_UNIT (ctx->record_type));
2058 gimple_omp_task_set_arg_align (ctx->stmt, t);
2062 /* Find the enclosing offload context. */
2064 static omp_context *
2065 enclosing_target_ctx (omp_context *ctx)
2067 for (; ctx; ctx = ctx->outer)
2068 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2069 break;
2071 return ctx;
2074 /* Return true if ctx is part of an oacc kernels region. */
2076 static bool
2077 ctx_in_oacc_kernels_region (omp_context *ctx)
2079 for (; ctx != NULL; ctx = ctx->outer)
2081 gimple *stmt = ctx->stmt;
2082 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2083 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2084 return true;
2087 return false;
2090 /* Check the parallelism clauses inside a kernels region.
2091 Until kernels handling moves to use the same loop indirection
2092 scheme as parallel, we need to do this checking early. */
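/* For example (an illustrative sketch, not from the testsuite), this
   nesting is rejected because the inner loop requests the same gang
   parallelism as its containing loop:

	#pragma acc kernels
	#pragma acc loop gang
	for (int i = 0; i < n; i++)
	  #pragma acc loop gang	      // error: inner loop uses same
	  for (int j = 0; j < n; j++) // OpenACC parallelism as
	    ...			      // containing loop

   Likewise, seq combined with gang/worker/vector or auto on a single
   loop is diagnosed as a conflict.  */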
2094 static unsigned
2095 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2097 bool checking = true;
2098 unsigned outer_mask = 0;
2099 unsigned this_mask = 0;
2100 bool has_seq = false, has_auto = false;
2102 if (ctx->outer)
2103 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2104 if (!stmt)
2106 checking = false;
2107 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2108 return outer_mask;
2109 stmt = as_a <gomp_for *> (ctx->stmt);
2112 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2114 switch (OMP_CLAUSE_CODE (c))
2116 case OMP_CLAUSE_GANG:
2117 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2118 break;
2119 case OMP_CLAUSE_WORKER:
2120 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2121 break;
2122 case OMP_CLAUSE_VECTOR:
2123 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2124 break;
2125 case OMP_CLAUSE_SEQ:
2126 has_seq = true;
2127 break;
2128 case OMP_CLAUSE_AUTO:
2129 has_auto = true;
2130 break;
2131 default:
2132 break;
2136 if (checking)
2138 if (has_seq && (this_mask || has_auto))
2139 error_at (gimple_location (stmt), "%<seq%> overrides other"
2140 " OpenACC loop specifiers");
2141 else if (has_auto && this_mask)
2142 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2143 " OpenACC loop specifiers");
2145 if (this_mask & outer_mask)
2146 error_at (gimple_location (stmt), "inner loop uses same"
2147 " OpenACC parallelism as containing loop");
2150 return outer_mask | this_mask;
2153 /* Scan a GIMPLE_OMP_FOR. */
2155 static omp_context *
2156 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2158 omp_context *ctx;
2159 size_t i;
2160 tree clauses = gimple_omp_for_clauses (stmt);
2162 ctx = new_omp_context (stmt, outer_ctx);
2164 if (is_gimple_omp_oacc (stmt))
2166 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2168 if (!tgt || is_oacc_parallel (tgt))
2169 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2171 char const *check = NULL;
2173 switch (OMP_CLAUSE_CODE (c))
2175 case OMP_CLAUSE_GANG:
2176 check = "gang";
2177 break;
2179 case OMP_CLAUSE_WORKER:
2180 check = "worker";
2181 break;
2183 case OMP_CLAUSE_VECTOR:
2184 check = "vector";
2185 break;
2187 default:
2188 break;
2191 if (check && OMP_CLAUSE_OPERAND (c, 0))
2192 error_at (gimple_location (stmt),
2193 "argument not permitted on %qs clause in"
2194 " OpenACC %<parallel%>", check);
2197 if (tgt && is_oacc_kernels (tgt))
2199 /* Strip out reductions, as they are not handled yet. */
2200 tree *prev_ptr = &clauses;
2202 while (tree probe = *prev_ptr)
2204 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2206 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2207 *prev_ptr = *next_ptr;
2208 else
2209 prev_ptr = next_ptr;
2212 gimple_omp_for_set_clauses (stmt, clauses);
2213 check_oacc_kernel_gwv (stmt, ctx);
2217 scan_sharing_clauses (clauses, ctx);
2219 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2220 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2222 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2223 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2224 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2225 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2227 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2228 return ctx;
2231 /* Duplicate #pragma omp simd, making one copy for SIMT and another for SIMD. */
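/* A sketch of the dispatch built below (control flow only; names are
   illustrative):

	cond = IFN_GOMP_USE_SIMT ();
	if (cond != 0) goto lab1; else goto lab2;
	lab1: <copy of the loop, marked with an artificial _simt_ clause>
	      goto lab3;
	lab2: <the original loop, kept for the SIMD path>
	lab3: ;  */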
2233 static void
2234 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2235 omp_context *outer_ctx)
2237 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2238 gsi_replace (gsi, bind, false);
2239 gimple_seq seq = NULL;
2240 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2241 tree cond = create_tmp_var_raw (integer_type_node);
2242 DECL_CONTEXT (cond) = current_function_decl;
2243 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2244 gimple_bind_set_vars (bind, cond);
2245 gimple_call_set_lhs (g, cond);
2246 gimple_seq_add_stmt (&seq, g);
2247 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2248 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2249 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2250 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2251 gimple_seq_add_stmt (&seq, g);
2252 g = gimple_build_label (lab1);
2253 gimple_seq_add_stmt (&seq, g);
2254 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2255 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2256 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2257 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2258 gimple_omp_for_set_clauses (new_stmt, clause);
2259 gimple_seq_add_stmt (&seq, new_stmt);
2260 g = gimple_build_goto (lab3);
2261 gimple_seq_add_stmt (&seq, g);
2262 g = gimple_build_label (lab2);
2263 gimple_seq_add_stmt (&seq, g);
2264 gimple_seq_add_stmt (&seq, stmt);
2265 g = gimple_build_label (lab3);
2266 gimple_seq_add_stmt (&seq, g);
2267 gimple_bind_set_body (bind, seq);
2268 update_stmt (bind);
2269 scan_omp_for (new_stmt, outer_ctx);
2270 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2273 /* Scan an OpenMP sections directive. */
2275 static void
2276 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2278 omp_context *ctx;
2280 ctx = new_omp_context (stmt, outer_ctx);
2281 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2282 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2285 /* Scan an OpenMP single directive. */
2287 static void
2288 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2290 omp_context *ctx;
2291 tree name;
2293 ctx = new_omp_context (stmt, outer_ctx);
2294 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2295 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2296 name = create_tmp_var_name (".omp_copy_s");
2297 name = build_decl (gimple_location (stmt),
2298 TYPE_DECL, name, ctx->record_type);
2299 TYPE_NAME (ctx->record_type) = name;
2301 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2302 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2304 if (TYPE_FIELDS (ctx->record_type) == NULL)
2305 ctx->record_type = NULL;
2306 else
2307 layout_type (ctx->record_type);
2310 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2311 used in the corresponding offloaded function are restrict. */
2313 static bool
2314 omp_target_base_pointers_restrict_p (tree clauses)
2316 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2317 used by OpenACC. */
2318 if (flag_openacc == 0)
2319 return false;
2321 /* I. Basic example:
2323 void foo (void)
2325 unsigned int a[2], b[2];
2327 #pragma acc kernels \
2328 copyout (a) \
2329 copyout (b)
2331 a[0] = 0;
2332 b[0] = 1;
2336 After gimplification, we have:
2338 #pragma omp target oacc_kernels \
2339 map(force_from:a [len: 8]) \
2340 map(force_from:b [len: 8])
2342 a[0] = 0;
2343 b[0] = 1;
2346 Because both mappings have the force prefix, we know that they will be
2347 allocated when calling the corresponding offloaded function, which means we
2348 can mark the base pointers for a and b in the offloaded function as
2349 restrict. */
2351 tree c;
2352 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2354 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2355 return false;
2357 switch (OMP_CLAUSE_MAP_KIND (c))
2359 case GOMP_MAP_FORCE_ALLOC:
2360 case GOMP_MAP_FORCE_TO:
2361 case GOMP_MAP_FORCE_FROM:
2362 case GOMP_MAP_FORCE_TOFROM:
2363 break;
2364 default:
2365 return false;
2369 return true;
2372 /* Scan a GIMPLE_OMP_TARGET. */
2374 static void
2375 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2377 omp_context *ctx;
2378 tree name;
2379 bool offloaded = is_gimple_omp_offloaded (stmt);
2380 tree clauses = gimple_omp_target_clauses (stmt);
2382 ctx = new_omp_context (stmt, outer_ctx);
2383 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2384 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2385 name = create_tmp_var_name (".omp_data_t");
2386 name = build_decl (gimple_location (stmt),
2387 TYPE_DECL, name, ctx->record_type);
2388 DECL_ARTIFICIAL (name) = 1;
2389 DECL_NAMELESS (name) = 1;
2390 TYPE_NAME (ctx->record_type) = name;
2391 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2393 bool base_pointers_restrict = false;
2394 if (offloaded)
2396 create_omp_child_function (ctx, false);
2397 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2399 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2400 if (base_pointers_restrict
2401 && dump_file && (dump_flags & TDF_DETAILS))
2402 fprintf (dump_file,
2403 "Base pointers in offloaded function are restrict\n");
2406 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2407 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2409 if (TYPE_FIELDS (ctx->record_type) == NULL)
2410 ctx->record_type = ctx->receiver_decl = NULL;
2411 else
2413 TYPE_FIELDS (ctx->record_type)
2414 = nreverse (TYPE_FIELDS (ctx->record_type));
2415 if (flag_checking)
2417 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2418 for (tree field = TYPE_FIELDS (ctx->record_type);
2419 field;
2420 field = DECL_CHAIN (field))
2421 gcc_assert (DECL_ALIGN (field) == align);
2423 layout_type (ctx->record_type);
2424 if (offloaded)
2425 fixup_child_record_type (ctx);
2429 /* Scan an OpenMP teams directive. */
2431 static void
2432 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2434 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2435 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2436 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2439 /* Check nesting restrictions. */
2440 static bool
2441 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2443 tree c;
2445 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2446 /* GRID_BODY is an artificial construct; nesting rules will be checked
2447 in the original copy of its contents. */
2448 return true;
2450 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2451 inside an OpenACC CTX. */
2452 if (!(is_gimple_omp (stmt)
2453 && is_gimple_omp_oacc (stmt))
2454 /* Except for atomic codes that we share with OpenMP. */
2455 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2456 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2458 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2460 error_at (gimple_location (stmt),
2461 "non-OpenACC construct inside of OpenACC routine");
2462 return false;
2464 else
2465 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2466 if (is_gimple_omp (octx->stmt)
2467 && is_gimple_omp_oacc (octx->stmt))
2469 error_at (gimple_location (stmt),
2470 "non-OpenACC construct inside of OpenACC region");
2471 return false;
2475 if (ctx != NULL)
2477 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2478 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2480 c = NULL_TREE;
2481 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2483 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2484 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2486 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2487 && (ctx->outer == NULL
2488 || !gimple_omp_for_combined_into_p (ctx->stmt)
2489 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2490 || (gimple_omp_for_kind (ctx->outer->stmt)
2491 != GF_OMP_FOR_KIND_FOR)
2492 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2494 error_at (gimple_location (stmt),
2495 "%<ordered simd threads%> must be closely "
2496 "nested inside of %<for simd%> region");
2497 return false;
2499 return true;
2502 error_at (gimple_location (stmt),
2503 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2504 " may not be nested inside %<simd%> region");
2505 return false;
2507 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2509 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2510 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2511 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2512 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2514 error_at (gimple_location (stmt),
2515 "only %<distribute%> or %<parallel%> regions are "
2516 "allowed to be strictly nested inside %<teams%> "
2517 "region");
2518 return false;
2522 switch (gimple_code (stmt))
2524 case GIMPLE_OMP_FOR:
2525 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2526 return true;
2527 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2529 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2531 error_at (gimple_location (stmt),
2532 "%<distribute%> region must be strictly nested "
2533 "inside %<teams%> construct");
2534 return false;
2536 return true;
2538 /* We split a taskloop into a task with a nested taskloop in it. */
2539 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2540 return true;
2541 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2543 bool ok = false;
2545 if (ctx)
2546 switch (gimple_code (ctx->stmt))
2548 case GIMPLE_OMP_FOR:
2549 ok = (gimple_omp_for_kind (ctx->stmt)
2550 == GF_OMP_FOR_KIND_OACC_LOOP);
2551 break;
2553 case GIMPLE_OMP_TARGET:
2554 switch (gimple_omp_target_kind (ctx->stmt))
2556 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2557 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2558 ok = true;
2559 break;
2561 default:
2562 break;
2565 default:
2566 break;
2568 else if (oacc_get_fn_attrib (current_function_decl))
2569 ok = true;
2570 if (!ok)
2572 error_at (gimple_location (stmt),
2573 "OpenACC loop directive must be associated with"
2574 " an OpenACC compute region");
2575 return false;
2578 /* FALLTHRU */
2579 case GIMPLE_CALL:
2580 if (is_gimple_call (stmt)
2581 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2582 == BUILT_IN_GOMP_CANCEL
2583 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2584 == BUILT_IN_GOMP_CANCELLATION_POINT))
2586 const char *bad = NULL;
2587 const char *kind = NULL;
2588 const char *construct
2589 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2590 == BUILT_IN_GOMP_CANCEL)
2591 ? "#pragma omp cancel"
2592 : "#pragma omp cancellation point";
2593 if (ctx == NULL)
2595 error_at (gimple_location (stmt), "orphaned %qs construct",
2596 construct);
2597 return false;
2599 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2600 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2601 : 0)
2603 case 1:
2604 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2605 bad = "#pragma omp parallel";
2606 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2607 == BUILT_IN_GOMP_CANCEL
2608 && !integer_zerop (gimple_call_arg (stmt, 1)))
2609 ctx->cancellable = true;
2610 kind = "parallel";
2611 break;
2612 case 2:
2613 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2614 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2615 bad = "#pragma omp for";
2616 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2617 == BUILT_IN_GOMP_CANCEL
2618 && !integer_zerop (gimple_call_arg (stmt, 1)))
2620 ctx->cancellable = true;
2621 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2622 OMP_CLAUSE_NOWAIT))
2623 warning_at (gimple_location (stmt), 0,
2624 "%<#pragma omp cancel for%> inside "
2625 "%<nowait%> for construct");
2626 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2627 OMP_CLAUSE_ORDERED))
2628 warning_at (gimple_location (stmt), 0,
2629 "%<#pragma omp cancel for%> inside "
2630 "%<ordered%> for construct");
2632 kind = "for";
2633 break;
2634 case 4:
2635 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2636 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2637 bad = "#pragma omp sections";
2638 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2639 == BUILT_IN_GOMP_CANCEL
2640 && !integer_zerop (gimple_call_arg (stmt, 1)))
2642 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2644 ctx->cancellable = true;
2645 if (omp_find_clause (gimple_omp_sections_clauses
2646 (ctx->stmt),
2647 OMP_CLAUSE_NOWAIT))
2648 warning_at (gimple_location (stmt), 0,
2649 "%<#pragma omp cancel sections%> inside "
2650 "%<nowait%> sections construct");
2652 else
2654 gcc_assert (ctx->outer
2655 && gimple_code (ctx->outer->stmt)
2656 == GIMPLE_OMP_SECTIONS);
2657 ctx->outer->cancellable = true;
2658 if (omp_find_clause (gimple_omp_sections_clauses
2659 (ctx->outer->stmt),
2660 OMP_CLAUSE_NOWAIT))
2661 warning_at (gimple_location (stmt), 0,
2662 "%<#pragma omp cancel sections%> inside "
2663 "%<nowait%> sections construct");
2666 kind = "sections";
2667 break;
2668 case 8:
2669 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2670 bad = "#pragma omp task";
2671 else
2673 for (omp_context *octx = ctx->outer;
2674 octx; octx = octx->outer)
2676 switch (gimple_code (octx->stmt))
2678 case GIMPLE_OMP_TASKGROUP:
2679 break;
2680 case GIMPLE_OMP_TARGET:
2681 if (gimple_omp_target_kind (octx->stmt)
2682 != GF_OMP_TARGET_KIND_REGION)
2683 continue;
2684 /* FALLTHRU */
2685 case GIMPLE_OMP_PARALLEL:
2686 case GIMPLE_OMP_TEAMS:
2687 error_at (gimple_location (stmt),
2688 "%<%s taskgroup%> construct not closely "
2689 "nested inside of %<taskgroup%> region",
2690 construct);
2691 return false;
2692 default:
2693 continue;
2695 break;
2697 ctx->cancellable = true;
2699 kind = "taskgroup";
2700 break;
2701 default:
2702 error_at (gimple_location (stmt), "invalid arguments");
2703 return false;
2705 if (bad)
2707 error_at (gimple_location (stmt),
2708 "%<%s %s%> construct not closely nested inside of %qs",
2709 construct, kind, bad);
2710 return false;
2713 /* FALLTHRU */
2714 case GIMPLE_OMP_SECTIONS:
2715 case GIMPLE_OMP_SINGLE:
2716 for (; ctx != NULL; ctx = ctx->outer)
2717 switch (gimple_code (ctx->stmt))
2719 case GIMPLE_OMP_FOR:
2720 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2721 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2722 break;
2723 /* FALLTHRU */
2724 case GIMPLE_OMP_SECTIONS:
2725 case GIMPLE_OMP_SINGLE:
2726 case GIMPLE_OMP_ORDERED:
2727 case GIMPLE_OMP_MASTER:
2728 case GIMPLE_OMP_TASK:
2729 case GIMPLE_OMP_CRITICAL:
2730 if (is_gimple_call (stmt))
2732 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2733 != BUILT_IN_GOMP_BARRIER)
2734 return true;
2735 error_at (gimple_location (stmt),
2736 "barrier region may not be closely nested inside "
2737 "of work-sharing, %<critical%>, %<ordered%>, "
2738 "%<master%>, explicit %<task%> or %<taskloop%> "
2739 "region");
2740 return false;
2742 error_at (gimple_location (stmt),
2743 "work-sharing region may not be closely nested inside "
2744 "of work-sharing, %<critical%>, %<ordered%>, "
2745 "%<master%>, explicit %<task%> or %<taskloop%> region");
2746 return false;
2747 case GIMPLE_OMP_PARALLEL:
2748 case GIMPLE_OMP_TEAMS:
2749 return true;
2750 case GIMPLE_OMP_TARGET:
2751 if (gimple_omp_target_kind (ctx->stmt)
2752 == GF_OMP_TARGET_KIND_REGION)
2753 return true;
2754 break;
2755 default:
2756 break;
2758 break;
2759 case GIMPLE_OMP_MASTER:
2760 for (; ctx != NULL; ctx = ctx->outer)
2761 switch (gimple_code (ctx->stmt))
2763 case GIMPLE_OMP_FOR:
2764 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2765 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2766 break;
2767 /* FALLTHRU */
2768 case GIMPLE_OMP_SECTIONS:
2769 case GIMPLE_OMP_SINGLE:
2770 case GIMPLE_OMP_TASK:
2771 error_at (gimple_location (stmt),
2772 "%<master%> region may not be closely nested inside "
2773 "of work-sharing, explicit %<task%> or %<taskloop%> "
2774 "region");
2775 return false;
2776 case GIMPLE_OMP_PARALLEL:
2777 case GIMPLE_OMP_TEAMS:
2778 return true;
2779 case GIMPLE_OMP_TARGET:
2780 if (gimple_omp_target_kind (ctx->stmt)
2781 == GF_OMP_TARGET_KIND_REGION)
2782 return true;
2783 break;
2784 default:
2785 break;
2787 break;
2788 case GIMPLE_OMP_TASK:
2789 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2790 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2791 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2792 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2794 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2795 error_at (OMP_CLAUSE_LOCATION (c),
2796 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2797 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2798 return false;
2800 break;
2801 case GIMPLE_OMP_ORDERED:
2802 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2803 c; c = OMP_CLAUSE_CHAIN (c))
2805 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2807 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2808 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2809 continue;
2811 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2812 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2813 || kind == OMP_CLAUSE_DEPEND_SINK)
2815 tree oclause;
2816 /* Look for containing ordered(N) loop. */
2817 if (ctx == NULL
2818 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2819 || (oclause
2820 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2821 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2823 error_at (OMP_CLAUSE_LOCATION (c),
2824 "%<ordered%> construct with %<depend%> clause "
2825 "must be closely nested inside an %<ordered%> "
2826 "loop");
2827 return false;
2829 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2831 error_at (OMP_CLAUSE_LOCATION (c),
2832 "%<ordered%> construct with %<depend%> clause "
2833 "must be closely nested inside a loop with "
2834 "%<ordered%> clause with a parameter");
2835 return false;
2838 else
2840 error_at (OMP_CLAUSE_LOCATION (c),
2841 "invalid depend kind in omp %<ordered%> %<depend%>");
2842 return false;
2845 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2846 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2848 /* An ordered simd must be closely nested inside of a simd region,
2849 and a simd region must not encounter constructs other than
2850 ordered simd; therefore an ordered simd may be either orphaned,
2851 or ctx->stmt must be a simd. The latter case was already
2852 handled earlier. */
2853 if (ctx != NULL)
2855 error_at (gimple_location (stmt),
2856 "%<ordered%> %<simd%> must be closely nested inside "
2857 "%<simd%> region");
2858 return false;
2861 for (; ctx != NULL; ctx = ctx->outer)
2862 switch (gimple_code (ctx->stmt))
2864 case GIMPLE_OMP_CRITICAL:
2865 case GIMPLE_OMP_TASK:
2866 case GIMPLE_OMP_ORDERED:
2867 ordered_in_taskloop:
2868 error_at (gimple_location (stmt),
2869 "%<ordered%> region may not be closely nested inside "
2870 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2871 "%<taskloop%> region");
2872 return false;
2873 case GIMPLE_OMP_FOR:
2874 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2875 goto ordered_in_taskloop;
2876 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2877 OMP_CLAUSE_ORDERED) == NULL)
2879 error_at (gimple_location (stmt),
2880 "%<ordered%> region must be closely nested inside "
2881 "a loop region with an %<ordered%> clause");
2882 return false;
2884 return true;
2885 case GIMPLE_OMP_TARGET:
2886 if (gimple_omp_target_kind (ctx->stmt)
2887 != GF_OMP_TARGET_KIND_REGION)
2888 break;
2889 /* FALLTHRU */
2890 case GIMPLE_OMP_PARALLEL:
2891 case GIMPLE_OMP_TEAMS:
2892 error_at (gimple_location (stmt),
2893 "%<ordered%> region must be closely nested inside "
2894 "a loop region with an %<ordered%> clause");
2895 return false;
2896 default:
2897 break;
2899 break;
2900 case GIMPLE_OMP_CRITICAL:
2902 tree this_stmt_name
2903 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2904 for (; ctx != NULL; ctx = ctx->outer)
2905 if (gomp_critical *other_crit
2906 = dyn_cast <gomp_critical *> (ctx->stmt))
2907 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2909 error_at (gimple_location (stmt),
2910 "%<critical%> region may not be nested inside "
2911 "a %<critical%> region with the same name");
2912 return false;
2915 break;
2916 case GIMPLE_OMP_TEAMS:
2917 if (ctx == NULL
2918 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2919 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2921 error_at (gimple_location (stmt),
2922 "%<teams%> construct not closely nested inside of "
2923 "%<target%> construct");
2924 return false;
2926 break;
2927 case GIMPLE_OMP_TARGET:
2928 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2929 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2930 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2931 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2933 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2934 error_at (OMP_CLAUSE_LOCATION (c),
2935 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2936 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2937 return false;
2939 if (is_gimple_omp_offloaded (stmt)
2940 && oacc_get_fn_attrib (cfun->decl) != NULL)
2942 error_at (gimple_location (stmt),
2943 "OpenACC region inside of OpenACC routine, nested "
2944 "parallelism not supported yet");
2945 return false;
2947 for (; ctx != NULL; ctx = ctx->outer)
2949 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2951 if (is_gimple_omp (stmt)
2952 && is_gimple_omp_oacc (stmt)
2953 && is_gimple_omp (ctx->stmt))
2955 error_at (gimple_location (stmt),
2956 "OpenACC construct inside of non-OpenACC region");
2957 return false;
2959 continue;
2962 const char *stmt_name, *ctx_stmt_name;
2963 switch (gimple_omp_target_kind (stmt))
2965 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2966 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2967 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2968 case GF_OMP_TARGET_KIND_ENTER_DATA:
2969 stmt_name = "target enter data"; break;
2970 case GF_OMP_TARGET_KIND_EXIT_DATA:
2971 stmt_name = "target exit data"; break;
2972 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2973 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2974 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2975 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2976 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2977 stmt_name = "enter/exit data"; break;
2978 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2979 break;
2980 default: gcc_unreachable ();
2982 switch (gimple_omp_target_kind (ctx->stmt))
2984 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2985 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2986 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2987 ctx_stmt_name = "parallel"; break;
2988 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2989 ctx_stmt_name = "kernels"; break;
2990 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2991 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2992 ctx_stmt_name = "host_data"; break;
2993 default: gcc_unreachable ();
2996 /* OpenACC/OpenMP mismatch? */
2997 if (is_gimple_omp_oacc (stmt)
2998 != is_gimple_omp_oacc (ctx->stmt))
3000 error_at (gimple_location (stmt),
3001 "%s %qs construct inside of %s %qs region",
3002 (is_gimple_omp_oacc (stmt)
3003 ? "OpenACC" : "OpenMP"), stmt_name,
3004 (is_gimple_omp_oacc (ctx->stmt)
3005 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3006 return false;
3008 if (is_gimple_omp_offloaded (ctx->stmt))
3010 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3011 if (is_gimple_omp_oacc (ctx->stmt))
3013 error_at (gimple_location (stmt),
3014 "%qs construct inside of %qs region",
3015 stmt_name, ctx_stmt_name);
3016 return false;
3018 else
3020 warning_at (gimple_location (stmt), 0,
3021 "%qs construct inside of %qs region",
3022 stmt_name, ctx_stmt_name);
3026 break;
3027 default:
3028 break;
3030 return true;
3034 /* Helper function for scan_omp.
3036 Callback for walk_tree or operators in walk_gimple_stmt used to
3037 scan for OMP directives in TP. */
3039 static tree
3040 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3042 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3043 omp_context *ctx = (omp_context *) wi->info;
3044 tree t = *tp;
3046 switch (TREE_CODE (t))
3048 case VAR_DECL:
3049 case PARM_DECL:
3050 case LABEL_DECL:
3051 case RESULT_DECL:
3052 if (ctx)
3054 tree repl = remap_decl (t, &ctx->cb);
3055 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3056 *tp = repl;
3058 break;
3060 default:
3061 if (ctx && TYPE_P (t))
3062 *tp = remap_type (t, &ctx->cb);
3063 else if (!DECL_P (t))
3065 *walk_subtrees = 1;
3066 if (ctx)
3068 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3069 if (tem != TREE_TYPE (t))
3071 if (TREE_CODE (t) == INTEGER_CST)
3072 *tp = wide_int_to_tree (tem, t);
3073 else
3074 TREE_TYPE (t) = tem;
3078 break;
3081 return NULL_TREE;
3084 /* Return true if FNDECL is a setjmp or a longjmp. */
3086 static bool
3087 setjmp_or_longjmp_p (const_tree fndecl)
3089 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3090 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3091 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3092 return true;
3094 tree declname = DECL_NAME (fndecl);
3095 if (!declname)
3096 return false;
3097 const char *name = IDENTIFIER_POINTER (declname);
3098 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3102 /* Helper function for scan_omp.
3104 Callback for walk_gimple_stmt used to scan for OMP directives in
3105 the current statement in GSI. */
3107 static tree
3108 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3109 struct walk_stmt_info *wi)
3111 gimple *stmt = gsi_stmt (*gsi);
3112 omp_context *ctx = (omp_context *) wi->info;
3114 if (gimple_has_location (stmt))
3115 input_location = gimple_location (stmt);
3117 /* Check the nesting restrictions. */
3118 bool remove = false;
3119 if (is_gimple_omp (stmt))
3120 remove = !check_omp_nesting_restrictions (stmt, ctx);
3121 else if (is_gimple_call (stmt))
3123 tree fndecl = gimple_call_fndecl (stmt);
3124 if (fndecl)
3126 if (setjmp_or_longjmp_p (fndecl)
3127 && ctx
3128 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3129 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3131 remove = true;
3132 error_at (gimple_location (stmt),
3133 "setjmp/longjmp inside simd construct");
3135 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3136 switch (DECL_FUNCTION_CODE (fndecl))
3138 case BUILT_IN_GOMP_BARRIER:
3139 case BUILT_IN_GOMP_CANCEL:
3140 case BUILT_IN_GOMP_CANCELLATION_POINT:
3141 case BUILT_IN_GOMP_TASKYIELD:
3142 case BUILT_IN_GOMP_TASKWAIT:
3143 case BUILT_IN_GOMP_TASKGROUP_START:
3144 case BUILT_IN_GOMP_TASKGROUP_END:
3145 remove = !check_omp_nesting_restrictions (stmt, ctx);
3146 break;
3147 default:
3148 break;
3152 if (remove)
3154 stmt = gimple_build_nop ();
3155 gsi_replace (gsi, stmt, false);
3158 *handled_ops_p = true;
3160 switch (gimple_code (stmt))
3162 case GIMPLE_OMP_PARALLEL:
3163 taskreg_nesting_level++;
3164 scan_omp_parallel (gsi, ctx);
3165 taskreg_nesting_level--;
3166 break;
3168 case GIMPLE_OMP_TASK:
3169 taskreg_nesting_level++;
3170 scan_omp_task (gsi, ctx);
3171 taskreg_nesting_level--;
3172 break;
3174 case GIMPLE_OMP_FOR:
3175 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3176 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3177 && omp_maybe_offloaded_ctx (ctx)
3178 && omp_max_simt_vf ())
3179 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3180 else
3181 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3182 break;
3184 case GIMPLE_OMP_SECTIONS:
3185 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3186 break;
3188 case GIMPLE_OMP_SINGLE:
3189 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3190 break;
3192 case GIMPLE_OMP_SECTION:
3193 case GIMPLE_OMP_MASTER:
3194 case GIMPLE_OMP_TASKGROUP:
3195 case GIMPLE_OMP_ORDERED:
3196 case GIMPLE_OMP_CRITICAL:
3197 case GIMPLE_OMP_GRID_BODY:
3198 ctx = new_omp_context (stmt, ctx);
3199 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3200 break;
3202 case GIMPLE_OMP_TARGET:
3203 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3204 break;
3206 case GIMPLE_OMP_TEAMS:
3207 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3208 break;
3210 case GIMPLE_BIND:
3212 tree var;
3214 *handled_ops_p = false;
3215 if (ctx)
3216 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3217 var ;
3218 var = DECL_CHAIN (var))
3219 insert_decl_map (&ctx->cb, var, var);
3221 break;
3222 default:
3223 *handled_ops_p = false;
3224 break;
3227 return NULL_TREE;
3231 /* Scan all the statements starting at the current statement. CTX
3232 contains context information about the OMP directives and
3233 clauses found during the scan. */
3235 static void
3236 scan_omp (gimple_seq *body_p, omp_context *ctx)
3238 location_t saved_location;
3239 struct walk_stmt_info wi;
3241 memset (&wi, 0, sizeof (wi));
3242 wi.info = ctx;
3243 wi.want_locations = true;
3245 saved_location = input_location;
3246 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3247 input_location = saved_location;
3250 /* Re-gimplification and code generation routines. */
3252 /* If a context was created for STMT when it was scanned, return it. */
3254 static omp_context *
3255 maybe_lookup_ctx (gimple *stmt)
3257 splay_tree_node n;
3258 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3259 return n ? (omp_context *) n->value : NULL;
3263 /* Find the mapping for DECL in CTX or the immediately enclosing
3264 context that has a mapping for DECL.
3266 If CTX is a nested parallel directive, we may have to use the decl
3267 mappings created in CTX's parent context. Suppose that we have the
3268 following parallel nesting (variable UIDs shown for clarity):
3270 iD.1562 = 0;
3271 #omp parallel shared(iD.1562) -> outer parallel
3272 iD.1562 = iD.1562 + 1;
3274 #omp parallel shared (iD.1562) -> inner parallel
3275 iD.1562 = iD.1562 - 1;
3277 Each parallel structure will create a distinct .omp_data_s structure
3278 for copying iD.1562 in/out of the directive:
3280 outer parallel .omp_data_s.1.i -> iD.1562
3281 inner parallel .omp_data_s.2.i -> iD.1562
3283 A shared variable mapping will produce a copy-out operation before
3284 the parallel directive and a copy-in operation after it. So, in
3285 this case we would have:
3287 iD.1562 = 0;
3288 .omp_data_o.1.i = iD.1562;
3289 #omp parallel shared(iD.1562) -> outer parallel
3290 .omp_data_i.1 = &.omp_data_o.1
3291 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3293 .omp_data_o.2.i = iD.1562; -> **
3294 #omp parallel shared(iD.1562) -> inner parallel
3295 .omp_data_i.2 = &.omp_data_o.2
3296 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3299 ** This is a problem. The symbol iD.1562 cannot be referenced
3300 inside the body of the outer parallel region. But since we are
3301 emitting this copy operation while expanding the inner parallel
3302 directive, we need to access the CTX structure of the outer
3303 parallel directive to get the correct mapping:
3305 .omp_data_o.2.i = .omp_data_i.1->i
3307 Since there may be other workshare or parallel directives enclosing
3308 the parallel directive, it may be necessary to walk up the context
3309 parent chain. This is not a problem in general because nested
3310 parallelism happens only rarely. */
3312 static tree
3313 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3315 tree t;
3316 omp_context *up;
3318 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3319 t = maybe_lookup_decl (decl, up);
3321 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3323 return t ? t : decl;
3327 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3328 in outer contexts. */
3330 static tree
3331 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3333 tree t = NULL;
3334 omp_context *up;
3336 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3337 t = maybe_lookup_decl (decl, up);
3339 return t ? t : decl;
3343 /* Construct the initialization value for reduction operation OP. */
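/* For example: PLUS_EXPR, MINUS_EXPR and BIT_IOR_EXPR initialize to 0;
   MULT_EXPR and TRUTH_AND_EXPR to 1; BIT_AND_EXPR to all-ones; MAX_EXPR
   to the minimum value of TYPE (-inf for floats honoring infinities)
   and MIN_EXPR to the maximum, so the first reduced element always
   wins.  */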
3345 tree
3346 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3348 switch (op)
3350 case PLUS_EXPR:
3351 case MINUS_EXPR:
3352 case BIT_IOR_EXPR:
3353 case BIT_XOR_EXPR:
3354 case TRUTH_OR_EXPR:
3355 case TRUTH_ORIF_EXPR:
3356 case TRUTH_XOR_EXPR:
3357 case NE_EXPR:
3358 return build_zero_cst (type);
3360 case MULT_EXPR:
3361 case TRUTH_AND_EXPR:
3362 case TRUTH_ANDIF_EXPR:
3363 case EQ_EXPR:
3364 return fold_convert_loc (loc, type, integer_one_node);
3366 case BIT_AND_EXPR:
3367 return fold_convert_loc (loc, type, integer_minus_one_node);
3369 case MAX_EXPR:
3370 if (SCALAR_FLOAT_TYPE_P (type))
3372 REAL_VALUE_TYPE max, min;
3373 if (HONOR_INFINITIES (type))
3375 real_inf (&max);
3376 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3378 else
3379 real_maxval (&min, 1, TYPE_MODE (type));
3380 return build_real (type, min);
3382 else if (POINTER_TYPE_P (type))
3384 wide_int min
3385 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3386 return wide_int_to_tree (type, min);
3388 else
3390 gcc_assert (INTEGRAL_TYPE_P (type));
3391 return TYPE_MIN_VALUE (type);
3394 case MIN_EXPR:
3395 if (SCALAR_FLOAT_TYPE_P (type))
3397 REAL_VALUE_TYPE max;
3398 if (HONOR_INFINITIES (type))
3399 real_inf (&max);
3400 else
3401 real_maxval (&max, 0, TYPE_MODE (type));
3402 return build_real (type, max);
3404 else if (POINTER_TYPE_P (type))
3406 wide_int max
3407 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3408 return wide_int_to_tree (type, max);
3410 else
3412 gcc_assert (INTEGRAL_TYPE_P (type));
3413 return TYPE_MAX_VALUE (type);
3416 default:
3417 gcc_unreachable ();
3421 /* Construct the initialization value for reduction CLAUSE. */
3423 tree
3424 omp_reduction_init (tree clause, tree type)
3426 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3427 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3430 /* Return alignment to be assumed for var in CLAUSE, which should be
3431 OMP_CLAUSE_ALIGNED. */
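/* E.g. (an illustrative use, not from the testsuite), for

	#pragma omp simd aligned (p : 32)

   the explicit 32 is returned directly; for a plain "aligned (p)" the
   loop below derives the alignment from the target's preferred SIMD
   vector modes.  */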
3433 static tree
3434 omp_clause_aligned_alignment (tree clause)
3436 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3437 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3439 /* Otherwise return implementation defined alignment. */
3440 unsigned int al = 1;
3441 machine_mode mode, vmode;
3442 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3443 if (vs)
3444 vs = 1 << floor_log2 (vs);
3445 static enum mode_class classes[]
3446 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3447 for (int i = 0; i < 4; i += 2)
3448 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3449 mode != VOIDmode;
3450 mode = GET_MODE_WIDER_MODE (mode))
3452 vmode = targetm.vectorize.preferred_simd_mode (mode);
3453 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3454 continue;
3455 while (vs
3456 && GET_MODE_SIZE (vmode) < vs
3457 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3458 vmode = GET_MODE_2XWIDER_MODE (vmode);
3460 tree type = lang_hooks.types.type_for_mode (mode, 1);
3461 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3462 continue;
3463 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3464 / GET_MODE_SIZE (mode));
3465 if (TYPE_MODE (type) != vmode)
3466 continue;
3467 if (TYPE_ALIGN_UNIT (type) > al)
3468 al = TYPE_ALIGN_UNIT (type);
3470 return build_int_cst (integer_type_node, al);
3474 /* This structure is part of the interface between lower_rec_simd_input_clauses
3475 and lower_rec_input_clauses. */
3477 struct omplow_simd_context {
3478 tree idx;
3479 tree lane;
3480 vec<tree, va_heap> simt_eargs;
3481 gimple_seq simt_dlist;
3482 int max_vf;
3483 bool is_simt;
3486 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3487 privatization. */
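/* Roughly (an illustrative sketch): outside of SIMT mode, a privatized
   scalar D gets an "omp simd array" backing store of max_vf elements;
   IVAR becomes the per-iteration element D_array[idx], and LVAR the
   element D_array[lane] used in the loop body via D's DECL_VALUE_EXPR.
   On SIMT targets each thread instead keeps its own "omp simt private"
   copy.  */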
3489 static bool
3490 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3491 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3493 if (sctx->max_vf == 0)
3495 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3496 if (sctx->max_vf > 1)
3498 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3499 OMP_CLAUSE_SAFELEN);
3500 if (c
3501 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3502 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3503 sctx->max_vf = 1;
3504 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3505 sctx->max_vf) == -1)
3506 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3508 if (sctx->max_vf > 1)
3510 sctx->idx = create_tmp_var (unsigned_type_node);
3511 sctx->lane = create_tmp_var (unsigned_type_node);
3514 if (sctx->max_vf == 1)
3515 return false;
3517 if (sctx->is_simt)
3519 if (is_gimple_reg (new_var))
3521 ivar = lvar = new_var;
3522 return true;
3524 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3525 ivar = lvar = create_tmp_var (type);
3526 TREE_ADDRESSABLE (ivar) = 1;
3527 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3528 NULL, DECL_ATTRIBUTES (ivar));
3529 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3530 tree clobber = build_constructor (type, NULL);
3531 TREE_THIS_VOLATILE (clobber) = 1;
3532 gimple *g = gimple_build_assign (ivar, clobber);
3533 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3535 else
3537 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3538 tree avar = create_tmp_var_raw (atype);
3539 if (TREE_ADDRESSABLE (new_var))
3540 TREE_ADDRESSABLE (avar) = 1;
3541 DECL_ATTRIBUTES (avar)
3542 = tree_cons (get_identifier ("omp simd array"), NULL,
3543 DECL_ATTRIBUTES (avar));
3544 gimple_add_tmp_var (avar);
3545 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3546 NULL_TREE, NULL_TREE);
3547 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3548 NULL_TREE, NULL_TREE);
3550 if (DECL_P (new_var))
3552 SET_DECL_VALUE_EXPR (new_var, lvar);
3553 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3555 return true;
3558 /* Helper function of lower_rec_input_clauses. For a reference used
3559 in a simd reduction, add an underlying variable that it will reference. */
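/* For instance (illustrative), with a C++ reference in the reduction

	int &r = ...;
	#pragma omp simd reduction (+ : r)

   the privatized reference NEW_VARD needs an object to point at; when
   the referenced type has a constant size, one is created here and its
   address is assigned to NEW_VARD.  */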
3561 static void
3562 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3564 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3565 if (TREE_CONSTANT (z))
3567 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3568 get_name (new_vard));
3569 gimple_add_tmp_var (z);
3570 TREE_ADDRESSABLE (z) = 1;
3571 z = build_fold_addr_expr_loc (loc, z);
3572 gimplify_assign (new_vard, z, ilist);
3576 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3577 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3578 private variables. Initialization statements go in ILIST, while calls
3579 to destructors go in DLIST. */
3581 static void
3582 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3583 omp_context *ctx, struct omp_for_data *fd)
3585 tree c, dtor, copyin_seq, x, ptr;
3586 bool copyin_by_ref = false;
3587 bool lastprivate_firstprivate = false;
3588 bool reduction_omp_orig_ref = false;
3589 int pass;
3590 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3591 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3592 omplow_simd_context sctx = omplow_simd_context ();
3593 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3594 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3595 gimple_seq llist[3] = { };
3597 copyin_seq = NULL;
3598 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3600 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3601 with data sharing clauses referencing variable sized vars. That
3602 is unnecessarily hard to support and very unlikely to result in
3603 vectorized code anyway. */
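/* E.g. (an illustrative case) a variable length array in a data
   sharing clause forces max_vf to 1:

	int vla[n];
	#pragma omp simd private (vla)	// max_vf = 1, i.e. safelen(1)
	for (int i = 0; i < n; i++) ...  */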
3604 if (is_simd)
3605 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3606 switch (OMP_CLAUSE_CODE (c))
3608 case OMP_CLAUSE_LINEAR:
3609 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3610 sctx.max_vf = 1;
3611 /* FALLTHRU */
3612 case OMP_CLAUSE_PRIVATE:
3613 case OMP_CLAUSE_FIRSTPRIVATE:
3614 case OMP_CLAUSE_LASTPRIVATE:
3615 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3616 sctx.max_vf = 1;
3617 break;
3618 case OMP_CLAUSE_REDUCTION:
3619 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3620 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3621 sctx.max_vf = 1;
3622 break;
3623 default:
3624 continue;
3627 /* Add a placeholder for simduid. */
3628 if (sctx.is_simt && sctx.max_vf != 1)
3629 sctx.simt_eargs.safe_push (NULL_TREE);
3631 /* Do all the fixed sized types in the first pass, and the variable sized
3632 types in the second pass. This makes sure that the scalar arguments to
3633 the variable sized types are processed before we use them in the
3634 variable sized operations. */
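/* E.g. (illustrative) for firstprivate (n, buf) where buf is a VLA of
   n elements, the scalar n is handled in pass 0 so that pass 1 can use
   it when computing buf's size.  */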
3635 for (pass = 0; pass < 2; ++pass)
3637 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3639 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3640 tree var, new_var;
3641 bool by_ref;
3642 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3644 switch (c_kind)
3646 case OMP_CLAUSE_PRIVATE:
3647 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3648 continue;
3649 break;
3650 case OMP_CLAUSE_SHARED:
3651 /* Ignore shared directives in teams construct. */
3652 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3653 continue;
3654 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3656 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3657 || is_global_var (OMP_CLAUSE_DECL (c)));
3658 continue;
3660 case OMP_CLAUSE_FIRSTPRIVATE:
3661 case OMP_CLAUSE_COPYIN:
3662 break;
3663 case OMP_CLAUSE_LINEAR:
3664 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3665 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3666 lastprivate_firstprivate = true;
3667 break;
3668 case OMP_CLAUSE_REDUCTION:
3669 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3670 reduction_omp_orig_ref = true;
3671 break;
3672 case OMP_CLAUSE__LOOPTEMP_:
3673 /* Handle _looptemp_ clauses only on parallel/task. */
3674 if (fd)
3675 continue;
3676 break;
3677 case OMP_CLAUSE_LASTPRIVATE:
3678 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3680 lastprivate_firstprivate = true;
3681 if (pass != 0 || is_taskloop_ctx (ctx))
3682 continue;
3684 /* Even without a corresponding firstprivate, if the
3685 decl is a Fortran allocatable, it needs an outer var
3686 reference. */
3687 else if (pass == 0
3688 && lang_hooks.decls.omp_private_outer_ref
3689 (OMP_CLAUSE_DECL (c)))
3690 lastprivate_firstprivate = true;
3691 break;
3692 case OMP_CLAUSE_ALIGNED:
3693 if (pass == 0)
3694 continue;
3695 var = OMP_CLAUSE_DECL (c);
3696 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3697 && !is_global_var (var))
3699 new_var = maybe_lookup_decl (var, ctx);
3700 if (new_var == NULL_TREE)
3701 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3702 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3703 tree alarg = omp_clause_aligned_alignment (c);
3704 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3705 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3706 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3707 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3708 gimplify_and_add (x, ilist);
3710 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3711 && is_global_var (var))
3713 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3714 new_var = lookup_decl (var, ctx);
3715 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3716 t = build_fold_addr_expr_loc (clause_loc, t);
3717 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3718 tree alarg = omp_clause_aligned_alignment (c);
3719 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3720 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3721 t = fold_convert_loc (clause_loc, ptype, t);
3722 x = create_tmp_var (ptype);
3723 t = build2 (MODIFY_EXPR, ptype, x, t);
3724 gimplify_and_add (t, ilist);
3725 t = build_simple_mem_ref_loc (clause_loc, x);
3726 SET_DECL_VALUE_EXPR (new_var, t);
3727 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3729 continue;
3730 default:
3731 continue;
3734 new_var = var = OMP_CLAUSE_DECL (c);
3735 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3737 var = TREE_OPERAND (var, 0);
3738 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3739 var = TREE_OPERAND (var, 0);
3740 if (TREE_CODE (var) == INDIRECT_REF
3741 || TREE_CODE (var) == ADDR_EXPR)
3742 var = TREE_OPERAND (var, 0);
3743 if (is_variable_sized (var))
3745 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3746 var = DECL_VALUE_EXPR (var);
3747 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3748 var = TREE_OPERAND (var, 0);
3749 gcc_assert (DECL_P (var));
3751 new_var = var;
3753 if (c_kind != OMP_CLAUSE_COPYIN)
3754 new_var = lookup_decl (var, ctx);
3756 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3758 if (pass != 0)
3759 continue;
3761 /* C/C++ array section reductions. */
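/* E.g. (an illustrative sketch)

	#pragma omp parallel reduction (+ : p[2:8])

   arrives here with OMP_CLAUSE_DECL a MEM_REF carrying a bias; the
   code below allocates a private array of eight elements (a stack
   temporary, or alloca for a variable length), rebases the private
   pointer by the bias, and emits an initialization loop into ILIST
   plus, for simd or user-defined reductions, a merge loop into
   DLIST.  */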
3762 else if (c_kind == OMP_CLAUSE_REDUCTION
3763 && var != OMP_CLAUSE_DECL (c))
3765 if (pass == 0)
3766 continue;
3768 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3769 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3770 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3772 tree b = TREE_OPERAND (orig_var, 1);
3773 b = maybe_lookup_decl (b, ctx);
3774 if (b == NULL)
3776 b = TREE_OPERAND (orig_var, 1);
3777 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3779 if (integer_zerop (bias))
3780 bias = b;
3781 else
3783 bias = fold_convert_loc (clause_loc,
3784 TREE_TYPE (b), bias);
3785 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3786 TREE_TYPE (b), b, bias);
3788 orig_var = TREE_OPERAND (orig_var, 0);
3790 if (TREE_CODE (orig_var) == INDIRECT_REF
3791 || TREE_CODE (orig_var) == ADDR_EXPR)
3792 orig_var = TREE_OPERAND (orig_var, 0);
3793 tree d = OMP_CLAUSE_DECL (c);
3794 tree type = TREE_TYPE (d);
3795 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3796 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3797 const char *name = get_name (orig_var);
3798 if (TREE_CONSTANT (v))
3800 x = create_tmp_var_raw (type, name);
3801 gimple_add_tmp_var (x);
3802 TREE_ADDRESSABLE (x) = 1;
3803 x = build_fold_addr_expr_loc (clause_loc, x);
3805 else
3807 tree atmp
3808 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3809 tree t = maybe_lookup_decl (v, ctx);
3810 if (t)
3811 v = t;
3812 else
3813 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3814 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3815 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3816 TREE_TYPE (v), v,
3817 build_int_cst (TREE_TYPE (v), 1));
3818 t = fold_build2_loc (clause_loc, MULT_EXPR,
3819 TREE_TYPE (v), t,
3820 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3821 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3822 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3825 tree ptype = build_pointer_type (TREE_TYPE (type));
3826 x = fold_convert_loc (clause_loc, ptype, x);
3827 tree y = create_tmp_var (ptype, name);
3828 gimplify_assign (y, x, ilist);
3829 x = y;
3830 tree yb = y;
3832 if (!integer_zerop (bias))
3834 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3835 bias);
3836 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3838 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3839 pointer_sized_int_node, yb, bias);
3840 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3841 yb = create_tmp_var (ptype, name);
3842 gimplify_assign (yb, x, ilist);
3843 x = yb;
3846 d = TREE_OPERAND (d, 0);
3847 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3848 d = TREE_OPERAND (d, 0);
3849 if (TREE_CODE (d) == ADDR_EXPR)
3851 if (orig_var != var)
3853 gcc_assert (is_variable_sized (orig_var));
3854 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3856 gimplify_assign (new_var, x, ilist);
3857 tree new_orig_var = lookup_decl (orig_var, ctx);
3858 tree t = build_fold_indirect_ref (new_var);
3859 DECL_IGNORED_P (new_var) = 0;
3860 TREE_THIS_NOTRAP (t) = 1;
3861 SET_DECL_VALUE_EXPR (new_orig_var, t);
3862 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3864 else
3866 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3867 build_int_cst (ptype, 0));
3868 SET_DECL_VALUE_EXPR (new_var, x);
3869 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3872 else
3874 gcc_assert (orig_var == var);
3875 if (TREE_CODE (d) == INDIRECT_REF)
3877 x = create_tmp_var (ptype, name);
3878 TREE_ADDRESSABLE (x) = 1;
3879 gimplify_assign (x, yb, ilist);
3880 x = build_fold_addr_expr_loc (clause_loc, x);
3882 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3883 gimplify_assign (new_var, x, ilist);
3885 tree y1 = create_tmp_var (ptype, NULL);
3886 gimplify_assign (y1, y, ilist);
3887 tree i2 = NULL_TREE, y2 = NULL_TREE;
3888 tree body2 = NULL_TREE, end2 = NULL_TREE;
3889 tree y3 = NULL_TREE, y4 = NULL_TREE;
3890 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3892 y2 = create_tmp_var (ptype, NULL);
3893 gimplify_assign (y2, y, ilist);
3894 tree ref = build_outer_var_ref (var, ctx);
3895 /* For a reference, build_outer_var_ref already performs the dereference. */
3896 if (TREE_CODE (d) == INDIRECT_REF)
3897 gcc_assert (omp_is_reference (var));
3898 else if (TREE_CODE (d) == ADDR_EXPR)
3899 ref = build_fold_addr_expr (ref);
3900 else if (omp_is_reference (var))
3901 ref = build_fold_addr_expr (ref);
3902 ref = fold_convert_loc (clause_loc, ptype, ref);
3903 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3904 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3906 y3 = create_tmp_var (ptype, NULL);
3907 gimplify_assign (y3, unshare_expr (ref), ilist);
3909 if (is_simd)
3911 y4 = create_tmp_var (ptype, NULL);
3912 gimplify_assign (y4, ref, dlist);
3915 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3916 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3917 tree body = create_artificial_label (UNKNOWN_LOCATION);
3918 tree end = create_artificial_label (UNKNOWN_LOCATION);
3919 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3920 if (y2)
3922 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3923 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3924 body2 = create_artificial_label (UNKNOWN_LOCATION);
3925 end2 = create_artificial_label (UNKNOWN_LOCATION);
3926 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3928 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3930 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3931 tree decl_placeholder
3932 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3933 SET_DECL_VALUE_EXPR (decl_placeholder,
3934 build_simple_mem_ref (y1));
3935 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3936 SET_DECL_VALUE_EXPR (placeholder,
3937 y3 ? build_simple_mem_ref (y3)
3938 : error_mark_node);
3939 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3940 x = lang_hooks.decls.omp_clause_default_ctor
3941 (c, build_simple_mem_ref (y1),
3942 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3943 if (x)
3944 gimplify_and_add (x, ilist);
3945 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3947 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3948 lower_omp (&tseq, ctx);
3949 gimple_seq_add_seq (ilist, tseq);
3951 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3952 if (is_simd)
3954 SET_DECL_VALUE_EXPR (decl_placeholder,
3955 build_simple_mem_ref (y2));
3956 SET_DECL_VALUE_EXPR (placeholder,
3957 build_simple_mem_ref (y4));
3958 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3959 lower_omp (&tseq, ctx);
3960 gimple_seq_add_seq (dlist, tseq);
3961 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3963 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3964 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3965 x = lang_hooks.decls.omp_clause_dtor
3966 (c, build_simple_mem_ref (y2));
3967 if (x)
3969 gimple_seq tseq = NULL;
3970 dtor = x;
3971 gimplify_stmt (&dtor, &tseq);
3972 gimple_seq_add_seq (dlist, tseq);
3975 else
3977 x = omp_reduction_init (c, TREE_TYPE (type));
3978 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3980 /* reduction(-:var) sums up the partial results, so it
3981 acts identically to reduction(+:var). */
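/* (Illustration: with reduction(-:sum), each thread accumulates a
   private partial result, e.g. via sum -= a[i], and the partials are
   then combined into the original SUM with +, exactly as for
   reduction(+:sum).)  */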
3982 if (code == MINUS_EXPR)
3983 code = PLUS_EXPR;
3985 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3986 if (is_simd)
3988 x = build2 (code, TREE_TYPE (type),
3989 build_simple_mem_ref (y4),
3990 build_simple_mem_ref (y2));
3991 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3994 gimple *g
3995 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3996 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3997 gimple_seq_add_stmt (ilist, g);
3998 if (y3)
4000 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4001 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4002 gimple_seq_add_stmt (ilist, g);
4004 g = gimple_build_assign (i, PLUS_EXPR, i,
4005 build_int_cst (TREE_TYPE (i), 1));
4006 gimple_seq_add_stmt (ilist, g);
4007 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4008 gimple_seq_add_stmt (ilist, g);
4009 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4010 if (y2)
4012 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4013 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4014 gimple_seq_add_stmt (dlist, g);
4015 if (y4)
4017 g = gimple_build_assign
4018 (y4, POINTER_PLUS_EXPR, y4,
4019 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4020 gimple_seq_add_stmt (dlist, g);
4022 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4023 build_int_cst (TREE_TYPE (i2), 1));
4024 gimple_seq_add_stmt (dlist, g);
4025 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4026 gimple_seq_add_stmt (dlist, g);
4027 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4029 continue;
4031 else if (is_variable_sized (var))
4033 /* For variable-sized types, we need to allocate the
4034 actual storage here. Call alloca and store the
4035 result in the pointer decl that we created elsewhere. */
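/* Schematically, for a VLA such as "int a[n]" the private copy
   becomes roughly

     tmp = __builtin_alloca_with_align (size, align);
     a.ptr = (type *) tmp;

   where a.ptr is the pointer decl whose INDIRECT_REF serves as A's
   DECL_VALUE_EXPR (names illustrative only).  */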
4036 if (pass == 0)
4037 continue;
4039 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4041 gcall *stmt;
4042 tree tmp, atmp;
4044 ptr = DECL_VALUE_EXPR (new_var);
4045 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4046 ptr = TREE_OPERAND (ptr, 0);
4047 gcc_assert (DECL_P (ptr));
4048 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4050 /* void *tmp = __builtin_alloca_with_align (size, align); */
4051 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4052 stmt = gimple_build_call (atmp, 2, x,
4053 size_int (DECL_ALIGN (var)));
4054 tmp = create_tmp_var_raw (ptr_type_node);
4055 gimple_add_tmp_var (tmp);
4056 gimple_call_set_lhs (stmt, tmp);
4058 gimple_seq_add_stmt (ilist, stmt);
4060 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4061 gimplify_assign (ptr, x, ilist);
4064 else if (omp_is_reference (var))
4066 /* For references that are being privatized for Fortran,
4067 allocate new backing storage for the new pointer
4068 variable. This allows us to avoid changing all the
4069 code that expects a pointer into code that expects
4070 a direct variable. */
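/* Schematically the result is

     new_var = &fresh_backing_storage;

   so code that dereferences the pointer keeps working on the
   private copy unchanged (sketch only).  */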
4071 if (pass == 0)
4072 continue;
4074 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4075 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4077 x = build_receiver_ref (var, false, ctx);
4078 x = build_fold_addr_expr_loc (clause_loc, x);
4080 else if (TREE_CONSTANT (x))
4082 /* For a reduction in a SIMD loop, defer adding the
4083 initialization of the reference, because if we decide
4084 to use a SIMD array for it, the initialization could cause
4085 an expansion ICE. */
4086 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4087 x = NULL_TREE;
4088 else
4090 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4091 get_name (var));
4092 gimple_add_tmp_var (x);
4093 TREE_ADDRESSABLE (x) = 1;
4094 x = build_fold_addr_expr_loc (clause_loc, x);
4097 else
4099 tree atmp
4100 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4101 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4102 tree al = size_int (TYPE_ALIGN (rtype));
4103 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4106 if (x)
4108 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4109 gimplify_assign (new_var, x, ilist);
4112 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4114 else if (c_kind == OMP_CLAUSE_REDUCTION
4115 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4117 if (pass == 0)
4118 continue;
4120 else if (pass != 0)
4121 continue;
4123 switch (OMP_CLAUSE_CODE (c))
4125 case OMP_CLAUSE_SHARED:
4126 /* Ignore shared directives in teams construct. */
4127 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4128 continue;
4129 /* Shared global vars are just accessed directly. */
4130 if (is_global_var (new_var))
4131 break;
4132 /* For taskloop firstprivate/lastprivate, represented
4133 as firstprivate and shared clauses on the task, new_var
4134 is the firstprivate var. */
4135 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4136 break;
4137 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4138 needs to be delayed until after fixup_child_record_type so
4139 that we get the correct type during the dereference. */
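/* From here on, uses of the shared variable inside the child function
   are rewritten through this DECL_VALUE_EXPR into loads from the
   receiver struct, roughly .omp_data_i->x, or *.omp_data_i->x when
   passed by reference (field name illustrative).  */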
4140 by_ref = use_pointer_for_field (var, ctx);
4141 x = build_receiver_ref (var, by_ref, ctx);
4142 SET_DECL_VALUE_EXPR (new_var, x);
4143 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4145 /* ??? If VAR is not passed by reference, and the variable
4146 hasn't been initialized yet, then we'll get a warning for
4147 the store into the omp_data_s structure. Ideally, we'd be
4148 able to notice this and not store anything at all, but
4149 we're generating code too early. Suppress the warning. */
4150 if (!by_ref)
4151 TREE_NO_WARNING (var) = 1;
4152 break;
4154 case OMP_CLAUSE_LASTPRIVATE:
4155 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4156 break;
4157 /* FALLTHRU */
4159 case OMP_CLAUSE_PRIVATE:
4160 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4161 x = build_outer_var_ref (var, ctx);
4162 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4164 if (is_task_ctx (ctx))
4165 x = build_receiver_ref (var, false, ctx);
4166 else
4167 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4169 else
4170 x = NULL;
4171 do_private:
4172 tree nx;
4173 nx = lang_hooks.decls.omp_clause_default_ctor
4174 (c, unshare_expr (new_var), x);
4175 if (is_simd)
4177 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4178 if ((TREE_ADDRESSABLE (new_var) || nx || y
4179 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4180 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4181 ivar, lvar))
4183 if (nx)
4184 x = lang_hooks.decls.omp_clause_default_ctor
4185 (c, unshare_expr (ivar), x);
4186 if (nx && x)
4187 gimplify_and_add (x, &llist[0]);
4188 if (y)
4190 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4191 if (y)
4193 gimple_seq tseq = NULL;
4195 dtor = y;
4196 gimplify_stmt (&dtor, &tseq);
4197 gimple_seq_add_seq (&llist[1], tseq);
4200 break;
4203 if (nx)
4204 gimplify_and_add (nx, ilist);
4205 /* FALLTHRU */
4207 do_dtor:
4208 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4209 if (x)
4211 gimple_seq tseq = NULL;
4213 dtor = x;
4214 gimplify_stmt (&dtor, &tseq);
4215 gimple_seq_add_seq (dlist, tseq);
4217 break;
4219 case OMP_CLAUSE_LINEAR:
4220 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4221 goto do_firstprivate;
4222 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4223 x = NULL;
4224 else
4225 x = build_outer_var_ref (var, ctx);
4226 goto do_private;
4228 case OMP_CLAUSE_FIRSTPRIVATE:
4229 if (is_task_ctx (ctx))
4231 if (omp_is_reference (var) || is_variable_sized (var))
4232 goto do_dtor;
4233 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4234 ctx))
4235 || use_pointer_for_field (var, NULL))
4237 x = build_receiver_ref (var, false, ctx);
4238 SET_DECL_VALUE_EXPR (new_var, x);
4239 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4240 goto do_dtor;
4243 do_firstprivate:
4244 x = build_outer_var_ref (var, ctx);
4245 if (is_simd)
4247 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4248 && gimple_omp_for_combined_into_p (ctx->stmt))
4250 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4251 tree stept = TREE_TYPE (t);
4252 tree ct = omp_find_clause (clauses,
4253 OMP_CLAUSE__LOOPTEMP_);
4254 gcc_assert (ct);
4255 tree l = OMP_CLAUSE_DECL (ct);
4256 tree n1 = fd->loop.n1;
4257 tree step = fd->loop.step;
4258 tree itype = TREE_TYPE (l);
4259 if (POINTER_TYPE_P (itype))
4260 itype = signed_type_for (itype);
4261 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4262 if (TYPE_UNSIGNED (itype)
4263 && fd->loop.cond_code == GT_EXPR)
4264 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4265 fold_build1 (NEGATE_EXPR, itype, l),
4266 fold_build1 (NEGATE_EXPR,
4267 itype, step));
4268 else
4269 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4270 t = fold_build2 (MULT_EXPR, stept,
4271 fold_convert (stept, l), t);
4273 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4275 x = lang_hooks.decls.omp_clause_linear_ctor
4276 (c, new_var, x, t);
4277 gimplify_and_add (x, ilist);
4278 goto do_dtor;
4281 if (POINTER_TYPE_P (TREE_TYPE (x)))
4282 x = fold_build2 (POINTER_PLUS_EXPR,
4283 TREE_TYPE (x), x, t);
4284 else
4285 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4288 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4289 || TREE_ADDRESSABLE (new_var))
4290 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4291 ivar, lvar))
4293 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4295 tree iv = create_tmp_var (TREE_TYPE (new_var));
4296 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4297 gimplify_and_add (x, ilist);
4298 gimple_stmt_iterator gsi
4299 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4300 gassign *g
4301 = gimple_build_assign (unshare_expr (lvar), iv);
4302 gsi_insert_before_without_update (&gsi, g,
4303 GSI_SAME_STMT);
4304 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4305 enum tree_code code = PLUS_EXPR;
4306 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4307 code = POINTER_PLUS_EXPR;
4308 g = gimple_build_assign (iv, code, iv, t);
4309 gsi_insert_before_without_update (&gsi, g,
4310 GSI_SAME_STMT);
4311 break;
4313 x = lang_hooks.decls.omp_clause_copy_ctor
4314 (c, unshare_expr (ivar), x);
4315 gimplify_and_add (x, &llist[0]);
4316 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4317 if (x)
4319 gimple_seq tseq = NULL;
4321 dtor = x;
4322 gimplify_stmt (&dtor, &tseq);
4323 gimple_seq_add_seq (&llist[1], tseq);
4325 break;
4328 x = lang_hooks.decls.omp_clause_copy_ctor
4329 (c, unshare_expr (new_var), x);
4330 gimplify_and_add (x, ilist);
4331 goto do_dtor;
4333 case OMP_CLAUSE__LOOPTEMP_:
4334 gcc_assert (is_taskreg_ctx (ctx));
4335 x = build_outer_var_ref (var, ctx);
4336 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4337 gimplify_and_add (x, ilist);
4338 break;
4340 case OMP_CLAUSE_COPYIN:
4341 by_ref = use_pointer_for_field (var, NULL);
4342 x = build_receiver_ref (var, by_ref, ctx);
4343 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4344 append_to_statement_list (x, &copyin_seq);
4345 copyin_by_ref |= by_ref;
4346 break;
4348 case OMP_CLAUSE_REDUCTION:
4349 /* OpenACC reductions are initialized using the
4350 GOACC_REDUCTION internal function. */
4351 if (is_gimple_omp_oacc (ctx->stmt))
4352 break;
4353 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4355 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4356 gimple *tseq;
4357 x = build_outer_var_ref (var, ctx);
4359 if (omp_is_reference (var)
4360 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4361 TREE_TYPE (x)))
4362 x = build_fold_addr_expr_loc (clause_loc, x);
4363 SET_DECL_VALUE_EXPR (placeholder, x);
4364 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4365 tree new_vard = new_var;
4366 if (omp_is_reference (var))
4368 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4369 new_vard = TREE_OPERAND (new_var, 0);
4370 gcc_assert (DECL_P (new_vard));
4372 if (is_simd
4373 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4374 ivar, lvar))
4376 if (new_vard == new_var)
4378 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4379 SET_DECL_VALUE_EXPR (new_var, ivar);
4381 else
4383 SET_DECL_VALUE_EXPR (new_vard,
4384 build_fold_addr_expr (ivar));
4385 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4387 x = lang_hooks.decls.omp_clause_default_ctor
4388 (c, unshare_expr (ivar),
4389 build_outer_var_ref (var, ctx));
4390 if (x)
4391 gimplify_and_add (x, &llist[0]);
4392 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4394 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4395 lower_omp (&tseq, ctx);
4396 gimple_seq_add_seq (&llist[0], tseq);
4398 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4399 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4400 lower_omp (&tseq, ctx);
4401 gimple_seq_add_seq (&llist[1], tseq);
4402 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4403 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4404 if (new_vard == new_var)
4405 SET_DECL_VALUE_EXPR (new_var, lvar);
4406 else
4407 SET_DECL_VALUE_EXPR (new_vard,
4408 build_fold_addr_expr (lvar));
4409 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4410 if (x)
4412 tseq = NULL;
4413 dtor = x;
4414 gimplify_stmt (&dtor, &tseq);
4415 gimple_seq_add_seq (&llist[1], tseq);
4417 break;
4419 /* If this is a reference to a constant-size reduction var
4420 with a placeholder, we haven't emitted the initializer
4421 for it because that is undesirable if SIMD arrays are used.
4422 But if they aren't used, we need to emit the deferred
4423 initialization now. */
4424 else if (omp_is_reference (var) && is_simd)
4425 handle_simd_reference (clause_loc, new_vard, ilist);
4426 x = lang_hooks.decls.omp_clause_default_ctor
4427 (c, unshare_expr (new_var),
4428 build_outer_var_ref (var, ctx));
4429 if (x)
4430 gimplify_and_add (x, ilist);
4431 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4433 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4434 lower_omp (&tseq, ctx);
4435 gimple_seq_add_seq (ilist, tseq);
4437 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4438 if (is_simd)
4440 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4441 lower_omp (&tseq, ctx);
4442 gimple_seq_add_seq (dlist, tseq);
4443 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4445 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4446 goto do_dtor;
4448 else
4450 x = omp_reduction_init (c, TREE_TYPE (new_var));
4451 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4452 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4454 /* reduction(-:var) sums up the partial results, so it
4455 acts identically to reduction(+:var). */
4456 if (code == MINUS_EXPR)
4457 code = PLUS_EXPR;
4459 tree new_vard = new_var;
4460 if (is_simd && omp_is_reference (var))
4462 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4463 new_vard = TREE_OPERAND (new_var, 0);
4464 gcc_assert (DECL_P (new_vard));
4466 if (is_simd
4467 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4468 ivar, lvar))
4470 tree ref = build_outer_var_ref (var, ctx);
4472 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4474 if (sctx.is_simt)
4476 if (!simt_lane)
4477 simt_lane = create_tmp_var (unsigned_type_node);
4478 x = build_call_expr_internal_loc
4479 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4480 TREE_TYPE (ivar), 2, ivar, simt_lane);
4481 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4482 gimplify_assign (ivar, x, &llist[2]);
4484 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4485 ref = build_outer_var_ref (var, ctx);
4486 gimplify_assign (ref, x, &llist[1]);
4488 if (new_vard != new_var)
4490 SET_DECL_VALUE_EXPR (new_vard,
4491 build_fold_addr_expr (lvar));
4492 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4495 else
4497 if (omp_is_reference (var) && is_simd)
4498 handle_simd_reference (clause_loc, new_vard, ilist);
4499 gimplify_assign (new_var, x, ilist);
4500 if (is_simd)
4502 tree ref = build_outer_var_ref (var, ctx);
4504 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4505 ref = build_outer_var_ref (var, ctx);
4506 gimplify_assign (ref, x, dlist);
4510 break;
4512 default:
4513 gcc_unreachable ();
4518 if (sctx.max_vf == 1)
4519 sctx.is_simt = false;
4521 if (sctx.lane || sctx.is_simt)
4523 uid = create_tmp_var (ptr_type_node, "simduid");
4524 /* We don't want uninit warnings on simduid; it is always uninitialized,
4525 but we use it only for its DECL_UID, not for its value. */
4526 TREE_NO_WARNING (uid) = 1;
4527 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4528 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4529 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4530 gimple_omp_for_set_clauses (ctx->stmt, c);
4532 /* Emit calls denoting privatized variables and initializing a pointer to
4533 the structure that holds private variables as fields, after the ompdevlow pass. */
4534 if (sctx.is_simt)
4536 sctx.simt_eargs[0] = uid;
4537 gimple *g
4538 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4539 gimple_call_set_lhs (g, uid);
4540 gimple_seq_add_stmt (ilist, g);
4541 sctx.simt_eargs.release ();
4543 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4544 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4545 gimple_call_set_lhs (g, simtrec);
4546 gimple_seq_add_stmt (ilist, g);
4548 if (sctx.lane)
4550 gimple *g
4551 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4552 gimple_call_set_lhs (g, sctx.lane);
4553 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4554 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4555 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4556 build_int_cst (unsigned_type_node, 0));
4557 gimple_seq_add_stmt (ilist, g);
4558 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
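/* The generated loop is a butterfly reduction, roughly

     for (lane = 1; lane < simt_vf; lane <<= 1)
       x = x OP GOMP_SIMT_XCHG_BFLY (x, lane);

   after which every lane holds the combined value (sketch).  */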
4559 if (llist[2])
4561 tree simt_vf = create_tmp_var (unsigned_type_node);
4562 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4563 gimple_call_set_lhs (g, simt_vf);
4564 gimple_seq_add_stmt (dlist, g);
4566 tree t = build_int_cst (unsigned_type_node, 1);
4567 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4568 gimple_seq_add_stmt (dlist, g);
4570 t = build_int_cst (unsigned_type_node, 0);
4571 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4572 gimple_seq_add_stmt (dlist, g);
4574 tree body = create_artificial_label (UNKNOWN_LOCATION);
4575 tree header = create_artificial_label (UNKNOWN_LOCATION);
4576 tree end = create_artificial_label (UNKNOWN_LOCATION);
4577 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4578 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4580 gimple_seq_add_seq (dlist, llist[2]);
4582 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4583 gimple_seq_add_stmt (dlist, g);
4585 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4586 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4587 gimple_seq_add_stmt (dlist, g);
4589 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4591 for (int i = 0; i < 2; i++)
4592 if (llist[i])
4594 tree vf = create_tmp_var (unsigned_type_node);
4595 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4596 gimple_call_set_lhs (g, vf);
4597 gimple_seq *seq = i == 0 ? ilist : dlist;
4598 gimple_seq_add_stmt (seq, g);
4599 tree t = build_int_cst (unsigned_type_node, 0);
4600 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4601 gimple_seq_add_stmt (seq, g);
4602 tree body = create_artificial_label (UNKNOWN_LOCATION);
4603 tree header = create_artificial_label (UNKNOWN_LOCATION);
4604 tree end = create_artificial_label (UNKNOWN_LOCATION);
4605 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4606 gimple_seq_add_stmt (seq, gimple_build_label (body));
4607 gimple_seq_add_seq (seq, llist[i]);
4608 t = build_int_cst (unsigned_type_node, 1);
4609 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4610 gimple_seq_add_stmt (seq, g);
4611 gimple_seq_add_stmt (seq, gimple_build_label (header));
4612 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4613 gimple_seq_add_stmt (seq, g);
4614 gimple_seq_add_stmt (seq, gimple_build_label (end));
4617 if (sctx.is_simt)
4619 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4620 gimple *g
4621 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4622 gimple_seq_add_stmt (dlist, g);
4625 /* The copyin sequence is not to be executed by the main thread, since
4626 that would result in self-copies. A self-copy may be harmless for
4627 scalars, but it certainly is not for a C++ operator=. */
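/* The guard built below is effectively

     if (omp_get_thread_num () != 0)
       <copyin assignments>;  */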
4628 if (copyin_seq)
4630 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4631 0);
4632 x = build2 (NE_EXPR, boolean_type_node, x,
4633 build_int_cst (TREE_TYPE (x), 0));
4634 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4635 gimplify_and_add (x, ilist);
4638 /* If any copyin variable is passed by reference, we must ensure the
4639 master thread doesn't modify it before it is copied over in all
4640 threads. Similarly for variables in both firstprivate and
4641 lastprivate clauses we need to ensure the lastprivate copying
4642 happens after firstprivate copying in all threads. And similarly
4643 for UDRs if initializer expression refers to omp_orig. */
4644 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4646 /* Don't add any barrier for #pragma omp simd or
4647 #pragma omp distribute. */
4648 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4649 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4650 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4653 /* If max_vf is non-zero, then we can use only a vectorization factor
4654 up to the max_vf we chose. So stick it into the safelen clause. */
4655 if (sctx.max_vf)
4657 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4658 OMP_CLAUSE_SAFELEN);
4659 if (c == NULL_TREE
4660 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4661 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4662 sctx.max_vf) == 1))
4664 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4665 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4666 sctx.max_vf);
4667 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4668 gimple_omp_for_set_clauses (ctx->stmt, c);
4674 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4675 both parallel and workshare constructs. PREDICATE may be NULL if it's
4676 always true. */
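/* Sketch of the output for lastprivate(x) on a worksharing loop:

     if (<thread executed the sequentially last iteration>)
       x_orig = x_priv;

   where the condition comes from PREDICATE (omitted when NULL).  */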
4678 static void
4679 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4680 omp_context *ctx)
4682 tree x, c, label = NULL, orig_clauses = clauses;
4683 bool par_clauses = false;
4684 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4686 /* Early exit if there are no lastprivate or linear clauses. */
4687 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4688 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4689 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4690 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4691 break;
4692 if (clauses == NULL)
4694 /* If this was a workshare clause, see if it had been combined
4695 with its parallel. In that case, look for the clauses on the
4696 parallel statement itself. */
4697 if (is_parallel_ctx (ctx))
4698 return;
4700 ctx = ctx->outer;
4701 if (ctx == NULL || !is_parallel_ctx (ctx))
4702 return;
4704 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4705 OMP_CLAUSE_LASTPRIVATE);
4706 if (clauses == NULL)
4707 return;
4708 par_clauses = true;
4711 bool maybe_simt = false;
4712 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4713 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4715 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4716 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4717 if (simduid)
4718 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4721 if (predicate)
4723 gcond *stmt;
4724 tree label_true, arm1, arm2;
4725 enum tree_code pred_code = TREE_CODE (predicate);
4727 label = create_artificial_label (UNKNOWN_LOCATION);
4728 label_true = create_artificial_label (UNKNOWN_LOCATION);
4729 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4731 arm1 = TREE_OPERAND (predicate, 0);
4732 arm2 = TREE_OPERAND (predicate, 1);
4733 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4734 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4736 else
4738 arm1 = predicate;
4739 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4740 arm2 = boolean_false_node;
4741 pred_code = NE_EXPR;
4743 if (maybe_simt)
4745 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4746 c = fold_convert (integer_type_node, c);
4747 simtcond = create_tmp_var (integer_type_node);
4748 gimplify_assign (simtcond, c, stmt_list);
4749 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4750 1, simtcond);
4751 c = create_tmp_var (integer_type_node);
4752 gimple_call_set_lhs (g, c);
4753 gimple_seq_add_stmt (stmt_list, g);
4754 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4755 label_true, label);
4757 else
4758 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4759 gimple_seq_add_stmt (stmt_list, stmt);
4760 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4763 for (c = clauses; c ;)
4765 tree var, new_var;
4766 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4768 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4769 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4770 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4772 var = OMP_CLAUSE_DECL (c);
4773 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4774 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4775 && is_taskloop_ctx (ctx))
4777 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4778 new_var = lookup_decl (var, ctx->outer);
4780 else
4782 new_var = lookup_decl (var, ctx);
4783 /* Avoid uninitialized warnings for lastprivate and
4784 for linear iterators. */
4785 if (predicate
4786 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4787 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4788 TREE_NO_WARNING (new_var) = 1;
4791 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4793 tree val = DECL_VALUE_EXPR (new_var);
4794 if (TREE_CODE (val) == ARRAY_REF
4795 && VAR_P (TREE_OPERAND (val, 0))
4796 && lookup_attribute ("omp simd array",
4797 DECL_ATTRIBUTES (TREE_OPERAND (val,
4798 0))))
4800 if (lastlane == NULL)
4802 lastlane = create_tmp_var (unsigned_type_node);
4803 gcall *g
4804 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4805 2, simduid,
4806 TREE_OPERAND (val, 1));
4807 gimple_call_set_lhs (g, lastlane);
4808 gimple_seq_add_stmt (stmt_list, g);
4810 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4811 TREE_OPERAND (val, 0), lastlane,
4812 NULL_TREE, NULL_TREE);
4815 else if (maybe_simt)
4817 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4818 ? DECL_VALUE_EXPR (new_var)
4819 : new_var);
4820 if (simtlast == NULL)
4822 simtlast = create_tmp_var (unsigned_type_node);
4823 gcall *g = gimple_build_call_internal
4824 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4825 gimple_call_set_lhs (g, simtlast);
4826 gimple_seq_add_stmt (stmt_list, g);
4828 x = build_call_expr_internal_loc
4829 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4830 TREE_TYPE (val), 2, val, simtlast);
4831 new_var = unshare_expr (new_var);
4832 gimplify_assign (new_var, x, stmt_list);
4833 new_var = unshare_expr (new_var);
4836 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4837 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4839 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4840 gimple_seq_add_seq (stmt_list,
4841 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4842 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4844 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4845 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4847 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4848 gimple_seq_add_seq (stmt_list,
4849 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4850 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4853 x = NULL_TREE;
4854 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4855 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4857 gcc_checking_assert (is_taskloop_ctx (ctx));
4858 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4859 ctx->outer->outer);
4860 if (is_global_var (ovar))
4861 x = ovar;
4863 if (!x)
4864 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4865 if (omp_is_reference (var))
4866 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4867 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4868 gimplify_and_add (x, stmt_list);
4870 c = OMP_CLAUSE_CHAIN (c);
4871 if (c == NULL && !par_clauses)
4873 /* If this was a workshare clause, see if it had been combined
4874 with its parallel. In that case, continue looking for the
4875 clauses also on the parallel statement itself. */
4876 if (is_parallel_ctx (ctx))
4877 break;
4879 ctx = ctx->outer;
4880 if (ctx == NULL || !is_parallel_ctx (ctx))
4881 break;
4883 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4884 OMP_CLAUSE_LASTPRIVATE);
4885 par_clauses = true;
4889 if (label)
4890 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4893 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4894 (which might be a placeholder). INNER is true if this is an inner
4895 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4896 join markers. Generate the before-loop forking sequence in
4897 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4898 general form of these sequences is
4900 GOACC_REDUCTION_SETUP
4901 GOACC_FORK
4902 GOACC_REDUCTION_INIT
4904 GOACC_REDUCTION_FINI
4905 GOACC_JOIN
4906 GOACC_REDUCTION_TEARDOWN. */
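/* Concretely, for each reduction candidate the emitted code is roughly

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, ...);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, ...);
     ... loop body ...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, ...);
     GOACC_JOIN
     outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, ...);

   matching the V1/V2/V3 temporaries created below (sketch only).  */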
4908 static void
4909 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4910 gcall *fork, gcall *join, gimple_seq *fork_seq,
4911 gimple_seq *join_seq, omp_context *ctx)
4913 gimple_seq before_fork = NULL;
4914 gimple_seq after_fork = NULL;
4915 gimple_seq before_join = NULL;
4916 gimple_seq after_join = NULL;
4917 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4918 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4919 unsigned offset = 0;
4921 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4922 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4924 tree orig = OMP_CLAUSE_DECL (c);
4925 tree var = maybe_lookup_decl (orig, ctx);
4926 tree ref_to_res = NULL_TREE;
4927 tree incoming, outgoing, v1, v2, v3;
4928 bool is_private = false;
4930 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4931 if (rcode == MINUS_EXPR)
4932 rcode = PLUS_EXPR;
4933 else if (rcode == TRUTH_ANDIF_EXPR)
4934 rcode = BIT_AND_EXPR;
4935 else if (rcode == TRUTH_ORIF_EXPR)
4936 rcode = BIT_IOR_EXPR;
4937 tree op = build_int_cst (unsigned_type_node, rcode);
4939 if (!var)
4940 var = orig;
4942 incoming = outgoing = var;
4944 if (!inner)
4946 /* See if an outer construct also reduces this variable. */
4947 omp_context *outer = ctx;
4949 while (omp_context *probe = outer->outer)
4951 enum gimple_code type = gimple_code (probe->stmt);
4952 tree cls;
4954 switch (type)
4956 case GIMPLE_OMP_FOR:
4957 cls = gimple_omp_for_clauses (probe->stmt);
4958 break;
4960 case GIMPLE_OMP_TARGET:
4961 if (gimple_omp_target_kind (probe->stmt)
4962 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4963 goto do_lookup;
4965 cls = gimple_omp_target_clauses (probe->stmt);
4966 break;
4968 default:
4969 goto do_lookup;
4972 outer = probe;
4973 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4974 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4975 && orig == OMP_CLAUSE_DECL (cls))
4977 incoming = outgoing = lookup_decl (orig, probe);
4978 goto has_outer_reduction;
4980 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4981 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4982 && orig == OMP_CLAUSE_DECL (cls))
4984 is_private = true;
4985 goto do_lookup;
4989 do_lookup:
4990 /* This is the outermost construct with this reduction,
4991 see if there's a mapping for it. */
4992 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4993 && maybe_lookup_field (orig, outer) && !is_private)
4995 ref_to_res = build_receiver_ref (orig, false, outer);
4996 if (omp_is_reference (orig))
4997 ref_to_res = build_simple_mem_ref (ref_to_res);
4999 tree type = TREE_TYPE (var);
5000 if (POINTER_TYPE_P (type))
5001 type = TREE_TYPE (type);
5003 outgoing = var;
5004 incoming = omp_reduction_init_op (loc, rcode, type);
5006 else
5008 /* Try to look at enclosing contexts for the reduction var;
5009 use the original if no mapping is found. */
5010 tree t = NULL_TREE;
5011 omp_context *c = ctx->outer;
5012 while (c && !t)
5014 t = maybe_lookup_decl (orig, c);
5015 c = c->outer;
5017 incoming = outgoing = (t ? t : orig);
5020 has_outer_reduction:;
5023 if (!ref_to_res)
5024 ref_to_res = integer_zero_node;
5026 if (omp_is_reference (orig))
5028 tree type = TREE_TYPE (var);
5029 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5031 if (!inner)
5033 tree x = create_tmp_var (TREE_TYPE (type), id);
5034 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5037 v1 = create_tmp_var (type, id);
5038 v2 = create_tmp_var (type, id);
5039 v3 = create_tmp_var (type, id);
5041 gimplify_assign (v1, var, fork_seq);
5042 gimplify_assign (v2, var, fork_seq);
5043 gimplify_assign (v3, var, fork_seq);
5045 var = build_simple_mem_ref (var);
5046 v1 = build_simple_mem_ref (v1);
5047 v2 = build_simple_mem_ref (v2);
5048 v3 = build_simple_mem_ref (v3);
5049 outgoing = build_simple_mem_ref (outgoing);
5051 if (!TREE_CONSTANT (incoming))
5052 incoming = build_simple_mem_ref (incoming);
5054 else
5055 v1 = v2 = v3 = var;
5057 /* Determine the position in the reduction buffer, which may be
5058 used by the target. */
5059 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5060 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5061 offset = (offset + align - 1) & ~(align - 1);
5062 tree off = build_int_cst (sizetype, offset);
5063 offset += GET_MODE_SIZE (mode);
5065 if (!init_code)
5067 init_code = build_int_cst (integer_type_node,
5068 IFN_GOACC_REDUCTION_INIT);
5069 fini_code = build_int_cst (integer_type_node,
5070 IFN_GOACC_REDUCTION_FINI);
5071 setup_code = build_int_cst (integer_type_node,
5072 IFN_GOACC_REDUCTION_SETUP);
5073 teardown_code = build_int_cst (integer_type_node,
5074 IFN_GOACC_REDUCTION_TEARDOWN);
5077 tree setup_call
5078 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5079 TREE_TYPE (var), 6, setup_code,
5080 unshare_expr (ref_to_res),
5081 incoming, level, op, off);
5082 tree init_call
5083 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5084 TREE_TYPE (var), 6, init_code,
5085 unshare_expr (ref_to_res),
5086 v1, level, op, off);
5087 tree fini_call
5088 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5089 TREE_TYPE (var), 6, fini_code,
5090 unshare_expr (ref_to_res),
5091 v2, level, op, off);
5092 tree teardown_call
5093 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5094 TREE_TYPE (var), 6, teardown_code,
5095 ref_to_res, v3, level, op, off);
5097 gimplify_assign (v1, setup_call, &before_fork);
5098 gimplify_assign (v2, init_call, &after_fork);
5099 gimplify_assign (v3, fini_call, &before_join);
5100 gimplify_assign (outgoing, teardown_call, &after_join);
5103 /* Now stitch things together. */
5104 gimple_seq_add_seq (fork_seq, before_fork);
5105 if (fork)
5106 gimple_seq_add_stmt (fork_seq, fork);
5107 gimple_seq_add_seq (fork_seq, after_fork);
5109 gimple_seq_add_seq (join_seq, before_join);
5110 if (join)
5111 gimple_seq_add_stmt (join_seq, join);
5112 gimple_seq_add_seq (join_seq, after_join);
5115 /* Generate code to implement the REDUCTION clauses. */
5117 static void
5118 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5120 gimple_seq sub_seq = NULL;
5121 gimple *stmt;
5122 tree x, c;
5123 int count = 0;
5125 /* OpenACC loop reductions are handled elsewhere. */
5126 if (is_gimple_omp_oacc (ctx->stmt))
5127 return;
5129 /* SIMD reductions are handled in lower_rec_input_clauses. */
5130 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5131 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5132 return;
5134 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5135 update in that case, otherwise use a lock. */
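/* I.e. a single scalar reduction is merged roughly as

     #pragma omp atomic
     *addr = *addr OP new_var;

   while multiple (or array/UDR) reductions perform all merges between
   GOMP_atomic_start ()/GOMP_atomic_end () calls (sketch).  */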
5136 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5137 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5139 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5140 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5142 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5143 count = -1;
5144 break;
5146 count++;
5149 if (count == 0)
5150 return;
5152 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5154 tree var, ref, new_var, orig_var;
5155 enum tree_code code;
5156 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5158 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5159 continue;
5161 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5162 orig_var = var = OMP_CLAUSE_DECL (c);
5163 if (TREE_CODE (var) == MEM_REF)
5165 var = TREE_OPERAND (var, 0);
5166 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5167 var = TREE_OPERAND (var, 0);
5168 if (TREE_CODE (var) == ADDR_EXPR)
5169 var = TREE_OPERAND (var, 0);
5170 else
5172 /* If this is a pointer- or reference-based array
5173 section, the var could be private in the outer
5174 context, e.g. in an orphaned loop construct. Pretend this
5175 is a private variable's outer reference. */
5176 ccode = OMP_CLAUSE_PRIVATE;
5177 if (TREE_CODE (var) == INDIRECT_REF)
5178 var = TREE_OPERAND (var, 0);
5180 orig_var = var;
5181 if (is_variable_sized (var))
5183 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5184 var = DECL_VALUE_EXPR (var);
5185 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5186 var = TREE_OPERAND (var, 0);
5187 gcc_assert (DECL_P (var));
5190 new_var = lookup_decl (var, ctx);
5191 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5192 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5193 ref = build_outer_var_ref (var, ctx, ccode);
5194 code = OMP_CLAUSE_REDUCTION_CODE (c);
5196 /* reduction(-:var) sums up the partial results, so it acts
5197 identically to reduction(+:var). */
5198 if (code == MINUS_EXPR)
5199 code = PLUS_EXPR;
5201 if (count == 1)
5203 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5205 addr = save_expr (addr);
5206 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5207 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5208 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5209 gimplify_and_add (x, stmt_seqp);
5210 return;
5212 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5214 tree d = OMP_CLAUSE_DECL (c);
5215 tree type = TREE_TYPE (d);
5216 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5217 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5218 tree ptype = build_pointer_type (TREE_TYPE (type));
5219 tree bias = TREE_OPERAND (d, 1);
5220 d = TREE_OPERAND (d, 0);
5221 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5223 tree b = TREE_OPERAND (d, 1);
5224 b = maybe_lookup_decl (b, ctx);
5225 if (b == NULL)
5227 b = TREE_OPERAND (d, 1);
5228 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5230 if (integer_zerop (bias))
5231 bias = b;
5232 else
5234 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5235 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5236 TREE_TYPE (b), b, bias);
5238 d = TREE_OPERAND (d, 0);
5240 /* For a reference, build_outer_var_ref already performs the
5241 dereference, so only new_var needs one. */
5242 if (TREE_CODE (d) == INDIRECT_REF)
5244 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5245 gcc_assert (omp_is_reference (var) && var == orig_var);
5247 else if (TREE_CODE (d) == ADDR_EXPR)
5249 if (orig_var == var)
5251 new_var = build_fold_addr_expr (new_var);
5252 ref = build_fold_addr_expr (ref);
5255 else
5257 gcc_assert (orig_var == var);
5258 if (omp_is_reference (var))
5259 ref = build_fold_addr_expr (ref);
5261 if (DECL_P (v))
5263 tree t = maybe_lookup_decl (v, ctx);
5264 if (t)
5265 v = t;
5266 else
5267 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5268 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5270 if (!integer_zerop (bias))
5272 bias = fold_convert_loc (clause_loc, sizetype, bias);
5273 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5274 TREE_TYPE (new_var), new_var,
5275 unshare_expr (bias));
5276 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5277 TREE_TYPE (ref), ref, bias);
5279 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5280 ref = fold_convert_loc (clause_loc, ptype, ref);
5281 tree m = create_tmp_var (ptype, NULL);
5282 gimplify_assign (m, new_var, stmt_seqp);
5283 new_var = m;
5284 m = create_tmp_var (ptype, NULL);
5285 gimplify_assign (m, ref, stmt_seqp);
5286 ref = m;
5287 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5288 tree body = create_artificial_label (UNKNOWN_LOCATION);
5289 tree end = create_artificial_label (UNKNOWN_LOCATION);
5290 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5291 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5292 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5293 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5295 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5296 tree decl_placeholder
5297 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5298 SET_DECL_VALUE_EXPR (placeholder, out);
5299 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5300 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5301 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5302 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5303 gimple_seq_add_seq (&sub_seq,
5304 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5305 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5306 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5307 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5309 else
5311 x = build2 (code, TREE_TYPE (out), out, priv);
5312 out = unshare_expr (out);
5313 gimplify_assign (out, x, &sub_seq);
5315 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5316 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5317 gimple_seq_add_stmt (&sub_seq, g);
5318 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5319 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5320 gimple_seq_add_stmt (&sub_seq, g);
5321 g = gimple_build_assign (i, PLUS_EXPR, i,
5322 build_int_cst (TREE_TYPE (i), 1));
5323 gimple_seq_add_stmt (&sub_seq, g);
5324 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5325 gimple_seq_add_stmt (&sub_seq, g);
5326 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5328 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5330 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5332 if (omp_is_reference (var)
5333 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5334 TREE_TYPE (ref)))
5335 ref = build_fold_addr_expr_loc (clause_loc, ref);
5336 SET_DECL_VALUE_EXPR (placeholder, ref);
5337 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5338 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5339 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5340 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5341 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5343 else
5345 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5346 ref = build_outer_var_ref (var, ctx);
5347 gimplify_assign (ref, x, &sub_seq);
5351 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5352 0);
5353 gimple_seq_add_stmt (stmt_seqp, stmt);
5355 gimple_seq_add_seq (stmt_seqp, sub_seq);
5357 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5358 0);
5359 gimple_seq_add_stmt (stmt_seqp, stmt);
5363 /* Generate code to implement the COPYPRIVATE clauses. */
5365 static void
5366 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5367 omp_context *ctx)
5369 tree c;
5371 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5373 tree var, new_var, ref, x;
5374 bool by_ref;
5375 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5377 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5378 continue;
5380 var = OMP_CLAUSE_DECL (c);
5381 by_ref = use_pointer_for_field (var, NULL);
5383 ref = build_sender_ref (var, ctx);
5384 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5385 if (by_ref)
5387 x = build_fold_addr_expr_loc (clause_loc, new_var);
5388 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5390 gimplify_assign (ref, x, slist);
5392 ref = build_receiver_ref (var, false, ctx);
5393 if (by_ref)
5395 ref = fold_convert_loc (clause_loc,
5396 build_pointer_type (TREE_TYPE (new_var)),
5397 ref);
5398 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5400 if (omp_is_reference (var))
5402 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5403 ref = build_simple_mem_ref_loc (clause_loc, ref);
5404 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5406 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5407 gimplify_and_add (x, rlist);
5412 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5413 and REDUCTION from the sender (aka parent) side. */
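/* E.g. for firstprivate(x) the parent stores the value, or its
   address when passed by reference, into the marshalling struct
   before forking, roughly .omp_data_o.x = x, and the child reads it
   back on its side (field name illustrative).  */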
5415 static void
5416 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5417 omp_context *ctx)
5419 tree c, t;
5420 int ignored_looptemp = 0;
5421 bool is_taskloop = false;
5423 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5424 by GOMP_taskloop. */
5425 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5427 ignored_looptemp = 2;
5428 is_taskloop = true;
5431 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5433 tree val, ref, x, var;
5434 bool by_ref, do_in = false, do_out = false;
5435 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5437 switch (OMP_CLAUSE_CODE (c))
5439 case OMP_CLAUSE_PRIVATE:
5440 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5441 break;
5442 continue;
5443 case OMP_CLAUSE_FIRSTPRIVATE:
5444 case OMP_CLAUSE_COPYIN:
5445 case OMP_CLAUSE_LASTPRIVATE:
5446 case OMP_CLAUSE_REDUCTION:
5447 break;
5448 case OMP_CLAUSE_SHARED:
5449 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5450 break;
5451 continue;
5452 case OMP_CLAUSE__LOOPTEMP_:
5453 if (ignored_looptemp)
5455 ignored_looptemp--;
5456 continue;
5458 break;
5459 default:
5460 continue;
5463 val = OMP_CLAUSE_DECL (c);
5464 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5465 && TREE_CODE (val) == MEM_REF)
5467 val = TREE_OPERAND (val, 0);
5468 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5469 val = TREE_OPERAND (val, 0);
5470 if (TREE_CODE (val) == INDIRECT_REF
5471 || TREE_CODE (val) == ADDR_EXPR)
5472 val = TREE_OPERAND (val, 0);
5473 if (is_variable_sized (val))
5474 continue;
5477 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5478 outer taskloop region. */
5479 omp_context *ctx_for_o = ctx;
5480 if (is_taskloop
5481 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5482 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5483 ctx_for_o = ctx->outer;
5485 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5487 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5488 && is_global_var (var))
5489 continue;
5491 t = omp_member_access_dummy_var (var);
5492 if (t)
5494 var = DECL_VALUE_EXPR (var);
5495 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5496 if (o != t)
5497 var = unshare_and_remap (var, t, o);
5498 else
5499 var = unshare_expr (var);
5502 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5504 /* Handle taskloop firstprivate/lastprivate, where the
5505 lastprivate on GIMPLE_OMP_TASK is represented as
5506 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5507 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5508 x = omp_build_component_ref (ctx->sender_decl, f);
5509 if (use_pointer_for_field (val, ctx))
5510 var = build_fold_addr_expr (var);
5511 gimplify_assign (x, var, ilist);
5512 DECL_ABSTRACT_ORIGIN (f) = NULL;
5513 continue;
5516 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5517 || val == OMP_CLAUSE_DECL (c))
5518 && is_variable_sized (val))
5519 continue;
5520 by_ref = use_pointer_for_field (val, NULL);
5522 switch (OMP_CLAUSE_CODE (c))
5524 case OMP_CLAUSE_FIRSTPRIVATE:
5525 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5526 && !by_ref
5527 && is_task_ctx (ctx))
5528 TREE_NO_WARNING (var) = 1;
5529 do_in = true;
5530 break;
5532 case OMP_CLAUSE_PRIVATE:
5533 case OMP_CLAUSE_COPYIN:
5534 case OMP_CLAUSE__LOOPTEMP_:
5535 do_in = true;
5536 break;
5538 case OMP_CLAUSE_LASTPRIVATE:
5539 if (by_ref || omp_is_reference (val))
5541 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5542 continue;
5543 do_in = true;
5545 else
5547 do_out = true;
5548 if (lang_hooks.decls.omp_private_outer_ref (val))
5549 do_in = true;
5551 break;
5553 case OMP_CLAUSE_REDUCTION:
5554 do_in = true;
5555 if (val == OMP_CLAUSE_DECL (c))
5556 do_out = !(by_ref || omp_is_reference (val));
5557 else
5558 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5559 break;
5561 default:
5562 gcc_unreachable ();
5565 if (do_in)
5567 ref = build_sender_ref (val, ctx);
5568 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5569 gimplify_assign (ref, x, ilist);
5570 if (is_task_ctx (ctx))
5571 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5574 if (do_out)
5576 ref = build_sender_ref (val, ctx);
5577 gimplify_assign (var, ref, olist);
5582 /* Generate code to implement SHARED from the sender (aka parent)
5583 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5584 list things that got automatically shared. */
5586 static void
5587 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5589 tree var, ovar, nvar, t, f, x, record_type;
5591 if (ctx->record_type == NULL)
5592 return;
5594 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5595 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5597 ovar = DECL_ABSTRACT_ORIGIN (f);
5598 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5599 continue;
5601 nvar = maybe_lookup_decl (ovar, ctx);
5602 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5603 continue;
5605 /* If CTX is a nested parallel directive, find the immediately
5606 enclosing parallel or workshare construct that contains a
5607 mapping for OVAR. */
5608 var = lookup_decl_in_outer_ctx (ovar, ctx);
5610 t = omp_member_access_dummy_var (var);
5611 if (t)
5613 var = DECL_VALUE_EXPR (var);
5614 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5615 if (o != t)
5616 var = unshare_and_remap (var, t, o);
5617 else
5618 var = unshare_expr (var);
5621 if (use_pointer_for_field (ovar, ctx))
5623 x = build_sender_ref (ovar, ctx);
5624 var = build_fold_addr_expr (var);
5625 gimplify_assign (x, var, ilist);
5627 else
5629 x = build_sender_ref (ovar, ctx);
5630 gimplify_assign (x, var, ilist);
5632 if (!TREE_READONLY (var)
5633 /* We don't need to receive a new reference to a result
5634 or parm decl. In fact we may not store to it, as we would
5635 invalidate any pending RSO (return slot optimization) and
5636 generate wrong gimple during inlining. */
5637 && !((TREE_CODE (var) == RESULT_DECL
5638 || TREE_CODE (var) == PARM_DECL)
5639 && DECL_BY_REFERENCE (var)))
5641 x = build_sender_ref (ovar, ctx);
5642 gimplify_assign (var, x, olist);
5648 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5649 other information that must be processed by the target compiler.
5650 Return the maximum number of dimensions the associated loop might
5651 be partitioned over. */
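/* The emitted marker has the shape (sketch)

     .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, levels, tag [, gang_static]);

   where TAG packs the OLF_* partitioning flags assembled below.  */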
5653 static unsigned
5654 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5655 gimple_seq *seq, omp_context *ctx)
5657 unsigned levels = 0;
5658 unsigned tag = 0;
5659 tree gang_static = NULL_TREE;
5660 auto_vec<tree, 5> args;
5662 args.quick_push (build_int_cst
5663 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5664 args.quick_push (ddvar);
5665 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5667 switch (OMP_CLAUSE_CODE (c))
5669 case OMP_CLAUSE_GANG:
5670 tag |= OLF_DIM_GANG;
5671 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5672 /* static:* is represented by -1, and we can ignore it, as
5673 scheduling is always static. */
5674 if (gang_static && integer_minus_onep (gang_static))
5675 gang_static = NULL_TREE;
5676 levels++;
5677 break;
5679 case OMP_CLAUSE_WORKER:
5680 tag |= OLF_DIM_WORKER;
5681 levels++;
5682 break;
5684 case OMP_CLAUSE_VECTOR:
5685 tag |= OLF_DIM_VECTOR;
5686 levels++;
5687 break;
5689 case OMP_CLAUSE_SEQ:
5690 tag |= OLF_SEQ;
5691 break;
5693 case OMP_CLAUSE_AUTO:
5694 tag |= OLF_AUTO;
5695 break;
5697 case OMP_CLAUSE_INDEPENDENT:
5698 tag |= OLF_INDEPENDENT;
5699 break;
5701 case OMP_CLAUSE_TILE:
5702 tag |= OLF_TILE;
5703 break;
5705 default:
5706 continue;
5710 if (gang_static)
5712 if (DECL_P (gang_static))
5713 gang_static = build_outer_var_ref (gang_static, ctx);
5714 tag |= OLF_GANG_STATIC;
5717 /* In a parallel region, loops are implicitly INDEPENDENT. */
5718 omp_context *tgt = enclosing_target_ctx (ctx);
5719 if (!tgt || is_oacc_parallel (tgt))
5720 tag |= OLF_INDEPENDENT;
5722 if (tag & OLF_TILE)
5723 /* Tiling could use all 3 levels. */
5724 levels = 3;
5725 else
5727 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5728 Ensure at least one level, or 2 for possible auto
5729 partitioning. */
5730 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5731 << OLF_DIM_BASE) | OLF_SEQ));
5733 if (levels < 1u + maybe_auto)
5734 levels = 1u + maybe_auto;
5737 args.quick_push (build_int_cst (integer_type_node, levels));
5738 args.quick_push (build_int_cst (integer_type_node, tag));
5739 if (gang_static)
5740 args.quick_push (gang_static);
5742 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5743 gimple_set_location (call, loc);
5744 gimple_set_lhs (call, ddvar);
5745 gimple_seq_add_stmt (seq, call);
5747 return levels;
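/* As an illustrative sketch (not emitted code): for a loop annotated
   "#pragma acc loop gang worker" inside an OpenACC parallel region, the
   walk above yields tag = OLF_DIM_GANG | OLF_DIM_WORKER | OLF_INDEPENDENT
   and levels = 2, so the marker call built above is approximately:

     .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);  */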
5750 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW is the
5751 partitioning level of the enclosed region. */
5753 static void
5754 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5755 tree tofollow, gimple_seq *seq)
5757 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5758 : IFN_UNIQUE_OACC_TAIL_MARK);
5759 tree marker = build_int_cst (integer_type_node, marker_kind);
5760 int nargs = 2 + (tofollow != NULL_TREE);
5761 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5762 marker, ddvar, tofollow);
5763 gimple_set_location (call, loc);
5764 gimple_set_lhs (call, ddvar);
5765 gimple_seq_add_stmt (seq, call);
5768 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5769 the loop clauses, from which we extract reductions. Initialize
5770 HEAD and TAIL. */
5772 static void
5773 lower_oacc_head_tail (location_t loc, tree clauses,
5774 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5776 bool inner = false;
5777 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5778 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5780 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5781 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5782 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5784 gcc_assert (count);
5785 for (unsigned done = 1; count; count--, done++)
5787 gimple_seq fork_seq = NULL;
5788 gimple_seq join_seq = NULL;
5790 tree place = build_int_cst (integer_type_node, -1);
5791 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5792 fork_kind, ddvar, place);
5793 gimple_set_location (fork, loc);
5794 gimple_set_lhs (fork, ddvar);
5796 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5797 join_kind, ddvar, place);
5798 gimple_set_location (join, loc);
5799 gimple_set_lhs (join, ddvar);
5801 /* Mark the beginning of this level sequence. */
5802 if (inner)
5803 lower_oacc_loop_marker (loc, ddvar, true,
5804 build_int_cst (integer_type_node, count),
5805 &fork_seq);
5806 lower_oacc_loop_marker (loc, ddvar, false,
5807 build_int_cst (integer_type_node, done),
5808 &join_seq);
5810 lower_oacc_reductions (loc, clauses, place, inner,
5811 fork, join, &fork_seq, &join_seq, ctx);
5813 /* Append this level to head. */
5814 gimple_seq_add_seq (head, fork_seq);
5815 /* Prepend it to tail. */
5816 gimple_seq_add_seq (&join_seq, *tail);
5817 *tail = join_seq;
5819 inner = true;
5822 /* Mark the end of the sequence. */
5823 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5824 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
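/* An illustrative shape of the result for two partitioning levels
   (reduction code and marker arguments elided):

     HEAD: HEAD_MARK, FORK, HEAD_MARK, FORK, HEAD_MARK
     TAIL: TAIL_MARK, JOIN, TAIL_MARK, JOIN, TAIL_MARK

   i.e. one FORK/JOIN pair per level, the JOINs appearing in reverse
   order of their FORKs because each join sequence is prepended to
   TAIL.  */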
5827 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5828 catch handler and return it. This prevents programs from violating the
5829 structured block semantics with throws. */
5831 static gimple_seq
5832 maybe_catch_exception (gimple_seq body)
5834 gimple *g;
5835 tree decl;
5837 if (!flag_exceptions)
5838 return body;
5840 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5841 decl = lang_hooks.eh_protect_cleanup_actions ();
5842 else
5843 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5845 g = gimple_build_eh_must_not_throw (decl);
5846 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5847 GIMPLE_TRY_CATCH);
5849 return gimple_seq_alloc_with_stmt (g);
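/* I.e. the result is, schematically,

     try { BODY } catch { MUST_NOT_THROW (terminate-or-trap); }

   where the handler runs the language's cleanup action if it provides
   one and __builtin_trap otherwise, so an exception escaping the
   structured block stops the program instead of unwinding (a sketch,
   not literal GIMPLE).  */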
5853 /* Routines to lower OMP directives into OMP-GIMPLE. */
5855 /* If ctx is a worksharing context inside of a cancellable parallel
5856 region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN and a
5857 conditional branch to the parallel's cancel_label, to handle
5858 cancellation in the implicit barrier. */
5860 static void
5861 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5863 gimple *omp_return = gimple_seq_last_stmt (*body);
5864 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5865 if (gimple_omp_return_nowait_p (omp_return))
5866 return;
5867 if (ctx->outer
5868 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5869 && ctx->outer->cancellable)
5871 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5872 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5873 tree lhs = create_tmp_var (c_bool_type);
5874 gimple_omp_return_set_lhs (omp_return, lhs);
5875 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5876 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5877 fold_convert (c_bool_type,
5878 boolean_false_node),
5879 ctx->outer->cancel_label, fallthru_label);
5880 gimple_seq_add_stmt (body, g);
5881 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
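/* Schematically (a sketch, not literal GIMPLE), the end of a cancellable
   workshare body then looks like:

     lhs = GIMPLE_OMP_RETURN;	// barrier result, filled in at expansion
     if (lhs != false) goto <parallel's cancel_label>;
     <fallthru_label>:  */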
5885 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5886 CTX is the enclosing OMP context for the current statement. */
5888 static void
5889 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5891 tree block, control;
5892 gimple_stmt_iterator tgsi;
5893 gomp_sections *stmt;
5894 gimple *t;
5895 gbind *new_stmt, *bind;
5896 gimple_seq ilist, dlist, olist, new_body;
5898 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5900 push_gimplify_context ();
5902 dlist = NULL;
5903 ilist = NULL;
5904 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5905 &ilist, &dlist, ctx, NULL);
5907 new_body = gimple_omp_body (stmt);
5908 gimple_omp_set_body (stmt, NULL);
5909 tgsi = gsi_start (new_body);
5910 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5912 omp_context *sctx;
5913 gimple *sec_start;
5915 sec_start = gsi_stmt (tgsi);
5916 sctx = maybe_lookup_ctx (sec_start);
5917 gcc_assert (sctx);
5919 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5920 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5921 GSI_CONTINUE_LINKING);
5922 gimple_omp_set_body (sec_start, NULL);
5924 if (gsi_one_before_end_p (tgsi))
5926 gimple_seq l = NULL;
5927 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5928 &l, ctx);
5929 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5930 gimple_omp_section_set_last (sec_start);
5933 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5934 GSI_CONTINUE_LINKING);
5937 block = make_node (BLOCK);
5938 bind = gimple_build_bind (NULL, new_body, block);
5940 olist = NULL;
5941 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5943 block = make_node (BLOCK);
5944 new_stmt = gimple_build_bind (NULL, NULL, block);
5945 gsi_replace (gsi_p, new_stmt, true);
5947 pop_gimplify_context (new_stmt);
5948 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5949 BLOCK_VARS (block) = gimple_bind_vars (bind);
5950 if (BLOCK_VARS (block))
5951 TREE_USED (block) = 1;
5953 new_body = NULL;
5954 gimple_seq_add_seq (&new_body, ilist);
5955 gimple_seq_add_stmt (&new_body, stmt);
5956 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5957 gimple_seq_add_stmt (&new_body, bind);
5959 control = create_tmp_var (unsigned_type_node, ".section");
5960 t = gimple_build_omp_continue (control, control);
5961 gimple_omp_sections_set_control (stmt, control);
5962 gimple_seq_add_stmt (&new_body, t);
5964 gimple_seq_add_seq (&new_body, olist);
5965 if (ctx->cancellable)
5966 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5967 gimple_seq_add_seq (&new_body, dlist);
5969 new_body = maybe_catch_exception (new_body);
5971 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5972 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5973 t = gimple_build_omp_return (nowait);
5974 gimple_seq_add_stmt (&new_body, t);
5975 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5977 gimple_bind_set_body (new_stmt, new_body);
5981 /* A subroutine of lower_omp_single. Expand the simple form of
5982 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5984 if (GOMP_single_start ())
5985 BODY;
5986 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5988 FIXME. It may be better to delay expanding the logic of this until
5989 pass_expand_omp. The expanded logic may make the job more difficult
5990 for a synchronization analysis pass. */
5992 static void
5993 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5995 location_t loc = gimple_location (single_stmt);
5996 tree tlabel = create_artificial_label (loc);
5997 tree flabel = create_artificial_label (loc);
5998 gimple *call, *cond;
5999 tree lhs, decl;
6001 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6002 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6003 call = gimple_build_call (decl, 0);
6004 gimple_call_set_lhs (call, lhs);
6005 gimple_seq_add_stmt (pre_p, call);
6007 cond = gimple_build_cond (EQ_EXPR, lhs,
6008 fold_convert_loc (loc, TREE_TYPE (lhs),
6009 boolean_true_node),
6010 tlabel, flabel);
6011 gimple_seq_add_stmt (pre_p, cond);
6012 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6013 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6014 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
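/* A sketch of the sequence built above, at the GIMPLE label level:

     lhs = GOMP_single_start ();
     if (lhs == true) goto tlabel; else goto flabel;
     tlabel:
       BODY;
     flabel:  */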
6018 /* A subroutine of lower_omp_single. Expand the simple form of
6019 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6021 #pragma omp single copyprivate (a, b, c)
6023 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6026 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6028 BODY;
6029 copyout.a = a;
6030 copyout.b = b;
6031 copyout.c = c;
6032 GOMP_single_copy_end (&copyout);
6034 else
6036 a = copyout_p->a;
6037 b = copyout_p->b;
6038 c = copyout_p->c;
6040 GOMP_barrier ();
6043 FIXME. It may be better to delay expanding the logic of this until
6044 pass_expand_omp. The expanded logic may make the job more difficult
6045 for a synchronization analysis pass. */
6047 static void
6048 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6049 omp_context *ctx)
6051 tree ptr_type, t, l0, l1, l2, bfn_decl;
6052 gimple_seq copyin_seq;
6053 location_t loc = gimple_location (single_stmt);
6055 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6057 ptr_type = build_pointer_type (ctx->record_type);
6058 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6060 l0 = create_artificial_label (loc);
6061 l1 = create_artificial_label (loc);
6062 l2 = create_artificial_label (loc);
6064 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6065 t = build_call_expr_loc (loc, bfn_decl, 0);
6066 t = fold_convert_loc (loc, ptr_type, t);
6067 gimplify_assign (ctx->receiver_decl, t, pre_p);
6069 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6070 build_int_cst (ptr_type, 0));
6071 t = build3 (COND_EXPR, void_type_node, t,
6072 build_and_jump (&l0), build_and_jump (&l1));
6073 gimplify_and_add (t, pre_p);
6075 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6077 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6079 copyin_seq = NULL;
6080 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6081 &copyin_seq, ctx);
6083 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6084 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6085 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6086 gimplify_and_add (t, pre_p);
6088 t = build_and_jump (&l2);
6089 gimplify_and_add (t, pre_p);
6091 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6093 gimple_seq_add_seq (pre_p, copyin_seq);
6095 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6099 /* Expand code for an OpenMP single directive. */
6101 static void
6102 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6104 tree block;
6105 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6106 gbind *bind;
6107 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6109 push_gimplify_context ();
6111 block = make_node (BLOCK);
6112 bind = gimple_build_bind (NULL, NULL, block);
6113 gsi_replace (gsi_p, bind, true);
6114 bind_body = NULL;
6115 dlist = NULL;
6116 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6117 &bind_body, &dlist, ctx, NULL);
6118 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6120 gimple_seq_add_stmt (&bind_body, single_stmt);
6122 if (ctx->record_type)
6123 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6124 else
6125 lower_omp_single_simple (single_stmt, &bind_body);
6127 gimple_omp_set_body (single_stmt, NULL);
6129 gimple_seq_add_seq (&bind_body, dlist);
6131 bind_body = maybe_catch_exception (bind_body);
6133 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6134 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6135 gimple *g = gimple_build_omp_return (nowait);
6136 gimple_seq_add_stmt (&bind_body_tail, g);
6137 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6138 if (ctx->record_type)
6140 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6141 tree clobber = build_constructor (ctx->record_type, NULL);
6142 TREE_THIS_VOLATILE (clobber) = 1;
6143 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6144 clobber), GSI_SAME_STMT);
6146 gimple_seq_add_seq (&bind_body, bind_body_tail);
6147 gimple_bind_set_body (bind, bind_body);
6149 pop_gimplify_context (bind);
6151 gimple_bind_append_vars (bind, ctx->block_vars);
6152 BLOCK_VARS (block) = ctx->block_vars;
6153 if (BLOCK_VARS (block))
6154 TREE_USED (block) = 1;
6158 /* Expand code for an OpenMP master directive. */
6160 static void
6161 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6163 tree block, lab = NULL, x, bfn_decl;
6164 gimple *stmt = gsi_stmt (*gsi_p);
6165 gbind *bind;
6166 location_t loc = gimple_location (stmt);
6167 gimple_seq tseq;
6169 push_gimplify_context ();
6171 block = make_node (BLOCK);
6172 bind = gimple_build_bind (NULL, NULL, block);
6173 gsi_replace (gsi_p, bind, true);
6174 gimple_bind_add_stmt (bind, stmt);
6176 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6177 x = build_call_expr_loc (loc, bfn_decl, 0);
6178 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6179 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6180 tseq = NULL;
6181 gimplify_and_add (x, &tseq);
6182 gimple_bind_add_seq (bind, tseq);
6184 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6185 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6186 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6187 gimple_omp_set_body (stmt, NULL);
6189 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6191 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6193 pop_gimplify_context (bind);
6195 gimple_bind_append_vars (bind, ctx->block_vars);
6196 BLOCK_VARS (block) = ctx->block_vars;
6200 /* Expand code for an OpenMP taskgroup directive. */
6202 static void
6203 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6205 gimple *stmt = gsi_stmt (*gsi_p);
6206 gcall *x;
6207 gbind *bind;
6208 tree block = make_node (BLOCK);
6210 bind = gimple_build_bind (NULL, NULL, block);
6211 gsi_replace (gsi_p, bind, true);
6212 gimple_bind_add_stmt (bind, stmt);
6214 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6215 0);
6216 gimple_bind_add_stmt (bind, x);
6218 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6219 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6220 gimple_omp_set_body (stmt, NULL);
6222 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6224 gimple_bind_append_vars (bind, ctx->block_vars);
6225 BLOCK_VARS (block) = ctx->block_vars;
6229 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6231 static void
6232 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6233 omp_context *ctx)
6235 struct omp_for_data fd;
6236 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6237 return;
6239 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6240 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6241 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6242 if (!fd.ordered)
6243 return;
6245 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6246 tree c = gimple_omp_ordered_clauses (ord_stmt);
6247 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6248 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6250 /* Merge depend clauses from multiple adjacent
6251 #pragma omp ordered depend(sink:...) constructs
6252 into one #pragma omp ordered depend(sink:...), so that
6253 we can optimize them together. */
6254 gimple_stmt_iterator gsi = *gsi_p;
6255 gsi_next (&gsi);
6256 while (!gsi_end_p (gsi))
6258 gimple *stmt = gsi_stmt (gsi);
6259 if (is_gimple_debug (stmt)
6260 || gimple_code (stmt) == GIMPLE_NOP)
6262 gsi_next (&gsi);
6263 continue;
6265 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6266 break;
6267 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6268 c = gimple_omp_ordered_clauses (ord_stmt2);
6269 if (c == NULL_TREE
6270 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6271 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6272 break;
6273 while (*list_p)
6274 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6275 *list_p = c;
6276 gsi_remove (&gsi, true);
6280 /* Canonicalize sink dependence clauses into one folded clause if
6281 possible.
6283 The basic algorithm is to create a sink vector whose first
6284 element is the GCD of all the first elements, and whose remaining
6285 elements are the minimum of the subsequent columns.
6287 We ignore dependence vectors whose first element is zero because
6288 such dependencies are known to be executed by the same thread.
6290 We take into account the direction of the loop, so a minimum
6291 becomes a maximum if the loop is iterating forwards. We also
6292 ignore sink clauses where the loop direction is unknown, or where
6293 the offsets are clearly invalid because they are not a multiple
6294 of the loop increment.
6296 For example:
6298 #pragma omp for ordered(2)
6299 for (i=0; i < N; ++i)
6300 for (j=0; j < M; ++j)
6302 #pragma omp ordered \
6303 depend(sink:i-8,j-2) \
6304 depend(sink:i,j-1) \ // Completely ignored because i+0.
6305 depend(sink:i-4,j-3) \
6306 depend(sink:i-6,j-4)
6307 #pragma omp ordered depend(source)
6310 Folded clause is:
6312 depend(sink:-gcd(8,4,6),-min(2,3,4))
6313 -or-
6314 depend(sink:-2,-2)  */
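/* Working through the example above: the first elements 8, 4 and 6 (the
   i offsets, the i+0 vector being ignored) have gcd 2, and the j offsets
   2, 3 and 4 have minimum 2, hence the folded vector (-2,-2).  */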
6317 /* FIXME: Computing GCD's where the first element is zero is
6318 non-trivial in the presence of collapsed loops. Do this later. */
6319 if (fd.collapse > 1)
6320 return;
6322 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6324 /* wide_int is not a POD so it must be default-constructed. */
6325 for (unsigned i = 0; i != 2 * len - 1; ++i)
6326 new (static_cast<void*>(folded_deps + i)) wide_int ();
6328 tree folded_dep = NULL_TREE;
6329 /* TRUE if the first dimension's offset is negative. */
6330 bool neg_offset_p = false;
6332 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6333 unsigned int i;
6334 while ((c = *list_p) != NULL)
6336 bool remove = false;
6338 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6339 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6340 goto next_ordered_clause;
6342 tree vec;
6343 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6344 vec && TREE_CODE (vec) == TREE_LIST;
6345 vec = TREE_CHAIN (vec), ++i)
6347 gcc_assert (i < len);
6349 /* omp_extract_for_data has canonicalized the condition. */
6350 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6351 || fd.loops[i].cond_code == GT_EXPR);
6352 bool forward = fd.loops[i].cond_code == LT_EXPR;
6353 bool maybe_lexically_later = true;
6355 /* While the committee makes up its mind, bail if we have any
6356 non-constant steps. */
6357 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6358 goto lower_omp_ordered_ret;
6360 tree itype = TREE_TYPE (TREE_VALUE (vec));
6361 if (POINTER_TYPE_P (itype))
6362 itype = sizetype;
6363 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6364 TYPE_PRECISION (itype),
6365 TYPE_SIGN (itype));
6367 /* Ignore invalid offsets that are not multiples of the step. */
6368 if (!wi::multiple_of_p
6369 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6370 UNSIGNED))
6372 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6373 "ignoring sink clause with offset that is not "
6374 "a multiple of the loop step");
6375 remove = true;
6376 goto next_ordered_clause;
6379 /* Calculate the first dimension. The first dimension of
6380 the folded dependency vector is the GCD of the first
6381 elements, while ignoring any first elements whose offset
6382 is 0. */
6383 if (i == 0)
6385 /* Ignore dependence vectors whose first dimension is 0. */
6386 if (offset == 0)
6388 remove = true;
6389 goto next_ordered_clause;
6391 else
6393 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6395 error_at (OMP_CLAUSE_LOCATION (c),
6396 "first offset must be in opposite direction "
6397 "of loop iterations");
6398 goto lower_omp_ordered_ret;
6400 if (forward)
6401 offset = -offset;
6402 neg_offset_p = forward;
6403 /* Initialize the first time around. */
6404 if (folded_dep == NULL_TREE)
6406 folded_dep = c;
6407 folded_deps[0] = offset;
6409 else
6410 folded_deps[0] = wi::gcd (folded_deps[0],
6411 offset, UNSIGNED);
6414 /* Calculate minimum for the remaining dimensions. */
6415 else
6417 folded_deps[len + i - 1] = offset;
6418 if (folded_dep == c)
6419 folded_deps[i] = offset;
6420 else if (maybe_lexically_later
6421 && !wi::eq_p (folded_deps[i], offset))
6423 if (forward ^ wi::gts_p (folded_deps[i], offset))
6425 unsigned int j;
6426 folded_dep = c;
6427 for (j = 1; j <= i; j++)
6428 folded_deps[j] = folded_deps[len + j - 1];
6430 else
6431 maybe_lexically_later = false;
6435 gcc_assert (i == len);
6437 remove = true;
6439 next_ordered_clause:
6440 if (remove)
6441 *list_p = OMP_CLAUSE_CHAIN (c);
6442 else
6443 list_p = &OMP_CLAUSE_CHAIN (c);
6446 if (folded_dep)
6448 if (neg_offset_p)
6449 folded_deps[0] = -folded_deps[0];
6451 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6452 if (POINTER_TYPE_P (itype))
6453 itype = sizetype;
6455 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6456 = wide_int_to_tree (itype, folded_deps[0]);
6457 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6458 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6461 lower_omp_ordered_ret:
6463 /* Ordered without clauses is #pragma omp ordered threads, while we
6464 want a nop instead if we remove all clauses. */
6465 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6466 gsi_replace (gsi_p, gimple_build_nop (), true);
6470 /* Expand code for an OpenMP ordered directive. */
6472 static void
6473 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6475 tree block;
6476 gimple *stmt = gsi_stmt (*gsi_p), *g;
6477 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6478 gcall *x;
6479 gbind *bind;
6480 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6481 OMP_CLAUSE_SIMD);
6482 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6483 loop. */
6484 bool maybe_simt
6485 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6486 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6487 OMP_CLAUSE_THREADS);
6489 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6490 OMP_CLAUSE_DEPEND))
6492 /* FIXME: This needs to be moved to the expansion to verify various
6493 conditions only testable on a cfg with dominators computed; also,
6494 all the depend clauses to be merged still might need to be available
6495 for the runtime checks. */
6496 if (0)
6497 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6498 return;
6501 push_gimplify_context ();
6503 block = make_node (BLOCK);
6504 bind = gimple_build_bind (NULL, NULL, block);
6505 gsi_replace (gsi_p, bind, true);
6506 gimple_bind_add_stmt (bind, stmt);
6508 if (simd)
6510 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6511 build_int_cst (NULL_TREE, threads));
6512 cfun->has_simduid_loops = true;
6514 else
6515 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6516 0);
6517 gimple_bind_add_stmt (bind, x);
6519 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6520 if (maybe_simt)
6522 counter = create_tmp_var (integer_type_node);
6523 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6524 gimple_call_set_lhs (g, counter);
6525 gimple_bind_add_stmt (bind, g);
6527 body = create_artificial_label (UNKNOWN_LOCATION);
6528 test = create_artificial_label (UNKNOWN_LOCATION);
6529 gimple_bind_add_stmt (bind, gimple_build_label (body));
6531 tree simt_pred = create_tmp_var (integer_type_node);
6532 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6533 gimple_call_set_lhs (g, simt_pred);
6534 gimple_bind_add_stmt (bind, g);
6536 tree t = create_artificial_label (UNKNOWN_LOCATION);
6537 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6538 gimple_bind_add_stmt (bind, g);
6540 gimple_bind_add_stmt (bind, gimple_build_label (t));
6542 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6543 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6544 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6545 gimple_omp_set_body (stmt, NULL);
6547 if (maybe_simt)
6549 gimple_bind_add_stmt (bind, gimple_build_label (test));
6550 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6551 gimple_bind_add_stmt (bind, g);
6553 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6554 tree nonneg = create_tmp_var (integer_type_node);
6555 gimple_seq tseq = NULL;
6556 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6557 gimple_bind_add_seq (bind, tseq);
6559 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6560 gimple_call_set_lhs (g, nonneg);
6561 gimple_bind_add_stmt (bind, g);
6563 tree end = create_artificial_label (UNKNOWN_LOCATION);
6564 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6565 gimple_bind_add_stmt (bind, g);
6567 gimple_bind_add_stmt (bind, gimple_build_label (end));
6569 if (simd)
6570 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6571 build_int_cst (NULL_TREE, threads));
6572 else
6573 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6574 0);
6575 gimple_bind_add_stmt (bind, x);
6577 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6579 pop_gimplify_context (bind);
6581 gimple_bind_append_vars (bind, ctx->block_vars);
6582 BLOCK_VARS (block) = gimple_bind_vars (bind);
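/* For the maybe_simt case above, the generated loop is roughly
   (a sketch; the SIMD/non-SIMD start and end calls are elided):

     counter = GOMP_SIMT_LANE ();
     body:
       simt_pred = GOMP_SIMT_ORDERED_PRED (counter);
       if (simt_pred == 0) { BODY; }
     test:
       counter = counter - 1;
       nonneg = GOMP_SIMT_VOTE_ANY (counter >= 0);
       if (nonneg != 0) goto body; else goto end;
     end:  */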
6586 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6587 substitution of a couple of function calls. But the NAMED case
6588 requires that languages coordinate a symbol name. It is therefore
6589 best put here in common code. */
6591 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6593 static void
6594 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6596 tree block;
6597 tree name, lock, unlock;
6598 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6599 gbind *bind;
6600 location_t loc = gimple_location (stmt);
6601 gimple_seq tbody;
6603 name = gimple_omp_critical_name (stmt);
6604 if (name)
6606 tree decl;
6608 if (!critical_name_mutexes)
6609 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6611 tree *n = critical_name_mutexes->get (name);
6612 if (n == NULL)
6614 char *new_str;
6616 decl = create_tmp_var_raw (ptr_type_node);
6618 new_str = ACONCAT ((".gomp_critical_user_",
6619 IDENTIFIER_POINTER (name), NULL));
6620 DECL_NAME (decl) = get_identifier (new_str);
6621 TREE_PUBLIC (decl) = 1;
6622 TREE_STATIC (decl) = 1;
6623 DECL_COMMON (decl) = 1;
6624 DECL_ARTIFICIAL (decl) = 1;
6625 DECL_IGNORED_P (decl) = 1;
6627 varpool_node::finalize_decl (decl);
6629 critical_name_mutexes->put (name, decl);
6631 else
6632 decl = *n;
6634 /* If '#pragma omp critical' is inside offloaded region or
6635 inside function marked as offloadable, the symbol must be
6636 marked as offloadable too. */
6637 omp_context *octx;
6638 if (cgraph_node::get (current_function_decl)->offloadable)
6639 varpool_node::get_create (decl)->offloadable = 1;
6640 else
6641 for (octx = ctx->outer; octx; octx = octx->outer)
6642 if (is_gimple_omp_offloaded (octx->stmt))
6644 varpool_node::get_create (decl)->offloadable = 1;
6645 break;
6648 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6649 lock = build_call_expr_loc (loc, lock, 1,
6650 build_fold_addr_expr_loc (loc, decl));
6652 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6653 unlock = build_call_expr_loc (loc, unlock, 1,
6654 build_fold_addr_expr_loc (loc, decl));
6656 else
6658 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6659 lock = build_call_expr_loc (loc, lock, 0);
6661 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6662 unlock = build_call_expr_loc (loc, unlock, 0);
6665 push_gimplify_context ();
6667 block = make_node (BLOCK);
6668 bind = gimple_build_bind (NULL, NULL, block);
6669 gsi_replace (gsi_p, bind, true);
6670 gimple_bind_add_stmt (bind, stmt);
6672 tbody = gimple_bind_body (bind);
6673 gimplify_and_add (lock, &tbody);
6674 gimple_bind_set_body (bind, tbody);
6676 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6677 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6678 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6679 gimple_omp_set_body (stmt, NULL);
6681 tbody = gimple_bind_body (bind);
6682 gimplify_and_add (unlock, &tbody);
6683 gimple_bind_set_body (bind, tbody);
6685 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6687 pop_gimplify_context (bind);
6688 gimple_bind_append_vars (bind, ctx->block_vars);
6689 BLOCK_VARS (block) = gimple_bind_vars (bind);
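/* The net effect for "#pragma omp critical (foo)" is approximately:

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     BODY;
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form uses GOMP_critical_start/end without the
   mutex argument (a sketch of the lowered form).  */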
6692 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6693 for a lastprivate clause. Given a loop control predicate of (V
6694 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6695 is appended to *DLIST; iterator initialization is appended to
6696 *BODY_P. */
6698 static void
6699 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6700 gimple_seq *dlist, struct omp_context *ctx)
6702 tree clauses, cond, vinit;
6703 enum tree_code cond_code;
6704 gimple_seq stmts;
6706 cond_code = fd->loop.cond_code;
6707 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6709 /* When possible, use a strict equality expression. This can let
6710 VRP-type optimizations deduce the value and remove a copy. */
6711 if (tree_fits_shwi_p (fd->loop.step))
6713 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6714 if (step == 1 || step == -1)
6715 cond_code = EQ_EXPR;
6718 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6719 || gimple_omp_for_grid_phony (fd->for_stmt))
6720 cond = omp_grid_lastprivate_predicate (fd);
6721 else
6723 tree n2 = fd->loop.n2;
6724 if (fd->collapse > 1
6725 && TREE_CODE (n2) != INTEGER_CST
6726 && gimple_omp_for_combined_into_p (fd->for_stmt))
6728 struct omp_context *taskreg_ctx = NULL;
6729 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6731 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6732 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6733 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6735 if (gimple_omp_for_combined_into_p (gfor))
6737 gcc_assert (ctx->outer->outer
6738 && is_parallel_ctx (ctx->outer->outer));
6739 taskreg_ctx = ctx->outer->outer;
6741 else
6743 struct omp_for_data outer_fd;
6744 omp_extract_for_data (gfor, &outer_fd, NULL);
6745 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6748 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6749 taskreg_ctx = ctx->outer->outer;
6751 else if (is_taskreg_ctx (ctx->outer))
6752 taskreg_ctx = ctx->outer;
6753 if (taskreg_ctx)
6755 int i;
6756 tree taskreg_clauses
6757 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6758 tree innerc = omp_find_clause (taskreg_clauses,
6759 OMP_CLAUSE__LOOPTEMP_);
6760 gcc_assert (innerc);
6761 for (i = 0; i < fd->collapse; i++)
6763 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6764 OMP_CLAUSE__LOOPTEMP_);
6765 gcc_assert (innerc);
6767 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6768 OMP_CLAUSE__LOOPTEMP_);
6769 if (innerc)
6770 n2 = fold_convert (TREE_TYPE (n2),
6771 lookup_decl (OMP_CLAUSE_DECL (innerc),
6772 taskreg_ctx));
6775 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6778 clauses = gimple_omp_for_clauses (fd->for_stmt);
6779 stmts = NULL;
6780 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6781 if (!gimple_seq_empty_p (stmts))
6783 gimple_seq_add_seq (&stmts, *dlist);
6784 *dlist = stmts;
6786 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6787 vinit = fd->loop.n1;
6788 if (cond_code == EQ_EXPR
6789 && tree_fits_shwi_p (fd->loop.n2)
6790 && ! integer_zerop (fd->loop.n2))
6791 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6792 else
6793 vinit = unshare_expr (vinit);
6795 /* Initialize the iterator variable, so that threads that don't execute
6796 any iterations don't execute the lastprivate clauses by accident. */
6797 gimplify_assign (fd->loop.v, vinit, body_p);
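/* E.g. for "for (V = N1; V < N2; V++)" the code built above gates the
   lastprivate assignments on "V == N2" (or "V >= N2" when the step is
   not +/-1) and pre-initializes V so that threads executing no
   iterations never satisfy the guard (a sketch; the grid cases use
   omp_grid_lastprivate_predicate instead).  */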
6802 /* Lower code for an OMP loop directive. */
6804 static void
6805 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6807 tree *rhs_p, block;
6808 struct omp_for_data fd, *fdp = NULL;
6809 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6810 gbind *new_stmt;
6811 gimple_seq omp_for_body, body, dlist;
6812 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6813 size_t i;
6815 push_gimplify_context ();
6817 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6819 block = make_node (BLOCK);
6820 new_stmt = gimple_build_bind (NULL, NULL, block);
6821 /* Replace at gsi right away, so that 'stmt' is no longer a member
6822 of a sequence, as we're going to add it to a different
6823 one below. */
6824 gsi_replace (gsi_p, new_stmt, true);
6826 /* Move declaration of temporaries in the loop body before we make
6827 it go away. */
6828 omp_for_body = gimple_omp_body (stmt);
6829 if (!gimple_seq_empty_p (omp_for_body)
6830 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6832 gbind *inner_bind
6833 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6834 tree vars = gimple_bind_vars (inner_bind);
6835 gimple_bind_append_vars (new_stmt, vars);
6836 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block; don't
6837 keep them on the inner_bind and its block. */
6838 gimple_bind_set_vars (inner_bind, NULL_TREE);
6839 if (gimple_bind_block (inner_bind))
6840 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6843 if (gimple_omp_for_combined_into_p (stmt))
6845 omp_extract_for_data (stmt, &fd, NULL);
6846 fdp = &fd;
6848 /* We need two temporaries with fd.loop.v type (istart/iend)
6849 and then (fd.collapse - 1) temporaries with the same
6850 type for count2 ... countN-1 vars if not constant. */
6851 size_t count = 2;
6852 tree type = fd.iter_type;
6853 if (fd.collapse > 1
6854 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6855 count += fd.collapse - 1;
6856 bool taskreg_for
6857 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6858 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6859 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6860 tree simtc = NULL;
6861 tree clauses = *pc;
6862 if (taskreg_for)
6863 outerc
6864 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6865 OMP_CLAUSE__LOOPTEMP_);
6866 if (ctx->simt_stmt)
6867 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6868 OMP_CLAUSE__LOOPTEMP_);
6869 for (i = 0; i < count; i++)
6871 tree temp;
6872 if (taskreg_for)
6874 gcc_assert (outerc);
6875 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6876 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6877 OMP_CLAUSE__LOOPTEMP_);
6879 else
6881 /* If there are 2 adjacent SIMD stmts, one with _simt_
6882 clause, another without, make sure they have the same
6883 decls in _looptemp_ clauses, because the outer stmt
6884 they are combined into will look up just one inner_stmt. */
6885 if (ctx->simt_stmt)
6886 temp = OMP_CLAUSE_DECL (simtc);
6887 else
6888 temp = create_tmp_var (type);
6889 insert_decl_map (&ctx->outer->cb, temp, temp);
6891 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6892 OMP_CLAUSE_DECL (*pc) = temp;
6893 pc = &OMP_CLAUSE_CHAIN (*pc);
6894 if (ctx->simt_stmt)
6895 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6896 OMP_CLAUSE__LOOPTEMP_);
6898 *pc = clauses;
6901 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6902 dlist = NULL;
6903 body = NULL;
6904 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6905 fdp);
6906 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6908 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6910 /* Lower the header expressions. At this point, we can assume that
6911 the header is of the form:
6913 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6915 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6916 using the .omp_data_s mapping, if needed. */
6917 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6919 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6920 if (!is_gimple_min_invariant (*rhs_p))
6921 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6923 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6924 if (!is_gimple_min_invariant (*rhs_p))
6925 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6927 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6928 if (!is_gimple_min_invariant (*rhs_p))
6929 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6932 /* Once lowered, extract the bounds and clauses. */
6933 omp_extract_for_data (stmt, &fd, NULL);
6935 if (is_gimple_omp_oacc (ctx->stmt)
6936 && !ctx_in_oacc_kernels_region (ctx))
6937 lower_oacc_head_tail (gimple_location (stmt),
6938 gimple_omp_for_clauses (stmt),
6939 &oacc_head, &oacc_tail, ctx);
6941 /* Add OpenACC partitioning and reduction markers just before the loop. */
6942 if (oacc_head)
6943 gimple_seq_add_seq (&body, oacc_head);
6945 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6947 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6948 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6949 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6950 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6952 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6953 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6954 OMP_CLAUSE_LINEAR_STEP (c)
6955 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6956 ctx);
6959 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6960 && gimple_omp_for_grid_phony (stmt));
6961 if (!phony_loop)
6962 gimple_seq_add_stmt (&body, stmt);
6963 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6965 if (!phony_loop)
6966 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6967 fd.loop.v));
6969 /* After the loop, add exit clauses. */
6970 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6972 if (ctx->cancellable)
6973 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6975 gimple_seq_add_seq (&body, dlist);
6977 body = maybe_catch_exception (body);
6979 if (!phony_loop)
6981 /* Region exit marker goes at the end of the loop body. */
6982 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6983 maybe_add_implicit_barrier_cancel (ctx, &body);
6986 /* Add OpenACC joining and reduction markers just after the loop. */
6987 if (oacc_tail)
6988 gimple_seq_add_seq (&body, oacc_tail);
6990 pop_gimplify_context (new_stmt);
6992 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6993 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6994 if (BLOCK_VARS (block))
6995 TREE_USED (block) = 1;
6997 gimple_bind_set_body (new_stmt, body);
6998 gimple_omp_set_body (stmt, NULL);
6999 gimple_omp_for_set_pre_body (stmt, NULL);
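/* In summary, the body assembled above is ordered as (sketch):
   rec-input clauses, pre-body, lowered bound temporaries, optional
   OpenACC head markers, lastprivate iterator init, the GIMPLE_OMP_FOR
   with its body, GIMPLE_OMP_CONTINUE, reduction code, optional cancel
   label, the dtor/lastprivate list, GIMPLE_OMP_RETURN, and optional
   OpenACC tail markers.  */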
7002 /* Callback for walk_stmts. Check whether the walked statements contain
7003 exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
7005 static tree
7006 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7007 bool *handled_ops_p,
7008 struct walk_stmt_info *wi)
7010 int *info = (int *) wi->info;
7011 gimple *stmt = gsi_stmt (*gsi_p);
7013 *handled_ops_p = true;
7014 switch (gimple_code (stmt))
7016 WALK_SUBSTMTS;
7018 case GIMPLE_OMP_FOR:
7019 case GIMPLE_OMP_SECTIONS:
7020 *info = *info == 0 ? 1 : -1;
7021 break;
7022 default:
7023 *info = -1;
7024 break;
7026 return NULL;
7029 struct omp_taskcopy_context
7031 /* This field must be at the beginning, as we do "inheritance": Some
7032 callback functions for tree-inline.c (e.g., omp_copy_decl)
7033 receive a copy_body_data pointer that is up-casted to an
7034 omp_context pointer. */
7035 copy_body_data cb;
7036 omp_context *ctx;
7039 static tree
7040 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7042 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7044 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7045 return create_tmp_var (TREE_TYPE (var));
7047 return var;
7050 static tree
7051 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7053 tree name, new_fields = NULL, type, f;
7055 type = lang_hooks.types.make_type (RECORD_TYPE);
7056 name = DECL_NAME (TYPE_NAME (orig_type));
7057 name = build_decl (gimple_location (tcctx->ctx->stmt),
7058 TYPE_DECL, name, type);
7059 TYPE_NAME (type) = name;
7061 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7063 tree new_f = copy_node (f);
7064 DECL_CONTEXT (new_f) = type;
7065 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7066 TREE_CHAIN (new_f) = new_fields;
7067 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7068 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7069 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7070 &tcctx->cb, NULL);
7071 new_fields = new_f;
7072 tcctx->cb.decl_map->put (f, new_f);
7074 TYPE_FIELDS (type) = nreverse (new_fields);
7075 layout_type (type);
7076 return type;
7079 /* Create task copyfn. */
7081 static void
7082 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7084 struct function *child_cfun;
7085 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7086 tree record_type, srecord_type, bind, list;
7087 bool record_needs_remap = false, srecord_needs_remap = false;
7088 splay_tree_node n;
7089 struct omp_taskcopy_context tcctx;
7090 location_t loc = gimple_location (task_stmt);
7092 child_fn = gimple_omp_task_copy_fn (task_stmt);
7093 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7094 gcc_assert (child_cfun->cfg == NULL);
7095 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7097 /* Reset DECL_CONTEXT on function arguments. */
7098 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7099 DECL_CONTEXT (t) = child_fn;
7101 /* Populate the function. */
7102 push_gimplify_context ();
7103 push_cfun (child_cfun);
7105 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7106 TREE_SIDE_EFFECTS (bind) = 1;
7107 list = NULL;
7108 DECL_SAVED_TREE (child_fn) = bind;
7109 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7111 /* Remap src and dst argument types if needed. */
7112 record_type = ctx->record_type;
7113 srecord_type = ctx->srecord_type;
7114 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7115 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7117 record_needs_remap = true;
7118 break;
7120 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7121 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7123 srecord_needs_remap = true;
7124 break;
7127 if (record_needs_remap || srecord_needs_remap)
7129 memset (&tcctx, '\0', sizeof (tcctx));
7130 tcctx.cb.src_fn = ctx->cb.src_fn;
7131 tcctx.cb.dst_fn = child_fn;
7132 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7133 gcc_checking_assert (tcctx.cb.src_node);
7134 tcctx.cb.dst_node = tcctx.cb.src_node;
7135 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7136 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7137 tcctx.cb.eh_lp_nr = 0;
7138 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7139 tcctx.cb.decl_map = new hash_map<tree, tree>;
7140 tcctx.ctx = ctx;
7142 if (record_needs_remap)
7143 record_type = task_copyfn_remap_type (&tcctx, record_type);
7144 if (srecord_needs_remap)
7145 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7147 else
7148 tcctx.cb.decl_map = NULL;
7150 arg = DECL_ARGUMENTS (child_fn);
7151 TREE_TYPE (arg) = build_pointer_type (record_type);
7152 sarg = DECL_CHAIN (arg);
7153 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7155 /* First pass: initialize temporaries used in record_type and srecord_type
7156 sizes and field offsets. */
7157 if (tcctx.cb.decl_map)
7158 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7159 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7161 tree *p;
7163 decl = OMP_CLAUSE_DECL (c);
7164 p = tcctx.cb.decl_map->get (decl);
7165 if (p == NULL)
7166 continue;
7167 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7168 sf = (tree) n->value;
7169 sf = *tcctx.cb.decl_map->get (sf);
7170 src = build_simple_mem_ref_loc (loc, sarg);
7171 src = omp_build_component_ref (src, sf);
7172 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7173 append_to_statement_list (t, &list);
7176 /* Second pass: copy shared var pointers and copy construct non-VLA
7177 firstprivate vars. */
7178 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7179 switch (OMP_CLAUSE_CODE (c))
7181 splay_tree_key key;
7182 case OMP_CLAUSE_SHARED:
7183 decl = OMP_CLAUSE_DECL (c);
7184 key = (splay_tree_key) decl;
7185 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7186 key = (splay_tree_key) &DECL_UID (decl);
7187 n = splay_tree_lookup (ctx->field_map, key);
7188 if (n == NULL)
7189 break;
7190 f = (tree) n->value;
7191 if (tcctx.cb.decl_map)
7192 f = *tcctx.cb.decl_map->get (f);
7193 n = splay_tree_lookup (ctx->sfield_map, key);
7194 sf = (tree) n->value;
7195 if (tcctx.cb.decl_map)
7196 sf = *tcctx.cb.decl_map->get (sf);
7197 src = build_simple_mem_ref_loc (loc, sarg);
7198 src = omp_build_component_ref (src, sf);
7199 dst = build_simple_mem_ref_loc (loc, arg);
7200 dst = omp_build_component_ref (dst, f);
7201 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7202 append_to_statement_list (t, &list);
7203 break;
7204 case OMP_CLAUSE_FIRSTPRIVATE:
7205 decl = OMP_CLAUSE_DECL (c);
7206 if (is_variable_sized (decl))
7207 break;
7208 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7209 if (n == NULL)
7210 break;
7211 f = (tree) n->value;
7212 if (tcctx.cb.decl_map)
7213 f = *tcctx.cb.decl_map->get (f);
7214 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7215 if (n != NULL)
7217 sf = (tree) n->value;
7218 if (tcctx.cb.decl_map)
7219 sf = *tcctx.cb.decl_map->get (sf);
7220 src = build_simple_mem_ref_loc (loc, sarg);
7221 src = omp_build_component_ref (src, sf);
7222 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7223 src = build_simple_mem_ref_loc (loc, src);
7225 else
7226 src = decl;
7227 dst = build_simple_mem_ref_loc (loc, arg);
7228 dst = omp_build_component_ref (dst, f);
7229 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7230 append_to_statement_list (t, &list);
7231 break;
7232 case OMP_CLAUSE_PRIVATE:
7233 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7234 break;
7235 decl = OMP_CLAUSE_DECL (c);
7236 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7237 f = (tree) n->value;
7238 if (tcctx.cb.decl_map)
7239 f = *tcctx.cb.decl_map->get (f);
7240 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7241 if (n != NULL)
7243 sf = (tree) n->value;
7244 if (tcctx.cb.decl_map)
7245 sf = *tcctx.cb.decl_map->get (sf);
7246 src = build_simple_mem_ref_loc (loc, sarg);
7247 src = omp_build_component_ref (src, sf);
7248 if (use_pointer_for_field (decl, NULL))
7249 src = build_simple_mem_ref_loc (loc, src);
7251 else
7252 src = decl;
7253 dst = build_simple_mem_ref_loc (loc, arg);
7254 dst = omp_build_component_ref (dst, f);
7255 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7256 append_to_statement_list (t, &list);
7257 break;
7258 default:
7259 break;
7262 /* Last pass: handle VLA firstprivates. */
7263 if (tcctx.cb.decl_map)
7264 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7265 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7267 tree ind, ptr, df;
7269 decl = OMP_CLAUSE_DECL (c);
7270 if (!is_variable_sized (decl))
7271 continue;
7272 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7273 if (n == NULL)
7274 continue;
7275 f = (tree) n->value;
7276 f = *tcctx.cb.decl_map->get (f);
7277 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7278 ind = DECL_VALUE_EXPR (decl);
7279 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7280 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7281 n = splay_tree_lookup (ctx->sfield_map,
7282 (splay_tree_key) TREE_OPERAND (ind, 0));
7283 sf = (tree) n->value;
7284 sf = *tcctx.cb.decl_map->get (sf);
7285 src = build_simple_mem_ref_loc (loc, sarg);
7286 src = omp_build_component_ref (src, sf);
7287 src = build_simple_mem_ref_loc (loc, src);
7288 dst = build_simple_mem_ref_loc (loc, arg);
7289 dst = omp_build_component_ref (dst, f);
7290 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7291 append_to_statement_list (t, &list);
7292 n = splay_tree_lookup (ctx->field_map,
7293 (splay_tree_key) TREE_OPERAND (ind, 0));
7294 df = (tree) n->value;
7295 df = *tcctx.cb.decl_map->get (df);
7296 ptr = build_simple_mem_ref_loc (loc, arg);
7297 ptr = omp_build_component_ref (ptr, df);
7298 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7299 build_fold_addr_expr_loc (loc, dst));
7300 append_to_statement_list (t, &list);
7303 t = build1 (RETURN_EXPR, void_type_node, NULL);
7304 append_to_statement_list (t, &list);
7306 if (tcctx.cb.decl_map)
7307 delete tcctx.cb.decl_map;
7308 pop_gimplify_context (NULL);
7309 BIND_EXPR_BODY (bind) = list;
7310 pop_cfun ();
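/* The generated copy function has, schematically, the shape

     void __task_copyfn (struct .omp_data_s *arg,
			 struct .omp_data_s *sarg)
     {
       arg->shared_ptr = sarg->shared_ptr;	 // shared vars
       copy-ctor (&arg->fpvar, &sarg->fpvar);	 // firstprivate vars
       return;
     }

   (field and function names here are illustrative, not the emitted
   ones).  */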
7313 static void
7314 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7316 tree c, clauses;
7317 gimple *g;
7318 size_t n_in = 0, n_out = 0, idx = 2, i;
7320 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7321 gcc_assert (clauses);
7322 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7323 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7324 switch (OMP_CLAUSE_DEPEND_KIND (c))
7326 case OMP_CLAUSE_DEPEND_IN:
7327 n_in++;
7328 break;
7329 case OMP_CLAUSE_DEPEND_OUT:
7330 case OMP_CLAUSE_DEPEND_INOUT:
7331 n_out++;
7332 break;
7333 case OMP_CLAUSE_DEPEND_SOURCE:
7334 case OMP_CLAUSE_DEPEND_SINK:
7335 /* FALLTHRU */
7336 default:
7337 gcc_unreachable ();
7339 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7340 tree array = create_tmp_var (type);
7341 TREE_ADDRESSABLE (array) = 1;
7342 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7343 NULL_TREE);
7344 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7345 gimple_seq_add_stmt (iseq, g);
7346 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7347 NULL_TREE);
7348 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7349 gimple_seq_add_stmt (iseq, g);
7350 for (i = 0; i < 2; i++)
7352 if ((i ? n_in : n_out) == 0)
7353 continue;
7354 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7355 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7356 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7358 tree t = OMP_CLAUSE_DECL (c);
7359 t = fold_convert (ptr_type_node, t);
7360 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7361 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7362 NULL_TREE, NULL_TREE);
7363 g = gimple_build_assign (r, t);
7364 gimple_seq_add_stmt (iseq, g);
7367 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7368 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7369 OMP_CLAUSE_CHAIN (c) = *pclauses;
7370 *pclauses = c;
7371 tree clobber = build_constructor (type, NULL);
7372 TREE_THIS_VOLATILE (clobber) = 1;
7373 g = gimple_build_assign (array, clobber);
7374 gimple_seq_add_stmt (oseq, g);
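/* The depend array built above is laid out as (sketch):

     array[0] = n_in + n_out;	// total number of depend addresses
     array[1] = n_out;		// out/inout entries come first
     array[2 .. n_out+1]	   addresses of out/inout operands
     array[n_out+2 .. end]	   addresses of in operands

   and its address is prepended to *PCLAUSES as a new
   OMP_CLAUSE_DEPEND.  */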
7377 /* Lower the OpenMP parallel or task directive in the current statement
7378 in GSI_P. CTX holds context information for the directive. */
7380 static void
7381 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7383 tree clauses;
7384 tree child_fn, t;
7385 gimple *stmt = gsi_stmt (*gsi_p);
7386 gbind *par_bind, *bind, *dep_bind = NULL;
7387 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7388 location_t loc = gimple_location (stmt);
7390 clauses = gimple_omp_taskreg_clauses (stmt);
7391 par_bind
7392 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7393 par_body = gimple_bind_body (par_bind);
7394 child_fn = ctx->cb.dst_fn;
7395 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7396 && !gimple_omp_parallel_combined_p (stmt))
7398 struct walk_stmt_info wi;
7399 int ws_num = 0;
7401 memset (&wi, 0, sizeof (wi));
7402 wi.info = &ws_num;
7403 wi.val_only = true;
7404 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7405 if (ws_num == 1)
7406 gimple_omp_parallel_set_combined_p (stmt, true);
7408 gimple_seq dep_ilist = NULL;
7409 gimple_seq dep_olist = NULL;
7410 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7411 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7413 push_gimplify_context ();
7414 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7415 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7416 &dep_ilist, &dep_olist);
7419 if (ctx->srecord_type)
7420 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7422 push_gimplify_context ();
7424 par_olist = NULL;
7425 par_ilist = NULL;
7426 par_rlist = NULL;
7427 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7428 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7429 if (phony_construct && ctx->record_type)
7431 gcc_checking_assert (!ctx->receiver_decl);
7432 ctx->receiver_decl = create_tmp_var
7433 (build_reference_type (ctx->record_type), ".omp_rec");
7435 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7436 lower_omp (&par_body, ctx);
7437 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7438 lower_reduction_clauses (clauses, &par_rlist, ctx);
7440 /* Declare all the variables created by mapping and the variables
7441 declared in the scope of the parallel body. */
7442 record_vars_into (ctx->block_vars, child_fn);
7443 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7445 if (ctx->record_type)
7447 ctx->sender_decl
7448 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7449 : ctx->record_type, ".omp_data_o");
7450 DECL_NAMELESS (ctx->sender_decl) = 1;
7451 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7452 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7455 olist = NULL;
7456 ilist = NULL;
7457 lower_send_clauses (clauses, &ilist, &olist, ctx);
7458 lower_send_shared_vars (&ilist, &olist, ctx);
7460 if (ctx->record_type)
7462 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7463 TREE_THIS_VOLATILE (clobber) = 1;
7464 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7465 clobber));
7468 /* Once all the expansions are done, sequence all the different
7469 fragments inside gimple_omp_body. */
7471 new_body = NULL;
7473 if (ctx->record_type)
7475 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7476 /* fixup_child_record_type might have changed receiver_decl's type. */
7477 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7478 gimple_seq_add_stmt (&new_body,
7479 gimple_build_assign (ctx->receiver_decl, t));
7482 gimple_seq_add_seq (&new_body, par_ilist);
7483 gimple_seq_add_seq (&new_body, par_body);
7484 gimple_seq_add_seq (&new_body, par_rlist);
7485 if (ctx->cancellable)
7486 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7487 gimple_seq_add_seq (&new_body, par_olist);
7488 new_body = maybe_catch_exception (new_body);
7489 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7490 gimple_seq_add_stmt (&new_body,
7491 gimple_build_omp_continue (integer_zero_node,
7492 integer_zero_node));
7493 if (!phony_construct)
7495 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7496 gimple_omp_set_body (stmt, new_body);
7499 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7500 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7501 gimple_bind_add_seq (bind, ilist);
7502 if (!phony_construct)
7503 gimple_bind_add_stmt (bind, stmt);
7504 else
7505 gimple_bind_add_seq (bind, new_body);
7506 gimple_bind_add_seq (bind, olist);
7508 pop_gimplify_context (NULL);
7510 if (dep_bind)
7512 gimple_bind_add_seq (dep_bind, dep_ilist);
7513 gimple_bind_add_stmt (dep_bind, bind);
7514 gimple_bind_add_seq (dep_bind, dep_olist);
7515 pop_gimplify_context (dep_bind);
7519 /* Lower the GIMPLE_OMP_TARGET in the current statement
7520 in GSI_P. CTX holds context information for the directive. */
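/* For an offloaded region, the mapped objects are described to libgomp
   by three parallel arrays built below (shown here roughly):

     .omp_data_arr[i]   -- host address of the i-th mapped object
     .omp_data_sizes[i] -- its size in bytes
     .omp_data_kinds[i] -- its mapping kind, with ceil_log2 of the
                           alignment encoded in the high bits

   which together form the data argument of the target statement.  */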
7522 static void
7523 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7525 tree clauses;
7526 tree child_fn, t, c;
7527 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7528 gbind *tgt_bind, *bind, *dep_bind = NULL;
7529 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7530 location_t loc = gimple_location (stmt);
7531 bool offloaded, data_region;
7532 unsigned int map_cnt = 0;
7534 offloaded = is_gimple_omp_offloaded (stmt);
7535 switch (gimple_omp_target_kind (stmt))
7537 case GF_OMP_TARGET_KIND_REGION:
7538 case GF_OMP_TARGET_KIND_UPDATE:
7539 case GF_OMP_TARGET_KIND_ENTER_DATA:
7540 case GF_OMP_TARGET_KIND_EXIT_DATA:
7541 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7542 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7543 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7544 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7545 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7546 data_region = false;
7547 break;
7548 case GF_OMP_TARGET_KIND_DATA:
7549 case GF_OMP_TARGET_KIND_OACC_DATA:
7550 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7551 data_region = true;
7552 break;
7553 default:
7554 gcc_unreachable ();
7557 clauses = gimple_omp_target_clauses (stmt);
7559 gimple_seq dep_ilist = NULL;
7560 gimple_seq dep_olist = NULL;
7561 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7563 push_gimplify_context ();
7564 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7565 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7566 &dep_ilist, &dep_olist);
7569 tgt_bind = NULL;
7570 tgt_body = NULL;
7571 if (offloaded)
7573 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7574 tgt_body = gimple_bind_body (tgt_bind);
7576 else if (data_region)
7577 tgt_body = gimple_omp_body (stmt);
7578 child_fn = ctx->cb.dst_fn;
7580 push_gimplify_context ();
7581 fplist = NULL;
7583 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7584 switch (OMP_CLAUSE_CODE (c))
7586 tree var, x;
7588 default:
7589 break;
7590 case OMP_CLAUSE_MAP:
7591 #if CHECKING_P
7592 /* First check what we're prepared to handle in the following. */
7593 switch (OMP_CLAUSE_MAP_KIND (c))
7595 case GOMP_MAP_ALLOC:
7596 case GOMP_MAP_TO:
7597 case GOMP_MAP_FROM:
7598 case GOMP_MAP_TOFROM:
7599 case GOMP_MAP_POINTER:
7600 case GOMP_MAP_TO_PSET:
7601 case GOMP_MAP_DELETE:
7602 case GOMP_MAP_RELEASE:
7603 case GOMP_MAP_ALWAYS_TO:
7604 case GOMP_MAP_ALWAYS_FROM:
7605 case GOMP_MAP_ALWAYS_TOFROM:
7606 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7607 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7608 case GOMP_MAP_STRUCT:
7609 case GOMP_MAP_ALWAYS_POINTER:
7610 break;
7611 case GOMP_MAP_FORCE_ALLOC:
7612 case GOMP_MAP_FORCE_TO:
7613 case GOMP_MAP_FORCE_FROM:
7614 case GOMP_MAP_FORCE_TOFROM:
7615 case GOMP_MAP_FORCE_PRESENT:
7616 case GOMP_MAP_FORCE_DEVICEPTR:
7617 case GOMP_MAP_DEVICE_RESIDENT:
7618 case GOMP_MAP_LINK:
7619 gcc_assert (is_gimple_omp_oacc (stmt));
7620 break;
7621 default:
7622 gcc_unreachable ();
7624 #endif
7625 /* FALLTHRU */
7626 case OMP_CLAUSE_TO:
7627 case OMP_CLAUSE_FROM:
7628 oacc_firstprivate:
7629 var = OMP_CLAUSE_DECL (c);
7630 if (!DECL_P (var))
7632 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7633 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7634 && (OMP_CLAUSE_MAP_KIND (c)
7635 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7636 map_cnt++;
7637 continue;
7640 if (DECL_SIZE (var)
7641 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7643 tree var2 = DECL_VALUE_EXPR (var);
7644 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7645 var2 = TREE_OPERAND (var2, 0);
7646 gcc_assert (DECL_P (var2));
7647 var = var2;
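/* Firstprivate pointers/references to an offloaded region are not
   counted as mappings.  For an array-typed variable, remap the
   private copy through a fresh pointer temporary that will receive
   the array's address on the device.  */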
7650 if (offloaded
7651 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7652 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7653 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7655 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7657 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7658 && varpool_node::get_create (var)->offloadable)
7659 continue;
7661 tree type = build_pointer_type (TREE_TYPE (var));
7662 tree new_var = lookup_decl (var, ctx);
7663 x = create_tmp_var_raw (type, get_name (new_var));
7664 gimple_add_tmp_var (x);
7665 x = build_simple_mem_ref (x);
7666 SET_DECL_VALUE_EXPR (new_var, x);
7667 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7669 continue;
7672 if (!maybe_lookup_field (var, ctx))
7673 continue;
7675 /* Don't remap oacc parallel reduction variables, because the
7676 intermediate result must be local to each gang. */
7677 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7678 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7680 x = build_receiver_ref (var, true, ctx);
7681 tree new_var = lookup_decl (var, ctx);
7683 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7684 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7685 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7686 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7687 x = build_simple_mem_ref (x);
7688 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7690 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7691 if (omp_is_reference (new_var))
7693 /* Create a local object to hold the instance
7694 value. */
7695 tree type = TREE_TYPE (TREE_TYPE (new_var));
7696 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7697 tree inst = create_tmp_var (type, id);
7698 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7699 x = build_fold_addr_expr (inst);
7701 gimplify_assign (new_var, x, &fplist);
7703 else if (DECL_P (new_var))
7705 SET_DECL_VALUE_EXPR (new_var, x);
7706 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7708 else
7709 gcc_unreachable ();
7711 map_cnt++;
7712 break;
7714 case OMP_CLAUSE_FIRSTPRIVATE:
7715 if (is_oacc_parallel (ctx))
7716 goto oacc_firstprivate;
7717 map_cnt++;
7718 var = OMP_CLAUSE_DECL (c);
7719 if (!omp_is_reference (var)
7720 && !is_gimple_reg_type (TREE_TYPE (var)))
7722 tree new_var = lookup_decl (var, ctx);
7723 if (is_variable_sized (var))
7725 tree pvar = DECL_VALUE_EXPR (var);
7726 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7727 pvar = TREE_OPERAND (pvar, 0);
7728 gcc_assert (DECL_P (pvar));
7729 tree new_pvar = lookup_decl (pvar, ctx);
7730 x = build_fold_indirect_ref (new_pvar);
7731 TREE_THIS_NOTRAP (x) = 1;
7733 else
7734 x = build_receiver_ref (var, true, ctx);
7735 SET_DECL_VALUE_EXPR (new_var, x);
7736 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7738 break;
7740 case OMP_CLAUSE_PRIVATE:
7741 if (is_gimple_omp_oacc (ctx->stmt))
7742 break;
7743 var = OMP_CLAUSE_DECL (c);
7744 if (is_variable_sized (var))
7746 tree new_var = lookup_decl (var, ctx);
7747 tree pvar = DECL_VALUE_EXPR (var);
7748 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7749 pvar = TREE_OPERAND (pvar, 0);
7750 gcc_assert (DECL_P (pvar));
7751 tree new_pvar = lookup_decl (pvar, ctx);
7752 x = build_fold_indirect_ref (new_pvar);
7753 TREE_THIS_NOTRAP (x) = 1;
7754 SET_DECL_VALUE_EXPR (new_var, x);
7755 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7757 break;
7759 case OMP_CLAUSE_USE_DEVICE_PTR:
7760 case OMP_CLAUSE_IS_DEVICE_PTR:
7761 var = OMP_CLAUSE_DECL (c);
7762 map_cnt++;
7763 if (is_variable_sized (var))
7765 tree new_var = lookup_decl (var, ctx);
7766 tree pvar = DECL_VALUE_EXPR (var);
7767 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7768 pvar = TREE_OPERAND (pvar, 0);
7769 gcc_assert (DECL_P (pvar));
7770 tree new_pvar = lookup_decl (pvar, ctx);
7771 x = build_fold_indirect_ref (new_pvar);
7772 TREE_THIS_NOTRAP (x) = 1;
7773 SET_DECL_VALUE_EXPR (new_var, x);
7774 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7776 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7778 tree new_var = lookup_decl (var, ctx);
7779 tree type = build_pointer_type (TREE_TYPE (var));
7780 x = create_tmp_var_raw (type, get_name (new_var));
7781 gimple_add_tmp_var (x);
7782 x = build_simple_mem_ref (x);
7783 SET_DECL_VALUE_EXPR (new_var, x);
7784 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7786 else
7788 tree new_var = lookup_decl (var, ctx);
7789 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7790 gimple_add_tmp_var (x);
7791 SET_DECL_VALUE_EXPR (new_var, x);
7792 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7794 break;
7797 if (offloaded)
7799 target_nesting_level++;
7800 lower_omp (&tgt_body, ctx);
7801 target_nesting_level--;
7803 else if (data_region)
7804 lower_omp (&tgt_body, ctx);
7806 if (offloaded)
7808 /* Declare all the variables created by mapping and the variables
7809 declared in the scope of the target body. */
7810 record_vars_into (ctx->block_vars, child_fn);
7811 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7814 olist = NULL;
7815 ilist = NULL;
7816 if (ctx->record_type)
7818 ctx->sender_decl
7819 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7820 DECL_NAMELESS (ctx->sender_decl) = 1;
7821 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7822 t = make_tree_vec (3);
7823 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7824 TREE_VEC_ELT (t, 1)
7825 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7826 ".omp_data_sizes");
7827 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7828 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7829 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7830 tree tkind_type = short_unsigned_type_node;
7831 int talign_shift = 8;
7832 TREE_VEC_ELT (t, 2)
7833 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7834 ".omp_data_kinds");
7835 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7836 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7837 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7838 gimple_omp_target_set_data_arg (stmt, t);
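/* Walk the clauses a second time, filling in the sender side of each
   mapping and collecting constructor elements for the size and kind
   arrays as we go.  */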
7840 vec<constructor_elt, va_gc> *vsize;
7841 vec<constructor_elt, va_gc> *vkind;
7842 vec_alloc (vsize, map_cnt);
7843 vec_alloc (vkind, map_cnt);
7844 unsigned int map_idx = 0;
7846 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7847 switch (OMP_CLAUSE_CODE (c))
7849 tree ovar, nc, s, purpose, var, x, type;
7850 unsigned int talign;
7852 default:
7853 break;
7855 case OMP_CLAUSE_MAP:
7856 case OMP_CLAUSE_TO:
7857 case OMP_CLAUSE_FROM:
7858 oacc_firstprivate_map:
7859 nc = c;
7860 ovar = OMP_CLAUSE_DECL (c);
7861 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7862 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7863 || (OMP_CLAUSE_MAP_KIND (c)
7864 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7865 break;
7866 if (!DECL_P (ovar))
7868 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7869 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7871 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7872 == get_base_address (ovar));
7873 nc = OMP_CLAUSE_CHAIN (c);
7874 ovar = OMP_CLAUSE_DECL (nc);
7876 else
7878 tree x = build_sender_ref (ovar, ctx);
7879 tree v
7880 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7881 gimplify_assign (x, v, &ilist);
7882 nc = NULL_TREE;
7885 else
7887 if (DECL_SIZE (ovar)
7888 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7890 tree ovar2 = DECL_VALUE_EXPR (ovar);
7891 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7892 ovar2 = TREE_OPERAND (ovar2, 0);
7893 gcc_assert (DECL_P (ovar2));
7894 ovar = ovar2;
7896 if (!maybe_lookup_field (ovar, ctx))
7897 continue;
7900 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7901 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7902 talign = DECL_ALIGN_UNIT (ovar);
7903 if (nc)
7905 var = lookup_decl_in_outer_ctx (ovar, ctx);
7906 x = build_sender_ref (ovar, ctx);
7908 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7909 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7910 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7911 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7913 gcc_assert (offloaded);
7914 tree avar
7915 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7916 mark_addressable (avar);
7917 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7918 talign = DECL_ALIGN_UNIT (avar);
7919 avar = build_fold_addr_expr (avar);
7920 gimplify_assign (x, avar, &ilist);
7922 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7924 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7925 if (!omp_is_reference (var))
7927 if (is_gimple_reg (var)
7928 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7929 TREE_NO_WARNING (var) = 1;
7930 var = build_fold_addr_expr (var);
7932 else
7933 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7934 gimplify_assign (x, var, &ilist);
7936 else if (is_gimple_reg (var))
7938 gcc_assert (offloaded);
7939 tree avar = create_tmp_var (TREE_TYPE (var));
7940 mark_addressable (avar);
7941 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7942 if (GOMP_MAP_COPY_TO_P (map_kind)
7943 || map_kind == GOMP_MAP_POINTER
7944 || map_kind == GOMP_MAP_TO_PSET
7945 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7947 /* If we need to initialize a temporary
7948 with VAR because it is not addressable, and
7949 the variable hasn't been initialized yet, then
7950 we'll get a warning for the store to avar.
7951 Don't warn in that case; the mapping might
7952 be implicit. */
7953 TREE_NO_WARNING (var) = 1;
7954 gimplify_assign (avar, var, &ilist);
7956 avar = build_fold_addr_expr (avar);
7957 gimplify_assign (x, avar, &ilist);
7958 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7959 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7960 && !TYPE_READONLY (TREE_TYPE (var)))
7962 x = unshare_expr (x);
7963 x = build_simple_mem_ref (x);
7964 gimplify_assign (var, x, &olist);
7967 else
7969 var = build_fold_addr_expr (var);
7970 gimplify_assign (x, var, &ilist);
7973 s = NULL_TREE;
7974 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7976 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7977 s = TREE_TYPE (ovar);
7978 if (TREE_CODE (s) == REFERENCE_TYPE)
7979 s = TREE_TYPE (s);
7980 s = TYPE_SIZE_UNIT (s);
7982 else
7983 s = OMP_CLAUSE_SIZE (c);
7984 if (s == NULL_TREE)
7985 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7986 s = fold_convert (size_type_node, s);
7987 purpose = size_int (map_idx++);
7988 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7989 if (TREE_CODE (s) != INTEGER_CST)
7990 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7992 unsigned HOST_WIDE_INT tkind, tkind_zero;
7993 switch (OMP_CLAUSE_CODE (c))
7995 case OMP_CLAUSE_MAP:
7996 tkind = OMP_CLAUSE_MAP_KIND (c);
7997 tkind_zero = tkind;
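/* An array section whose length may turn out to be zero at run time
   needs an alternative "zero length" mapping kind; which of the two
   is used is decided below once the size expression is known.  */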
7998 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7999 switch (tkind)
8001 case GOMP_MAP_ALLOC:
8002 case GOMP_MAP_TO:
8003 case GOMP_MAP_FROM:
8004 case GOMP_MAP_TOFROM:
8005 case GOMP_MAP_ALWAYS_TO:
8006 case GOMP_MAP_ALWAYS_FROM:
8007 case GOMP_MAP_ALWAYS_TOFROM:
8008 case GOMP_MAP_RELEASE:
8009 case GOMP_MAP_FORCE_TO:
8010 case GOMP_MAP_FORCE_FROM:
8011 case GOMP_MAP_FORCE_TOFROM:
8012 case GOMP_MAP_FORCE_PRESENT:
8013 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8014 break;
8015 case GOMP_MAP_DELETE:
8016 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
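/* FALLTHRU */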
8017 default:
8018 break;
8020 if (tkind_zero != tkind)
8022 if (integer_zerop (s))
8023 tkind = tkind_zero;
8024 else if (integer_nonzerop (s))
8025 tkind_zero = tkind;
8027 break;
8028 case OMP_CLAUSE_FIRSTPRIVATE:
8029 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8030 tkind = GOMP_MAP_TO;
8031 tkind_zero = tkind;
8032 break;
8033 case OMP_CLAUSE_TO:
8034 tkind = GOMP_MAP_TO;
8035 tkind_zero = tkind;
8036 break;
8037 case OMP_CLAUSE_FROM:
8038 tkind = GOMP_MAP_FROM;
8039 tkind_zero = tkind;
8040 break;
8041 default:
8042 gcc_unreachable ();
8044 gcc_checking_assert (tkind
8045 < (HOST_WIDE_INT_C (1U) << talign_shift));
8046 gcc_checking_assert (tkind_zero
8047 < (HOST_WIDE_INT_C (1U) << talign_shift));
8048 talign = ceil_log2 (talign);
8049 tkind |= talign << talign_shift;
8050 tkind_zero |= talign << talign_shift;
8051 gcc_checking_assert (tkind
8052 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8053 gcc_checking_assert (tkind_zero
8054 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
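/* If the zero-length variant differs, select the kind at run time
   based on whether the section size is zero; the kinds array can
   then no longer be statically initialized.  */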
8055 if (tkind == tkind_zero)
8056 x = build_int_cstu (tkind_type, tkind);
8057 else
8059 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8060 x = build3 (COND_EXPR, tkind_type,
8061 fold_build2 (EQ_EXPR, boolean_type_node,
8062 unshare_expr (s), size_zero_node),
8063 build_int_cstu (tkind_type, tkind_zero),
8064 build_int_cstu (tkind_type, tkind));
8066 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8067 if (nc && nc != c)
8068 c = nc;
8069 break;
8071 case OMP_CLAUSE_FIRSTPRIVATE:
8072 if (is_oacc_parallel (ctx))
8073 goto oacc_firstprivate_map;
8074 ovar = OMP_CLAUSE_DECL (c);
8075 if (omp_is_reference (ovar))
8076 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8077 else
8078 talign = DECL_ALIGN_UNIT (ovar);
8079 var = lookup_decl_in_outer_ctx (ovar, ctx);
8080 x = build_sender_ref (ovar, ctx);
8081 tkind = GOMP_MAP_FIRSTPRIVATE;
8082 type = TREE_TYPE (ovar);
8083 if (omp_is_reference (ovar))
8084 type = TREE_TYPE (type);
8085 if ((INTEGRAL_TYPE_P (type)
8086 && TYPE_PRECISION (type) <= POINTER_SIZE)
8087 || TREE_CODE (type) == POINTER_TYPE)
8089 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8090 tree t = var;
8091 if (omp_is_reference (var))
8092 t = build_simple_mem_ref (var);
8093 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8094 TREE_NO_WARNING (var) = 1;
8095 if (TREE_CODE (type) != POINTER_TYPE)
8096 t = fold_convert (pointer_sized_int_node, t);
8097 t = fold_convert (TREE_TYPE (x), t);
8098 gimplify_assign (x, t, &ilist);
8100 else if (omp_is_reference (var))
8101 gimplify_assign (x, var, &ilist);
8102 else if (is_gimple_reg (var))
8104 tree avar = create_tmp_var (TREE_TYPE (var));
8105 mark_addressable (avar);
8106 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8107 TREE_NO_WARNING (var) = 1;
8108 gimplify_assign (avar, var, &ilist);
8109 avar = build_fold_addr_expr (avar);
8110 gimplify_assign (x, avar, &ilist);
8112 else
8114 var = build_fold_addr_expr (var);
8115 gimplify_assign (x, var, &ilist);
8117 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8118 s = size_int (0);
8119 else if (omp_is_reference (ovar))
8120 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8121 else
8122 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8123 s = fold_convert (size_type_node, s);
8124 purpose = size_int (map_idx++);
8125 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8126 if (TREE_CODE (s) != INTEGER_CST)
8127 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8129 gcc_checking_assert (tkind
8130 < (HOST_WIDE_INT_C (1U) << talign_shift));
8131 talign = ceil_log2 (talign);
8132 tkind |= talign << talign_shift;
8133 gcc_checking_assert (tkind
8134 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8135 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8136 build_int_cstu (tkind_type, tkind));
8137 break;
8139 case OMP_CLAUSE_USE_DEVICE_PTR:
8140 case OMP_CLAUSE_IS_DEVICE_PTR:
8141 ovar = OMP_CLAUSE_DECL (c);
8142 var = lookup_decl_in_outer_ctx (ovar, ctx);
8143 x = build_sender_ref (ovar, ctx);
8144 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8145 tkind = GOMP_MAP_USE_DEVICE_PTR;
8146 else
8147 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8148 type = TREE_TYPE (ovar);
8149 if (TREE_CODE (type) == ARRAY_TYPE)
8150 var = build_fold_addr_expr (var);
8151 else
8153 if (omp_is_reference (ovar))
8155 type = TREE_TYPE (type);
8156 if (TREE_CODE (type) != ARRAY_TYPE)
8157 var = build_simple_mem_ref (var);
8158 var = fold_convert (TREE_TYPE (x), var);
8161 gimplify_assign (x, var, &ilist);
8162 s = size_int (0);
8163 purpose = size_int (map_idx++);
8164 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8165 gcc_checking_assert (tkind
8166 < (HOST_WIDE_INT_C (1U) << talign_shift));
8167 gcc_checking_assert (tkind
8168 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8169 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8170 build_int_cstu (tkind_type, tkind));
8171 break;
8174 gcc_assert (map_idx == map_cnt);
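/* Attach the collected elements as initializers of the size and kind
   arrays.  Arrays whose contents ended up non-constant were marked
   non-static above and are instead initialized on entry and clobbered
   on exit.  */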
8176 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8177 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8178 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8179 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8180 for (int i = 1; i <= 2; i++)
8181 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8183 gimple_seq initlist = NULL;
8184 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8185 TREE_VEC_ELT (t, i)),
8186 &initlist, true, NULL_TREE);
8187 gimple_seq_add_seq (&ilist, initlist);
8189 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8190 NULL);
8191 TREE_THIS_VOLATILE (clobber) = 1;
8192 gimple_seq_add_stmt (&olist,
8193 gimple_build_assign (TREE_VEC_ELT (t, i),
8194 clobber));
8197 tree clobber = build_constructor (ctx->record_type, NULL);
8198 TREE_THIS_VOLATILE (clobber) = 1;
8199 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8200 clobber));
8203 /* Once all the expansions are done, sequence all the different
8204 fragments inside gimple_omp_body. */
8206 new_body = NULL;
8208 if (offloaded
8209 && ctx->record_type)
8211 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8212 /* fixup_child_record_type might have changed receiver_decl's type. */
8213 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8214 gimple_seq_add_stmt (&new_body,
8215 gimple_build_assign (ctx->receiver_decl, t));
8217 gimple_seq_add_seq (&new_body, fplist);
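/* On the receiver side, initialize the private and firstprivate
   copies from the incoming data and set up use_device_ptr /
   is_device_ptr pointers, regimplifying as needed.  */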
8219 if (offloaded || data_region)
8221 tree prev = NULL_TREE;
8222 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8223 switch (OMP_CLAUSE_CODE (c))
8225 tree var, x;
8226 default:
8227 break;
8228 case OMP_CLAUSE_FIRSTPRIVATE:
8229 if (is_gimple_omp_oacc (ctx->stmt))
8230 break;
8231 var = OMP_CLAUSE_DECL (c);
8232 if (omp_is_reference (var)
8233 || is_gimple_reg_type (TREE_TYPE (var)))
8235 tree new_var = lookup_decl (var, ctx);
8236 tree type;
8237 type = TREE_TYPE (var);
8238 if (omp_is_reference (var))
8239 type = TREE_TYPE (type);
8240 if ((INTEGRAL_TYPE_P (type)
8241 && TYPE_PRECISION (type) <= POINTER_SIZE)
8242 || TREE_CODE (type) == POINTER_TYPE)
8244 x = build_receiver_ref (var, false, ctx);
8245 if (TREE_CODE (type) != POINTER_TYPE)
8246 x = fold_convert (pointer_sized_int_node, x);
8247 x = fold_convert (type, x);
8248 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8249 fb_rvalue);
8250 if (omp_is_reference (var))
8252 tree v = create_tmp_var_raw (type, get_name (var));
8253 gimple_add_tmp_var (v);
8254 TREE_ADDRESSABLE (v) = 1;
8255 gimple_seq_add_stmt (&new_body,
8256 gimple_build_assign (v, x));
8257 x = build_fold_addr_expr (v);
8259 gimple_seq_add_stmt (&new_body,
8260 gimple_build_assign (new_var, x));
8262 else
8264 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8265 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8266 fb_rvalue);
8267 gimple_seq_add_stmt (&new_body,
8268 gimple_build_assign (new_var, x));
8271 else if (is_variable_sized (var))
8273 tree pvar = DECL_VALUE_EXPR (var);
8274 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8275 pvar = TREE_OPERAND (pvar, 0);
8276 gcc_assert (DECL_P (pvar));
8277 tree new_var = lookup_decl (pvar, ctx);
8278 x = build_receiver_ref (var, false, ctx);
8279 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8280 gimple_seq_add_stmt (&new_body,
8281 gimple_build_assign (new_var, x));
8283 break;
8284 case OMP_CLAUSE_PRIVATE:
8285 if (is_gimple_omp_oacc (ctx->stmt))
8286 break;
8287 var = OMP_CLAUSE_DECL (c);
8288 if (omp_is_reference (var))
8290 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8291 tree new_var = lookup_decl (var, ctx);
8292 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8293 if (TREE_CONSTANT (x))
8295 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8296 get_name (var));
8297 gimple_add_tmp_var (x);
8298 TREE_ADDRESSABLE (x) = 1;
8299 x = build_fold_addr_expr_loc (clause_loc, x);
8301 else
8302 break;
8304 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8305 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8306 gimple_seq_add_stmt (&new_body,
8307 gimple_build_assign (new_var, x));
8309 break;
8310 case OMP_CLAUSE_USE_DEVICE_PTR:
8311 case OMP_CLAUSE_IS_DEVICE_PTR:
8312 var = OMP_CLAUSE_DECL (c);
8313 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8314 x = build_sender_ref (var, ctx);
8315 else
8316 x = build_receiver_ref (var, false, ctx);
8317 if (is_variable_sized (var))
8319 tree pvar = DECL_VALUE_EXPR (var);
8320 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8321 pvar = TREE_OPERAND (pvar, 0);
8322 gcc_assert (DECL_P (pvar));
8323 tree new_var = lookup_decl (pvar, ctx);
8324 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8325 gimple_seq_add_stmt (&new_body,
8326 gimple_build_assign (new_var, x));
8328 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8330 tree new_var = lookup_decl (var, ctx);
8331 new_var = DECL_VALUE_EXPR (new_var);
8332 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8333 new_var = TREE_OPERAND (new_var, 0);
8334 gcc_assert (DECL_P (new_var));
8335 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8336 gimple_seq_add_stmt (&new_body,
8337 gimple_build_assign (new_var, x));
8339 else
8341 tree type = TREE_TYPE (var);
8342 tree new_var = lookup_decl (var, ctx);
8343 if (omp_is_reference (var))
8345 type = TREE_TYPE (type);
8346 if (TREE_CODE (type) != ARRAY_TYPE)
8348 tree v = create_tmp_var_raw (type, get_name (var));
8349 gimple_add_tmp_var (v);
8350 TREE_ADDRESSABLE (v) = 1;
8351 x = fold_convert (type, x);
8352 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8353 fb_rvalue);
8354 gimple_seq_add_stmt (&new_body,
8355 gimple_build_assign (v, x));
8356 x = build_fold_addr_expr (v);
8359 new_var = DECL_VALUE_EXPR (new_var);
8360 x = fold_convert (TREE_TYPE (new_var), x);
8361 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8362 gimple_seq_add_stmt (&new_body,
8363 gimple_build_assign (new_var, x));
8365 break;
8367 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8368 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE values
8369 have already been handled, and similarly OMP_CLAUSE_PRIVATE for VLAs
8370 or references to VLAs. */
8371 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8372 switch (OMP_CLAUSE_CODE (c))
8374 tree var;
8375 default:
8376 break;
8377 case OMP_CLAUSE_MAP:
8378 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8379 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8381 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8382 HOST_WIDE_INT offset = 0;
8383 gcc_assert (prev);
8384 var = OMP_CLAUSE_DECL (c);
8385 if (DECL_P (var)
8386 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8387 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8388 ctx))
8389 && varpool_node::get_create (var)->offloadable)
8390 break;
8391 if (TREE_CODE (var) == INDIRECT_REF
8392 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8393 var = TREE_OPERAND (var, 0);
8394 if (TREE_CODE (var) == COMPONENT_REF)
8396 var = get_addr_base_and_unit_offset (var, &offset);
8397 gcc_assert (var != NULL_TREE && DECL_P (var));
8399 else if (DECL_SIZE (var)
8400 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8402 tree var2 = DECL_VALUE_EXPR (var);
8403 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8404 var2 = TREE_OPERAND (var2, 0);
8405 gcc_assert (DECL_P (var2));
8406 var = var2;
8408 tree new_var = lookup_decl (var, ctx), x;
8409 tree type = TREE_TYPE (new_var);
8410 bool is_ref;
8411 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8412 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8413 == COMPONENT_REF))
8415 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8416 is_ref = true;
8417 new_var = build2 (MEM_REF, type,
8418 build_fold_addr_expr (new_var),
8419 build_int_cst (build_pointer_type (type),
8420 offset));
8422 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8424 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8425 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8426 new_var = build2 (MEM_REF, type,
8427 build_fold_addr_expr (new_var),
8428 build_int_cst (build_pointer_type (type),
8429 offset));
8431 else
8432 is_ref = omp_is_reference (var);
8433 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8434 is_ref = false;
8435 bool ref_to_array = false;
8436 if (is_ref)
8438 type = TREE_TYPE (type);
8439 if (TREE_CODE (type) == ARRAY_TYPE)
8441 type = build_pointer_type (type);
8442 ref_to_array = true;
8445 else if (TREE_CODE (type) == ARRAY_TYPE)
8447 tree decl2 = DECL_VALUE_EXPR (new_var);
8448 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8449 decl2 = TREE_OPERAND (decl2, 0);
8450 gcc_assert (DECL_P (decl2));
8451 new_var = decl2;
8452 type = TREE_TYPE (new_var);
8454 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8455 x = fold_convert_loc (clause_loc, type, x);
8456 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8458 tree bias = OMP_CLAUSE_SIZE (c);
8459 if (DECL_P (bias))
8460 bias = lookup_decl (bias, ctx);
8461 bias = fold_convert_loc (clause_loc, sizetype, bias);
8462 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8463 bias);
8464 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8465 TREE_TYPE (x), x, bias);
8467 if (ref_to_array)
8468 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8469 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8470 if (is_ref && !ref_to_array)
8472 tree t = create_tmp_var_raw (type, get_name (var));
8473 gimple_add_tmp_var (t);
8474 TREE_ADDRESSABLE (t) = 1;
8475 gimple_seq_add_stmt (&new_body,
8476 gimple_build_assign (t, x));
8477 x = build_fold_addr_expr_loc (clause_loc, t);
8479 gimple_seq_add_stmt (&new_body,
8480 gimple_build_assign (new_var, x));
8481 prev = NULL_TREE;
8483 else if (OMP_CLAUSE_CHAIN (c)
8484 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8485 == OMP_CLAUSE_MAP
8486 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8487 == GOMP_MAP_FIRSTPRIVATE_POINTER
8488 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8489 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8490 prev = c;
8491 break;
8492 case OMP_CLAUSE_PRIVATE:
8493 var = OMP_CLAUSE_DECL (c);
8494 if (is_variable_sized (var))
8496 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8497 tree new_var = lookup_decl (var, ctx);
8498 tree pvar = DECL_VALUE_EXPR (var);
8499 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8500 pvar = TREE_OPERAND (pvar, 0);
8501 gcc_assert (DECL_P (pvar));
8502 tree new_pvar = lookup_decl (pvar, ctx);
8503 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8504 tree al = size_int (DECL_ALIGN (var));
8505 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8506 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8507 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8508 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8509 gimple_seq_add_stmt (&new_body,
8510 gimple_build_assign (new_pvar, x));
8512 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8514 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8515 tree new_var = lookup_decl (var, ctx);
8516 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8517 if (TREE_CONSTANT (x))
8518 break;
8519 else
8521 tree atmp
8522 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8523 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8524 tree al = size_int (TYPE_ALIGN (rtype));
8525 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8528 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8529 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8530 gimple_seq_add_stmt (&new_body,
8531 gimple_build_assign (new_var, x));
8533 break;
8536 gimple_seq fork_seq = NULL;
8537 gimple_seq join_seq = NULL;
8539 if (is_oacc_parallel (ctx))
8541 /* If there are reductions on the offloaded region itself, treat
8542 them as a dummy GANG loop. */
8543 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8545 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8546 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8549 gimple_seq_add_seq (&new_body, fork_seq);
8550 gimple_seq_add_seq (&new_body, tgt_body);
8551 gimple_seq_add_seq (&new_body, join_seq);
8553 if (offloaded)
8554 new_body = maybe_catch_exception (new_body);
8556 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8557 gimple_omp_set_body (stmt, new_body);
8560 bind = gimple_build_bind (NULL, NULL,
8561 tgt_bind ? gimple_bind_block (tgt_bind)
8562 : NULL_TREE);
8563 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8564 gimple_bind_add_seq (bind, ilist);
8565 gimple_bind_add_stmt (bind, stmt);
8566 gimple_bind_add_seq (bind, olist);
8568 pop_gimplify_context (NULL);
8570 if (dep_bind)
8572 gimple_bind_add_seq (dep_bind, dep_ilist);
8573 gimple_bind_add_stmt (dep_bind, bind);
8574 gimple_bind_add_seq (dep_bind, dep_olist);
8575 pop_gimplify_context (dep_bind);
8579 /* Expand code for an OpenMP teams directive. */
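/* The teams construct itself is lowered into (roughly)

     GOMP_teams (num_teams, thread_limit);
     <lowered body>

   with num_teams and thread_limit defaulting to 0 when the respective
   clause is absent.  */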
8581 static void
8582 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8584 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8585 push_gimplify_context ();
8587 tree block = make_node (BLOCK);
8588 gbind *bind = gimple_build_bind (NULL, NULL, block);
8589 gsi_replace (gsi_p, bind, true);
8590 gimple_seq bind_body = NULL;
8591 gimple_seq dlist = NULL;
8592 gimple_seq olist = NULL;
8594 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8595 OMP_CLAUSE_NUM_TEAMS);
8596 if (num_teams == NULL_TREE)
8597 num_teams = build_int_cst (unsigned_type_node, 0);
8598 else
8600 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8601 num_teams = fold_convert (unsigned_type_node, num_teams);
8602 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8604 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8605 OMP_CLAUSE_THREAD_LIMIT);
8606 if (thread_limit == NULL_TREE)
8607 thread_limit = build_int_cst (unsigned_type_node, 0);
8608 else
8610 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8611 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8612 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8613 fb_rvalue);
8616 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8617 &bind_body, &dlist, ctx, NULL);
8618 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8619 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8620 if (!gimple_omp_teams_grid_phony (teams_stmt))
8622 gimple_seq_add_stmt (&bind_body, teams_stmt);
8623 location_t loc = gimple_location (teams_stmt);
8624 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8625 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8626 gimple_set_location (call, loc);
8627 gimple_seq_add_stmt (&bind_body, call);
8630 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8631 gimple_omp_set_body (teams_stmt, NULL);
8632 gimple_seq_add_seq (&bind_body, olist);
8633 gimple_seq_add_seq (&bind_body, dlist);
8634 if (!gimple_omp_teams_grid_phony (teams_stmt))
8635 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8636 gimple_bind_set_body (bind, bind_body);
8638 pop_gimplify_context (bind);
8640 gimple_bind_append_vars (bind, ctx->block_vars);
8641 BLOCK_VARS (block) = ctx->block_vars;
8642 if (BLOCK_VARS (block))
8643 TREE_USED (block) = 1;
8646 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8648 static void
8649 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8651 gimple *stmt = gsi_stmt (*gsi_p);
8652 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8653 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8654 gimple_build_omp_return (false));
8658 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8659 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8660 of OMP context, but with task_shared_vars set. */
8662 static tree
8663 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8664 void *data)
8666 tree t = *tp;
8668 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8669 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8670 return t;
8672 if (task_shared_vars
8673 && DECL_P (t)
8674 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8675 return t;
8677 /* If a global variable has been privatized, TREE_CONSTANT on
8678 ADDR_EXPR might be wrong. */
8679 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8680 recompute_tree_invariant_for_addr_expr (t);
8682 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8683 return NULL_TREE;
8686 /* Data to be communicated between lower_omp_regimplify_operands and
8687 lower_omp_regimplify_operands_p. */
8689 struct lower_omp_regimplify_operands_data
8691 omp_context *ctx;
8692 vec<tree> *decls;
8695 /* Helper function for lower_omp_regimplify_operands. Find
8696 omp_member_access_dummy_var vars and adjust temporarily their
8697 DECL_VALUE_EXPRs if needed. */
8699 static tree
8700 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8701 void *data)
8703 tree t = omp_member_access_dummy_var (*tp);
8704 if (t)
8706 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8707 lower_omp_regimplify_operands_data *ldata
8708 = (lower_omp_regimplify_operands_data *) wi->info;
8709 tree o = maybe_lookup_decl (t, ldata->ctx);
8710 if (o != t)
8712 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8713 ldata->decls->safe_push (*tp);
8714 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8715 SET_DECL_VALUE_EXPR (*tp, v);
8718 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8719 return NULL_TREE;
8722 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8723 of omp_member_access_dummy_var vars during regimplification. */
8725 static void
8726 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8727 gimple_stmt_iterator *gsi_p)
8729 auto_vec<tree, 10> decls;
8730 if (ctx)
8732 struct walk_stmt_info wi;
8733 memset (&wi, '\0', sizeof (wi));
8734 struct lower_omp_regimplify_operands_data data;
8735 data.ctx = ctx;
8736 data.decls = &decls;
8737 wi.info = &data;
8738 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8740 gimple_regimplify_operands (stmt, gsi_p);
8741 while (!decls.is_empty ())
8743 tree t = decls.pop ();
8744 tree v = decls.pop ();
8745 SET_DECL_VALUE_EXPR (t, v);
8749 static void
8750 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8752 gimple *stmt = gsi_stmt (*gsi_p);
8753 struct walk_stmt_info wi;
8754 gcall *call_stmt;
8756 if (gimple_has_location (stmt))
8757 input_location = gimple_location (stmt);
8759 if (task_shared_vars)
8760 memset (&wi, '\0', sizeof (wi));
8762 /* If we have issued syntax errors, avoid doing any heavy lifting.
8763 Just replace the OMP directives with a NOP to avoid
8764 confusing RTL expansion. */
8765 if (seen_error () && is_gimple_omp (stmt))
8767 gsi_replace (gsi_p, gimple_build_nop (), true);
8768 return;
8771 switch (gimple_code (stmt))
8773 case GIMPLE_COND:
8775 gcond *cond_stmt = as_a <gcond *> (stmt);
8776 if ((ctx || task_shared_vars)
8777 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8778 lower_omp_regimplify_p,
8779 ctx ? NULL : &wi, NULL)
8780 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8781 lower_omp_regimplify_p,
8782 ctx ? NULL : &wi, NULL)))
8783 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8785 break;
8786 case GIMPLE_CATCH:
8787 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8788 break;
8789 case GIMPLE_EH_FILTER:
8790 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8791 break;
8792 case GIMPLE_TRY:
8793 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8794 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8795 break;
8796 case GIMPLE_TRANSACTION:
8797 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8798 ctx);
8799 break;
8800 case GIMPLE_BIND:
8801 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8802 break;
8803 case GIMPLE_OMP_PARALLEL:
8804 case GIMPLE_OMP_TASK:
8805 ctx = maybe_lookup_ctx (stmt);
8806 gcc_assert (ctx);
8807 if (ctx->cancellable)
8808 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8809 lower_omp_taskreg (gsi_p, ctx);
8810 break;
8811 case GIMPLE_OMP_FOR:
8812 ctx = maybe_lookup_ctx (stmt);
8813 gcc_assert (ctx);
8814 if (ctx->cancellable)
8815 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8816 lower_omp_for (gsi_p, ctx);
8817 break;
8818 case GIMPLE_OMP_SECTIONS:
8819 ctx = maybe_lookup_ctx (stmt);
8820 gcc_assert (ctx);
8821 if (ctx->cancellable)
8822 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8823 lower_omp_sections (gsi_p, ctx);
8824 break;
8825 case GIMPLE_OMP_SINGLE:
8826 ctx = maybe_lookup_ctx (stmt);
8827 gcc_assert (ctx);
8828 lower_omp_single (gsi_p, ctx);
8829 break;
8830 case GIMPLE_OMP_MASTER:
8831 ctx = maybe_lookup_ctx (stmt);
8832 gcc_assert (ctx);
8833 lower_omp_master (gsi_p, ctx);
8834 break;
8835 case GIMPLE_OMP_TASKGROUP:
8836 ctx = maybe_lookup_ctx (stmt);
8837 gcc_assert (ctx);
8838 lower_omp_taskgroup (gsi_p, ctx);
8839 break;
8840 case GIMPLE_OMP_ORDERED:
8841 ctx = maybe_lookup_ctx (stmt);
8842 gcc_assert (ctx);
8843 lower_omp_ordered (gsi_p, ctx);
8844 break;
8845 case GIMPLE_OMP_CRITICAL:
8846 ctx = maybe_lookup_ctx (stmt);
8847 gcc_assert (ctx);
8848 lower_omp_critical (gsi_p, ctx);
8849 break;
8850 case GIMPLE_OMP_ATOMIC_LOAD:
8851 if ((ctx || task_shared_vars)
8852 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8853 as_a <gomp_atomic_load *> (stmt)),
8854 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8855 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8856 break;
8857 case GIMPLE_OMP_TARGET:
8858 ctx = maybe_lookup_ctx (stmt);
8859 gcc_assert (ctx);
8860 lower_omp_target (gsi_p, ctx);
8861 break;
8862 case GIMPLE_OMP_TEAMS:
8863 ctx = maybe_lookup_ctx (stmt);
8864 gcc_assert (ctx);
8865 lower_omp_teams (gsi_p, ctx);
8866 break;
8867 case GIMPLE_OMP_GRID_BODY:
8868 ctx = maybe_lookup_ctx (stmt);
8869 gcc_assert (ctx);
8870 lower_omp_grid_body (gsi_p, ctx);
8871 break;
8872 case GIMPLE_CALL:
8873 tree fndecl;
8874 call_stmt = as_a <gcall *> (stmt);
8875 fndecl = gimple_call_fndecl (call_stmt);
8876 if (fndecl
8877 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8878 switch (DECL_FUNCTION_CODE (fndecl))
8880 case BUILT_IN_GOMP_BARRIER:
8881 if (ctx == NULL)
8882 break;
8883 /* FALLTHRU */
8884 case BUILT_IN_GOMP_CANCEL:
8885 case BUILT_IN_GOMP_CANCELLATION_POINT:
8886 omp_context *cctx;
8887 cctx = ctx;
8888 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8889 cctx = cctx->outer;
8890 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
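/* In a region that is not cancellable, a stand-alone cancellation
   point is a no-op and a barrier remains an ordinary barrier.  */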
8891 if (!cctx->cancellable)
8893 if (DECL_FUNCTION_CODE (fndecl)
8894 == BUILT_IN_GOMP_CANCELLATION_POINT)
8896 stmt = gimple_build_nop ();
8897 gsi_replace (gsi_p, stmt, false);
8899 break;
8901 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8903 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8904 gimple_call_set_fndecl (call_stmt, fndecl);
8905 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
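/* These builtins return true if the region has been cancelled;
   capture the result and branch to the cancellation label.  */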
8907 tree lhs;
8908 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8909 gimple_call_set_lhs (call_stmt, lhs);
8910 tree fallthru_label;
8911 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8912 gimple *g;
8913 g = gimple_build_label (fallthru_label);
8914 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8915 g = gimple_build_cond (NE_EXPR, lhs,
8916 fold_convert (TREE_TYPE (lhs),
8917 boolean_false_node),
8918 cctx->cancel_label, fallthru_label);
8919 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8920 break;
8921 default:
8922 break;
8924 /* FALLTHRU */
8925 default:
8926 if ((ctx || task_shared_vars)
8927 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8928 ctx ? NULL : &wi))
8930 /* Just remove clobbers; this should happen only if we have
8931 "privatized" local addressable variables in SIMD regions.
8932 The clobber isn't needed in that case, and gimplifying the
8933 address of the ARRAY_REF into a pointer and creating a
8934 MEM_REF-based clobber would create worse code than we get
8935 with the clobber dropped. */
8936 if (gimple_clobber_p (stmt))
8938 gsi_replace (gsi_p, gimple_build_nop (), true);
8939 break;
8941 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8943 break;
8947 static void
8948 lower_omp (gimple_seq *body, omp_context *ctx)
8950 location_t saved_location = input_location;
8951 gimple_stmt_iterator gsi;
8952 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8953 lower_omp_1 (&gsi, ctx);
8954 /* During gimplification, we haven't folded statements inside offloading
8955 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8956 if (target_nesting_level || taskreg_nesting_level)
8957 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8958 fold_stmt (&gsi);
8959 input_location = saved_location;
8962 /* Main entry point. */
8964 static unsigned int
8965 execute_lower_omp (void)
8967 gimple_seq body;
8968 int i;
8969 omp_context *ctx;
8971 /* This pass always runs, to provide PROP_gimple_lomp.
8972 But often, there is nothing to do. */
8973 if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8974 && flag_openmp_simd == 0)
8975 return 0;
8977 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8978 delete_omp_context);
8980 body = gimple_body (current_function_decl);
8982 if (hsa_gen_requested_p ())
8983 omp_grid_gridify_all_targets (&body);
8985 scan_omp (&body, NULL);
8986 gcc_assert (taskreg_nesting_level == 0);
8987 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8988 finish_taskreg_scan (ctx);
8989 taskreg_contexts.release ();
8991 if (all_contexts->root)
8993 if (task_shared_vars)
8994 push_gimplify_context ();
8995 lower_omp (&body, NULL);
8996 if (task_shared_vars)
8997 pop_gimplify_context (NULL);
9000 if (all_contexts)
9002 splay_tree_delete (all_contexts);
9003 all_contexts = NULL;
9005 BITMAP_FREE (task_shared_vars);
9006 return 0;
9009 namespace {
9011 const pass_data pass_data_lower_omp =
9013 GIMPLE_PASS, /* type */
9014 "omplower", /* name */
9015 OPTGROUP_OMP, /* optinfo_flags */
9016 TV_NONE, /* tv_id */
9017 PROP_gimple_any, /* properties_required */
9018 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9019 0, /* properties_destroyed */
9020 0, /* todo_flags_start */
9021 0, /* todo_flags_finish */
9024 class pass_lower_omp : public gimple_opt_pass
9026 public:
9027 pass_lower_omp (gcc::context *ctxt)
9028 : gimple_opt_pass (pass_data_lower_omp, ctxt)
9031 /* opt_pass methods: */
9032 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9034 }; // class pass_lower_omp
9036 } // anon namespace
9038 gimple_opt_pass *
9039 make_pass_lower_omp (gcc::context *ctxt)
9041 return new pass_lower_omp (ctxt);
9044 /* The following is a utility to diagnose structured block violations.
9045 It is not part of the "omplower" pass, as that's invoked too late. It
9046 should be invoked by the respective front ends after gimplification. */
9048 static splay_tree all_labels;
9050 /* Check for mismatched contexts and generate an error if needed. Return
9051 true if an error is detected. */
9053 static bool
9054 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9055 gimple *branch_ctx, gimple *label_ctx)
9057 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9058 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9060 if (label_ctx == branch_ctx)
9061 return false;
9063 const char* kind = NULL;
9065 if (flag_cilkplus)
9067 if ((branch_ctx
9068 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
9069 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
9070 || (label_ctx
9071 && gimple_code (label_ctx) == GIMPLE_OMP_FOR
9072 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
9073 kind = "Cilk Plus";
9075 if (flag_openacc)
9077 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9078 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9080 gcc_checking_assert (kind == NULL);
9081 kind = "OpenACC";
9084 if (kind == NULL)
9086 gcc_checking_assert (flag_openmp);
9087 kind = "OpenMP";
9090 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9091 so we could traverse it and issue a correct "exit" or "enter" error
9092 message upon a structured block violation.
9094 We built the context by building a list with tree_cons'ing, but there is
9095 no easy counterpart in gimple tuples. It seems like far too much work
9096 for issuing exit/enter error messages. If someone really misses the
9097 distinct error message... patches welcome. */
9099 #if 0
9100 /* Try to avoid confusing the user by producing an error message
9101 with correct "exit" or "enter" verbiage. We prefer "exit"
9102 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9103 if (branch_ctx == NULL)
9104 exit_p = false;
9105 else
9107 while (label_ctx)
9109 if (TREE_VALUE (label_ctx) == branch_ctx)
9111 exit_p = false;
9112 break;
9114 label_ctx = TREE_CHAIN (label_ctx);
9118 if (exit_p)
9119 error ("invalid exit from %s structured block", kind);
9120 else
9121 error ("invalid entry to %s structured block", kind);
9122 #endif
9124 /* If it's obvious we have an invalid entry, be specific about the error. */
9125 if (branch_ctx == NULL)
9126 error ("invalid entry to %s structured block", kind);
9127 else
9129 /* Otherwise, be vague and lazy, but efficient. */
9130 error ("invalid branch to/from %s structured block", kind);
9133 gsi_replace (gsi_p, gimple_build_nop (), false);
9134 return true;
9137 /* Pass 1: Create a minimal tree of structured blocks, and record
9138 where each label is found. */
9140 static tree
9141 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9142 struct walk_stmt_info *wi)
9144 gimple *context = (gimple *) wi->info;
9145 gimple *inner_context;
9146 gimple *stmt = gsi_stmt (*gsi_p);
9148 *handled_ops_p = true;
9150 switch (gimple_code (stmt))
9152 WALK_SUBSTMTS;
9154 case GIMPLE_OMP_PARALLEL:
9155 case GIMPLE_OMP_TASK:
9156 case GIMPLE_OMP_SECTIONS:
9157 case GIMPLE_OMP_SINGLE:
9158 case GIMPLE_OMP_SECTION:
9159 case GIMPLE_OMP_MASTER:
9160 case GIMPLE_OMP_ORDERED:
9161 case GIMPLE_OMP_CRITICAL:
9162 case GIMPLE_OMP_TARGET:
9163 case GIMPLE_OMP_TEAMS:
9164 case GIMPLE_OMP_TASKGROUP:
9165 /* The minimal context here is just the current OMP construct. */
9166 inner_context = stmt;
9167 wi->info = inner_context;
9168 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9169 wi->info = context;
9170 break;
9172 case GIMPLE_OMP_FOR:
9173 inner_context = stmt;
9174 wi->info = inner_context;
9175 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9176 walk them. */
9177 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9178 diagnose_sb_1, NULL, wi);
9179 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9180 wi->info = context;
9181 break;
9183 case GIMPLE_LABEL:
9184 splay_tree_insert (all_labels,
9185 (splay_tree_key) gimple_label_label (
9186 as_a <glabel *> (stmt)),
9187 (splay_tree_value) context);
9188 break;
9190 default:
9191 break;
9194 return NULL_TREE;
9197 /* Pass 2: Check each branch and see if its context differs from that of
9198 the destination label's context. */
9200 static tree
9201 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9202 struct walk_stmt_info *wi)
9204 gimple *context = (gimple *) wi->info;
9205 splay_tree_node n;
9206 gimple *stmt = gsi_stmt (*gsi_p);
9208 *handled_ops_p = true;
9210 switch (gimple_code (stmt))
9212 WALK_SUBSTMTS;
9214 case GIMPLE_OMP_PARALLEL:
9215 case GIMPLE_OMP_TASK:
9216 case GIMPLE_OMP_SECTIONS:
9217 case GIMPLE_OMP_SINGLE:
9218 case GIMPLE_OMP_SECTION:
9219 case GIMPLE_OMP_MASTER:
9220 case GIMPLE_OMP_ORDERED:
9221 case GIMPLE_OMP_CRITICAL:
9222 case GIMPLE_OMP_TARGET:
9223 case GIMPLE_OMP_TEAMS:
9224 case GIMPLE_OMP_TASKGROUP:
9225 wi->info = stmt;
9226 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9227 wi->info = context;
9228 break;
9230 case GIMPLE_OMP_FOR:
9231 wi->info = stmt;
9232 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9233 walk them. */
9234 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9235 diagnose_sb_2, NULL, wi);
9236 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9237 wi->info = context;
9238 break;
9240 case GIMPLE_COND:
9242 gcond *cond_stmt = as_a <gcond *> (stmt);
9243 tree lab = gimple_cond_true_label (cond_stmt);
9244 if (lab)
9246 n = splay_tree_lookup (all_labels,
9247 (splay_tree_key) lab);
9248 diagnose_sb_0 (gsi_p, context,
9249 n ? (gimple *) n->value : NULL);
9251 lab = gimple_cond_false_label (cond_stmt);
9252 if (lab)
9254 n = splay_tree_lookup (all_labels,
9255 (splay_tree_key) lab);
9256 diagnose_sb_0 (gsi_p, context,
9257 n ? (gimple *) n->value : NULL);
9260 break;
9262 case GIMPLE_GOTO:
9264 tree lab = gimple_goto_dest (stmt);
9265 if (TREE_CODE (lab) != LABEL_DECL)
9266 break;
9268 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9269 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9271 break;
9273 case GIMPLE_SWITCH:
9275 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9276 unsigned int i;
9277 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9279 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9280 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9281 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9282 break;
9285 break;
9287 case GIMPLE_RETURN:
9288 diagnose_sb_0 (gsi_p, context, NULL);
9289 break;
9291 default:
9292 break;
9295 return NULL_TREE;
9298 static unsigned int
9299 diagnose_omp_structured_block_errors (void)
9301 struct walk_stmt_info wi;
9302 gimple_seq body = gimple_body (current_function_decl);
9304 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9306 memset (&wi, 0, sizeof (wi));
9307 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9309 memset (&wi, 0, sizeof (wi));
9310 wi.want_locations = true;
9311 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9313 gimple_set_body (current_function_decl, body);
9315 splay_tree_delete (all_labels);
9316 all_labels = NULL;
9318 return 0;
9321 namespace {
9323 const pass_data pass_data_diagnose_omp_blocks =
9325 GIMPLE_PASS, /* type */
9326 "*diagnose_omp_blocks", /* name */
9327 OPTGROUP_OMP, /* optinfo_flags */
9328 TV_NONE, /* tv_id */
9329 PROP_gimple_any, /* properties_required */
9330 0, /* properties_provided */
9331 0, /* properties_destroyed */
9332 0, /* todo_flags_start */
9333 0, /* todo_flags_finish */
9336 class pass_diagnose_omp_blocks : public gimple_opt_pass
9338 public:
9339 pass_diagnose_omp_blocks (gcc::context *ctxt)
9340 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9343 /* opt_pass methods: */
9344 virtual bool gate (function *)
9346 return flag_cilkplus || flag_openacc || flag_openmp;
9348 virtual unsigned int execute (function *)
9350 return diagnose_omp_structured_block_errors ();
9353 }; // class pass_diagnose_omp_blocks
9355 } // anon namespace
9357 gimple_opt_pass *
9358 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9360 return new pass_diagnose_omp_blocks (ctxt);
9364 #include "gt-omp-low.h"