[Ada] Wrong accessibility level under -gnat12
[official-gcc.git] / gcc / omp-low.c
blob7e85d4841b6d60c7c43f604a77bc0f71e41696a9
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen0;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;
};
/* Splay tree keyed by GIMPLE statement pointer, mapping each OMP
   directive to its omp_context (populated in new_omp_context,
   destroyed via delete_omp_context).  */
static splay_tree all_contexts;
/* Nesting counters maintained while scanning; the bump sites are not
   visible in this chunk -- NOTE(review): confirm against scan_omp.  */
static int taskreg_nesting_level;
static int target_nesting_level;
/* DECL_UIDs of variables forced TREE_ADDRESSABLE on behalf of task
   sharing (set in use_pointer_for_field, consulted in omp_copy_decl_2).  */
static bitmap task_shared_vars;
/* Task-region contexts collected during scanning; consumers are outside
   this chunk.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
/* Shared case labels for container statements whose sub-statements should
   be walked rather than handled directly.  For use inside a switch over
   gimple_code in a walk_gimple_stmt callback; expects HANDLED_OPS_P from
   the callback signature to be in scope.  */
#define WALK_SUBSTMTS \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
171 /* Return true if CTX corresponds to an oacc parallel region. */
173 static bool
174 is_oacc_parallel (omp_context *ctx)
176 enum gimple_code outer_type = gimple_code (ctx->stmt);
177 return ((outer_type == GIMPLE_OMP_TARGET)
178 && (gimple_omp_target_kind (ctx->stmt)
179 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
182 /* Return true if CTX corresponds to an oacc kernels region. */
184 static bool
185 is_oacc_kernels (omp_context *ctx)
187 enum gimple_code outer_type = gimple_code (ctx->stmt);
188 return ((outer_type == GIMPLE_OMP_TARGET)
189 && (gimple_omp_target_kind (ctx->stmt)
190 == GF_OMP_TARGET_KIND_OACC_KERNELS));
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  /* Strip the access path down to its base object.  Only an artificial
     pointer PARM_DECL belonging to the current function qualifies as the
     "this" parameter; anything else means DECL is not such a dummy var.  */
  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
232 /* Helper for unshare_and_remap, called through walk_tree. */
234 static tree
235 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
237 tree *pair = (tree *) data;
238 if (*tp == pair[0])
240 *tp = unshare_expr (pair[1]);
241 *walk_subtrees = 0;
243 else if (IS_TYPE_OR_DECL_P (*tp))
244 *walk_subtrees = 0;
245 return NULL_TREE;
248 /* Return unshare_expr (X) with all occurrences of FROM
249 replaced with TO. */
251 static tree
252 unshare_and_remap (tree x, tree from, tree to)
254 tree pair[2] = { from, to };
255 x = unshare_expr (x);
256 walk_tree (&x, unshare_and_remap_1, pair, NULL);
257 return x;
260 /* Convenience function for calling scan_omp_1_op on tree operands. */
262 static inline tree
263 scan_omp_op (tree *tp, omp_context *ctx)
265 struct walk_stmt_info wi;
267 memset (&wi, 0, sizeof (wi));
268 wi.info = ctx;
269 wi.want_locations = true;
271 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
274 static void lower_omp (gimple_seq *, omp_context *);
275 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
276 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
278 /* Return true if CTX is for an omp parallel. */
280 static inline bool
281 is_parallel_ctx (omp_context *ctx)
283 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
287 /* Return true if CTX is for an omp task. */
289 static inline bool
290 is_task_ctx (omp_context *ctx)
292 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
296 /* Return true if CTX is for an omp taskloop. */
298 static inline bool
299 is_taskloop_ctx (omp_context *ctx)
301 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
302 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
306 /* Return true if CTX is for a host omp teams. */
308 static inline bool
309 is_host_teams_ctx (omp_context *ctx)
311 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
312 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
315 /* Return true if CTX is for an omp parallel or omp task or host omp teams
316 (the last one is strictly not a task region in OpenMP speak, but we
317 need to treat it similarly). */
319 static inline bool
320 is_taskreg_ctx (omp_context *ctx)
322 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
325 /* Return true if EXPR is variable sized. */
327 static inline bool
328 is_variable_sized (const_tree expr)
330 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
333 /* Lookup variables. The "maybe" form
334 allows for the variable form to not have been entered, otherwise we
335 assert that the variable must have been entered. */
337 static inline tree
338 lookup_decl (tree var, omp_context *ctx)
340 tree *n = ctx->cb.decl_map->get (var);
341 return *n;
344 static inline tree
345 maybe_lookup_decl (const_tree var, omp_context *ctx)
347 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
348 return n ? *n : NULL_TREE;
351 static inline tree
352 lookup_field (tree var, omp_context *ctx)
354 splay_tree_node n;
355 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
356 return (tree) n->value;
359 static inline tree
360 lookup_sfield (splay_tree_key key, omp_context *ctx)
362 splay_tree_node n;
363 n = splay_tree_lookup (ctx->sfield_map
364 ? ctx->sfield_map : ctx->field_map, key);
365 return (tree) n->value;
368 static inline tree
369 lookup_sfield (tree var, omp_context *ctx)
371 return lookup_sfield ((splay_tree_key) var, ctx);
374 static inline tree
375 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
377 splay_tree_node n;
378 n = splay_tree_lookup (ctx->field_map, key);
379 return n ? (tree) n->value : NULL_TREE;
382 static inline tree
383 maybe_lookup_field (tree var, omp_context *ctx)
385 return maybe_lookup_field ((splay_tree_key) var, ctx);
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  As a side effect, when
   sharing into a task (or via the goto below, into a nested taskreg that
   re-shares DECL), DECL's outer copy may be forced TREE_ADDRESSABLE and
   recorded in task_shared_vars.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the innermost enclosing taskreg context that also maps
	     DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
/* Construct a new automatic decl similar to VAR, with the given NAME and
   TYPE, owned by the current function and chained onto CTX->block_vars.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
506 static tree
507 omp_copy_decl_1 (tree var, omp_context *ctx)
509 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
512 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
513 as appropriate. */
514 static tree
515 omp_build_component_ref (tree obj, tree field)
517 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
518 if (TREE_THIS_VOLATILE (field))
519 TREE_THIS_VOLATILE (ret) |= 1;
520 if (TREE_READONLY (field))
521 TREE_READONLY (ret) |= 1;
522 return ret;
/* Build tree nodes to access the field for VAR on the receiver side,
   i.e. *CTX->receiver_decl.field, with an extra dereference when the
   field holds VAR by reference (BY_REF).  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, when not OMP_CLAUSE_ERROR, identifies the
   clause on whose behalf the outer reference is built and selects special
   handling for PRIVATE and LASTPRIVATE (taskloop).  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  /* Taskgroup contexts carry no data-sharing mappings; skip them.  */
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable-sized vars live behind a pointer in their value expr;
	 build the outer ref for that pointer and dereference it.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed by &DECL_UID rather than the decl
	 itself (see install_var_field's mask & 8 handling).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For a member-access dummy var, substitute its value expression,
	 remapping the underlying "this" parameter if needed.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
662 /* Build tree nodes to access the field for VAR on the sender side. */
664 static tree
665 build_sender_ref (splay_tree_key key, omp_context *ctx)
667 tree field = lookup_sfield (key, ctx);
668 return omp_build_component_ref (ctx->sender_decl, field);
671 static tree
672 build_sender_ref (tree var, omp_context *ctx)
674 return build_sender_ref ((splay_tree_key) var, ctx);
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK is
   a bitmask: bit 1 registers the field in field_map/record_type, bit 2 in
   sfield_map/srecord_type, bit 4 wraps an array type in a double pointer,
   and bit 8 keys the maps by &DECL_UID (var) instead of the decl itself.
   BY_REF requests a pointer-typed field.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily build srecord_type as a mirror of record_type so far,
	     mapping each original var to its mirrored field.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
776 static tree
777 install_var_local (tree var, omp_context *ctx)
779 tree new_var = omp_copy_decl_1 (var, ctx);
780 insert_decl_map (&ctx->cb, var, new_var);
781 return new_var;
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      /* Remap references inside the value expr into the new context.  */
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable-sized decl: remap both size expressions, falling back to
	 the (remapped) type's sizes if remapping failed.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the omp_context (copy_body_data is its first member).  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Forced/nonlocal labels must keep their identity; others get a
	 fresh artificial label in the child function.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward through enclosing taskreg contexts looking for an
     existing mapping.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.
   The new context is registered in ALL_CONTEXTS keyed by STMT; a root
   context initializes its copy_body_data from the current function,
   while a nested one inherits it from OUTER_CTX.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  /* Fresh decl map per context (freed in delete_omp_context).  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
895 static gimple_seq maybe_catch_exception (gimple_seq);
/* Finalize task copyfn: gimplify the task firstprivate copy function of
   TASK_STMT (if any), wrap it for EH if needed, and register it with the
   callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* EH wrapping produced a new sequence; re-wrap it in a bind.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy a omp_context data structures.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field needs remapping: rebuild the whole record.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1035 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1036 specified by CLAUSES. */
1038 static void
1039 scan_sharing_clauses (tree clauses, omp_context *ctx)
1041 tree c, decl;
1042 bool scan_array_reductions = false;
/* Pass 1: for every clause, install the corresponding field in the
   sender record type and/or a context-local copy of the decl.  */
1044 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1046 bool by_ref;
1048 switch (OMP_CLAUSE_CODE (c))
1050 case OMP_CLAUSE_PRIVATE:
1051 decl = OMP_CLAUSE_DECL (c);
1052 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1053 goto do_private;
1054 else if (!is_variable_sized (decl))
1055 install_var_local (decl, ctx);
1056 break;
1058 case OMP_CLAUSE_SHARED:
1059 decl = OMP_CLAUSE_DECL (c);
1060 /* Ignore shared directives in teams construct inside of
1061 target construct. */
1062 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1063 && !is_host_teams_ctx (ctx))
1065 /* Global variables don't need to be copied,
1066 the receiver side will use them directly. */
1067 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1068 if (is_global_var (odecl))
1069 break;
1070 insert_decl_map (&ctx->cb, decl, odecl);
1071 break;
1073 gcc_assert (is_taskreg_ctx (ctx));
1074 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1075 || !is_variable_sized (decl));
1076 /* Global variables don't need to be copied,
1077 the receiver side will use them directly. */
1078 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1079 break;
1080 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1082 use_pointer_for_field (decl, ctx);
1083 break;
1085 by_ref = use_pointer_for_field (decl, NULL);
1086 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1087 || TREE_ADDRESSABLE (decl)
1088 || by_ref
1089 || omp_is_reference (decl))
1091 by_ref = use_pointer_for_field (decl, ctx);
1092 install_var_field (decl, by_ref, 3, ctx);
1093 install_var_local (decl, ctx);
1094 break;
1096 /* We don't need to copy const scalar vars back. */
1097 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1098 goto do_private;
1100 case OMP_CLAUSE_REDUCTION:
1101 case OMP_CLAUSE_IN_REDUCTION:
1102 decl = OMP_CLAUSE_DECL (c);
/* Array-section reductions come in as a MEM_REF; peel the address
   expression back to the underlying base decl T.  */
1103 if (TREE_CODE (decl) == MEM_REF)
1105 tree t = TREE_OPERAND (decl, 0);
1106 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1107 t = TREE_OPERAND (t, 0);
1108 if (TREE_CODE (t) == INDIRECT_REF
1109 || TREE_CODE (t) == ADDR_EXPR)
1110 t = TREE_OPERAND (t, 0);
1111 install_var_local (t, ctx);
1112 if (is_taskreg_ctx (ctx)
1113 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1114 || (is_task_ctx (ctx)
1115 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1116 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1117 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1118 == POINTER_TYPE)))))
1119 && !is_variable_sized (t)
1120 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1121 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1122 && !is_task_ctx (ctx))))
1124 by_ref = use_pointer_for_field (t, NULL);
1125 if (is_task_ctx (ctx)
1126 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1127 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1129 install_var_field (t, false, 1, ctx);
1130 install_var_field (t, by_ref, 2, ctx);
1132 else
1133 install_var_field (t, by_ref, 3, ctx);
1135 break;
1137 if (is_task_ctx (ctx)
1138 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1139 && OMP_CLAUSE_REDUCTION_TASK (c)
1140 && is_parallel_ctx (ctx)))
1142 /* Global variables don't need to be copied,
1143 the receiver side will use them directly. */
1144 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1146 by_ref = use_pointer_for_field (decl, ctx);
1147 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1148 install_var_field (decl, by_ref, 3, ctx);
1150 install_var_local (decl, ctx);
1151 break;
1153 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1154 && OMP_CLAUSE_REDUCTION_TASK (c))
1156 install_var_local (decl, ctx);
1157 break;
1159 goto do_private;
1161 case OMP_CLAUSE_LASTPRIVATE:
1162 /* Let the corresponding firstprivate clause create
1163 the variable. */
1164 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1165 break;
1166 /* FALLTHRU */
1168 case OMP_CLAUSE_FIRSTPRIVATE:
1169 case OMP_CLAUSE_LINEAR:
1170 decl = OMP_CLAUSE_DECL (c);
1171 do_private:
1172 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1173 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1174 && is_gimple_omp_offloaded (ctx->stmt))
1176 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1177 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1178 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1179 install_var_field (decl, true, 3, ctx);
1180 else
1181 install_var_field (decl, false, 3, ctx);
1183 if (is_variable_sized (decl))
1185 if (is_task_ctx (ctx))
1186 install_var_field (decl, false, 1, ctx);
1187 break;
1189 else if (is_taskreg_ctx (ctx))
1191 bool global
1192 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1193 by_ref = use_pointer_for_field (decl, NULL);
1195 if (is_task_ctx (ctx)
1196 && (global || by_ref || omp_is_reference (decl)))
1198 install_var_field (decl, false, 1, ctx);
1199 if (!global)
1200 install_var_field (decl, by_ref, 2, ctx);
1202 else if (!global)
1203 install_var_field (decl, by_ref, 3, ctx);
1205 install_var_local (decl, ctx);
1206 break;
1208 case OMP_CLAUSE_USE_DEVICE_PTR:
1209 decl = OMP_CLAUSE_DECL (c);
1210 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1211 install_var_field (decl, true, 3, ctx);
1212 else
1213 install_var_field (decl, false, 3, ctx);
1214 if (DECL_SIZE (decl)
1215 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1217 tree decl2 = DECL_VALUE_EXPR (decl);
1218 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1219 decl2 = TREE_OPERAND (decl2, 0);
1220 gcc_assert (DECL_P (decl2));
1221 install_var_local (decl2, ctx);
1223 install_var_local (decl, ctx);
1224 break;
1226 case OMP_CLAUSE_IS_DEVICE_PTR:
1227 decl = OMP_CLAUSE_DECL (c);
1228 goto do_private;
1230 case OMP_CLAUSE__LOOPTEMP_:
1231 case OMP_CLAUSE__REDUCTEMP_:
1232 gcc_assert (is_taskreg_ctx (ctx));
1233 decl = OMP_CLAUSE_DECL (c);
1234 install_var_field (decl, false, 3, ctx);
1235 install_var_local (decl, ctx);
1236 break;
1238 case OMP_CLAUSE_COPYPRIVATE:
1239 case OMP_CLAUSE_COPYIN:
1240 decl = OMP_CLAUSE_DECL (c);
1241 by_ref = use_pointer_for_field (decl, NULL);
1242 install_var_field (decl, by_ref, 3, ctx);
1243 break;
1245 case OMP_CLAUSE_FINAL:
1246 case OMP_CLAUSE_IF:
1247 case OMP_CLAUSE_NUM_THREADS:
1248 case OMP_CLAUSE_NUM_TEAMS:
1249 case OMP_CLAUSE_THREAD_LIMIT:
1250 case OMP_CLAUSE_DEVICE:
1251 case OMP_CLAUSE_SCHEDULE:
1252 case OMP_CLAUSE_DIST_SCHEDULE:
1253 case OMP_CLAUSE_DEPEND:
1254 case OMP_CLAUSE_PRIORITY:
1255 case OMP_CLAUSE_GRAINSIZE:
1256 case OMP_CLAUSE_NUM_TASKS:
1257 case OMP_CLAUSE_NUM_GANGS:
1258 case OMP_CLAUSE_NUM_WORKERS:
1259 case OMP_CLAUSE_VECTOR_LENGTH:
/* These clauses carry an expression operand which is evaluated in the
   enclosing context, so scan it there.  */
1260 if (ctx->outer)
1261 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1262 break;
1264 case OMP_CLAUSE_TO:
1265 case OMP_CLAUSE_FROM:
1266 case OMP_CLAUSE_MAP:
1267 if (ctx->outer)
1268 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1269 decl = OMP_CLAUSE_DECL (c);
1270 /* Global variables with "omp declare target" attribute
1271 don't need to be copied, the receiver side will use them
1272 directly. However, global variables with "omp declare target link"
1273 attribute need to be copied. Or when ALWAYS modifier is used. */
1274 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1275 && DECL_P (decl)
1276 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1277 && (OMP_CLAUSE_MAP_KIND (c)
1278 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1279 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1280 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1281 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1282 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1283 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1284 && varpool_node::get_create (decl)->offloadable
1285 && !lookup_attribute ("omp declare target link",
1286 DECL_ATTRIBUTES (decl)))
1287 break;
1288 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1289 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1291 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1292 not offloaded; there is nothing to map for those. */
1293 if (!is_gimple_omp_offloaded (ctx->stmt)
1294 && !POINTER_TYPE_P (TREE_TYPE (decl))
1295 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1296 break;
1298 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1299 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1300 || (OMP_CLAUSE_MAP_KIND (c)
1301 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1303 if (TREE_CODE (decl) == COMPONENT_REF
1304 || (TREE_CODE (decl) == INDIRECT_REF
1305 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1306 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1307 == REFERENCE_TYPE)))
1308 break;
1309 if (DECL_SIZE (decl)
1310 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1312 tree decl2 = DECL_VALUE_EXPR (decl);
1313 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1314 decl2 = TREE_OPERAND (decl2, 0);
1315 gcc_assert (DECL_P (decl2));
1316 install_var_local (decl2, ctx);
1318 install_var_local (decl, ctx);
1319 break;
1321 if (DECL_P (decl))
1323 if (DECL_SIZE (decl)
1324 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1326 tree decl2 = DECL_VALUE_EXPR (decl);
1327 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1328 decl2 = TREE_OPERAND (decl2, 0);
1329 gcc_assert (DECL_P (decl2));
1330 install_var_field (decl2, true, 3, ctx);
1331 install_var_local (decl2, ctx);
1332 install_var_local (decl, ctx);
1334 else
1336 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1337 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1338 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1339 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1340 install_var_field (decl, true, 7, ctx);
1341 else
1342 install_var_field (decl, true, 3, ctx);
1343 if (is_gimple_omp_offloaded (ctx->stmt)
1344 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1345 install_var_local (decl, ctx);
1348 else
1350 tree base = get_base_address (decl);
1351 tree nc = OMP_CLAUSE_CHAIN (c);
/* An array-section map immediately followed by a zero-sized
   GOMP_MAP_POINTER for its base is a zero-bias section pair.  */
1352 if (DECL_P (base)
1353 && nc != NULL_TREE
1354 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1355 && OMP_CLAUSE_DECL (nc) == base
1356 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1357 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1359 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1360 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1362 else
1364 if (ctx->outer)
1366 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1367 decl = OMP_CLAUSE_DECL (c);
1369 gcc_assert (!splay_tree_lookup (ctx->field_map,
1370 (splay_tree_key) decl));
1371 tree field
1372 = build_decl (OMP_CLAUSE_LOCATION (c),
1373 FIELD_DECL, NULL_TREE, ptr_type_node);
1374 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1375 insert_field_into_struct (ctx->record_type, field);
1376 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1377 (splay_tree_value) field);
1380 break;
1382 case OMP_CLAUSE__GRIDDIM_:
1383 if (ctx->outer)
1385 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1386 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1388 break;
1390 case OMP_CLAUSE_NOWAIT:
1391 case OMP_CLAUSE_ORDERED:
1392 case OMP_CLAUSE_COLLAPSE:
1393 case OMP_CLAUSE_UNTIED:
1394 case OMP_CLAUSE_MERGEABLE:
1395 case OMP_CLAUSE_PROC_BIND:
1396 case OMP_CLAUSE_SAFELEN:
1397 case OMP_CLAUSE_SIMDLEN:
1398 case OMP_CLAUSE_THREADS:
1399 case OMP_CLAUSE_SIMD:
1400 case OMP_CLAUSE_NOGROUP:
1401 case OMP_CLAUSE_DEFAULTMAP:
1402 case OMP_CLAUSE_ASYNC:
1403 case OMP_CLAUSE_WAIT:
1404 case OMP_CLAUSE_GANG:
1405 case OMP_CLAUSE_WORKER:
1406 case OMP_CLAUSE_VECTOR:
1407 case OMP_CLAUSE_INDEPENDENT:
1408 case OMP_CLAUSE_AUTO:
1409 case OMP_CLAUSE_SEQ:
1410 case OMP_CLAUSE_TILE:
1411 case OMP_CLAUSE__SIMT_:
1412 case OMP_CLAUSE_DEFAULT:
1413 case OMP_CLAUSE_NONTEMPORAL:
1414 case OMP_CLAUSE_IF_PRESENT:
1415 case OMP_CLAUSE_FINALIZE:
1416 case OMP_CLAUSE_TASK_REDUCTION:
1417 break;
1419 case OMP_CLAUSE_ALIGNED:
1420 decl = OMP_CLAUSE_DECL (c);
1421 if (is_global_var (decl)
1422 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1423 install_var_local (decl, ctx);
1424 break;
1426 case OMP_CLAUSE__CONDTEMP_:
1427 decl = OMP_CLAUSE_DECL (c);
1428 if (is_parallel_ctx (ctx))
1430 install_var_field (decl, false, 3, ctx);
1431 install_var_local (decl, ctx);
1433 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1434 && (gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
1435 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1436 install_var_local (decl, ctx);
1437 break;
1439 case OMP_CLAUSE__CACHE_:
1440 default:
1441 gcc_unreachable ();
/* Pass 2: all fields now exist, so fix up remapped decls and note
   which clauses carry GIMPLE sequences that must be rescanned.  */
1445 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1447 switch (OMP_CLAUSE_CODE (c))
1449 case OMP_CLAUSE_LASTPRIVATE:
1450 /* Let the corresponding firstprivate clause create
1451 the variable. */
1452 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1453 scan_array_reductions = true;
1454 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1455 break;
1456 /* FALLTHRU */
1458 case OMP_CLAUSE_FIRSTPRIVATE:
1459 case OMP_CLAUSE_PRIVATE:
1460 case OMP_CLAUSE_LINEAR:
1461 case OMP_CLAUSE_IS_DEVICE_PTR:
1462 decl = OMP_CLAUSE_DECL (c);
1463 if (is_variable_sized (decl))
1465 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1466 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1467 && is_gimple_omp_offloaded (ctx->stmt))
1469 tree decl2 = DECL_VALUE_EXPR (decl);
1470 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1471 decl2 = TREE_OPERAND (decl2, 0);
1472 gcc_assert (DECL_P (decl2));
1473 install_var_local (decl2, ctx);
1474 fixup_remapped_decl (decl2, ctx, false);
1476 install_var_local (decl, ctx);
1478 fixup_remapped_decl (decl, ctx,
1479 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1480 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1481 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1482 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1483 scan_array_reductions = true;
1484 break;
1486 case OMP_CLAUSE_REDUCTION:
1487 case OMP_CLAUSE_IN_REDUCTION:
1488 decl = OMP_CLAUSE_DECL (c);
1489 if (TREE_CODE (decl) != MEM_REF)
1491 if (is_variable_sized (decl))
1492 install_var_local (decl, ctx);
1493 fixup_remapped_decl (decl, ctx, false);
1495 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1496 scan_array_reductions = true;
1497 break;
1499 case OMP_CLAUSE_TASK_REDUCTION:
1500 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1501 scan_array_reductions = true;
1502 break;
1504 case OMP_CLAUSE_SHARED:
1505 /* Ignore shared directives in teams construct inside of
1506 target construct. */
1507 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1508 && !is_host_teams_ctx (ctx))
1509 break;
1510 decl = OMP_CLAUSE_DECL (c);
1511 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1512 break;
1513 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1515 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1516 ctx->outer)))
1517 break;
1518 bool by_ref = use_pointer_for_field (decl, ctx);
1519 install_var_field (decl, by_ref, 11, ctx);
1520 break;
1522 fixup_remapped_decl (decl, ctx, false);
1523 break;
1525 case OMP_CLAUSE_MAP:
1526 if (!is_gimple_omp_offloaded (ctx->stmt))
1527 break;
1528 decl = OMP_CLAUSE_DECL (c);
1529 if (DECL_P (decl)
1530 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1531 && (OMP_CLAUSE_MAP_KIND (c)
1532 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1533 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1534 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1535 && varpool_node::get_create (decl)->offloadable)
1536 break;
1537 if (DECL_P (decl))
1539 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1540 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1541 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1542 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1544 tree new_decl = lookup_decl (decl, ctx);
1545 TREE_TYPE (new_decl)
1546 = remap_type (TREE_TYPE (decl), &ctx->cb);
1548 else if (DECL_SIZE (decl)
1549 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1551 tree decl2 = DECL_VALUE_EXPR (decl);
1552 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1553 decl2 = TREE_OPERAND (decl2, 0);
1554 gcc_assert (DECL_P (decl2));
1555 fixup_remapped_decl (decl2, ctx, false);
1556 fixup_remapped_decl (decl, ctx, true);
1558 else
1559 fixup_remapped_decl (decl, ctx, false);
1561 break;
1563 case OMP_CLAUSE_COPYPRIVATE:
1564 case OMP_CLAUSE_COPYIN:
1565 case OMP_CLAUSE_DEFAULT:
1566 case OMP_CLAUSE_IF:
1567 case OMP_CLAUSE_NUM_THREADS:
1568 case OMP_CLAUSE_NUM_TEAMS:
1569 case OMP_CLAUSE_THREAD_LIMIT:
1570 case OMP_CLAUSE_DEVICE:
1571 case OMP_CLAUSE_SCHEDULE:
1572 case OMP_CLAUSE_DIST_SCHEDULE:
1573 case OMP_CLAUSE_NOWAIT:
1574 case OMP_CLAUSE_ORDERED:
1575 case OMP_CLAUSE_COLLAPSE:
1576 case OMP_CLAUSE_UNTIED:
1577 case OMP_CLAUSE_FINAL:
1578 case OMP_CLAUSE_MERGEABLE:
1579 case OMP_CLAUSE_PROC_BIND:
1580 case OMP_CLAUSE_SAFELEN:
1581 case OMP_CLAUSE_SIMDLEN:
1582 case OMP_CLAUSE_ALIGNED:
1583 case OMP_CLAUSE_DEPEND:
1584 case OMP_CLAUSE__LOOPTEMP_:
1585 case OMP_CLAUSE__REDUCTEMP_:
1586 case OMP_CLAUSE_TO:
1587 case OMP_CLAUSE_FROM:
1588 case OMP_CLAUSE_PRIORITY:
1589 case OMP_CLAUSE_GRAINSIZE:
1590 case OMP_CLAUSE_NUM_TASKS:
1591 case OMP_CLAUSE_THREADS:
1592 case OMP_CLAUSE_SIMD:
1593 case OMP_CLAUSE_NOGROUP:
1594 case OMP_CLAUSE_DEFAULTMAP:
1595 case OMP_CLAUSE_USE_DEVICE_PTR:
1596 case OMP_CLAUSE_NONTEMPORAL:
1597 case OMP_CLAUSE_ASYNC:
1598 case OMP_CLAUSE_WAIT:
1599 case OMP_CLAUSE_NUM_GANGS:
1600 case OMP_CLAUSE_NUM_WORKERS:
1601 case OMP_CLAUSE_VECTOR_LENGTH:
1602 case OMP_CLAUSE_GANG:
1603 case OMP_CLAUSE_WORKER:
1604 case OMP_CLAUSE_VECTOR:
1605 case OMP_CLAUSE_INDEPENDENT:
1606 case OMP_CLAUSE_AUTO:
1607 case OMP_CLAUSE_SEQ:
1608 case OMP_CLAUSE_TILE:
1609 case OMP_CLAUSE__GRIDDIM_:
1610 case OMP_CLAUSE__SIMT_:
1611 case OMP_CLAUSE_IF_PRESENT:
1612 case OMP_CLAUSE_FINALIZE:
1613 case OMP_CLAUSE__CONDTEMP_:
1614 break;
1616 case OMP_CLAUSE__CACHE_:
1617 default:
1618 gcc_unreachable ();
/* Finally, rescan any helper GIMPLE sequences (reduction init/merge,
   lastprivate and linear sequences) inside this context.  */
1622 gcc_checking_assert (!scan_array_reductions
1623 || !is_gimple_omp_oacc (ctx->stmt));
1624 if (scan_array_reductions)
1626 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1627 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1628 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1629 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1630 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1632 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1633 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1635 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1636 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1637 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1638 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1639 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1640 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1644 /* Create a new name for omp child function. Returns an identifier. */
1646 static tree
1647 create_omp_child_function_name (bool task_copy)
1649 return clone_function_name_numbered (current_function_decl,
1650 task_copy ? "_omp_cpyfn" : "_omp_fn");
1653 /* Return true if CTX may belong to offloaded code: either if current function
1654 is offloaded, or any enclosing context corresponds to a target region. */
1656 static bool
1657 omp_maybe_offloaded_ctx (omp_context *ctx)
1659 if (cgraph_node::get (current_function_decl)->offloadable)
1660 return true;
1661 for (; ctx; ctx = ctx->outer)
1662 if (is_gimple_omp_offloaded (ctx->stmt))
1663 return true;
1664 return false;
1667 /* Build a decl for the omp child function. It'll not contain a body
1668 yet, just the bare decl. */
1670 static void
1671 create_omp_child_function (omp_context *ctx, bool task_copy)
1673 tree decl, type, name, t;
1675 name = create_omp_child_function_name (task_copy);
/* The task copy function takes two pointers (src/dst data blocks);
   the ordinary child function takes a single data pointer.  */
1676 if (task_copy)
1677 type = build_function_type_list (void_type_node, ptr_type_node,
1678 ptr_type_node, NULL_TREE);
1679 else
1680 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1682 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1684 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1685 || !task_copy);
1686 if (!task_copy)
1687 ctx->cb.dst_fn = decl;
1688 else
1689 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
/* The child function is artificial, local to this TU and must never be
   inlined back into its caller.  */
1691 TREE_STATIC (decl) = 1;
1692 TREE_USED (decl) = 1;
1693 DECL_ARTIFICIAL (decl) = 1;
1694 DECL_IGNORED_P (decl) = 0;
1695 TREE_PUBLIC (decl) = 0;
1696 DECL_UNINLINABLE (decl) = 1;
1697 DECL_EXTERNAL (decl) = 0;
1698 DECL_CONTEXT (decl) = NULL_TREE;
1699 DECL_INITIAL (decl) = make_node (BLOCK);
1700 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1701 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1702 /* Remove omp declare simd attribute from the new attributes. */
1703 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1705 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1706 a = a2;
1707 a = TREE_CHAIN (a);
/* Copy nodes up to the last "omp declare simd" so the parent's
   attribute list is not modified while the matching attributes are
   dropped from the child's copy.  */
1708 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1709 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1710 *p = TREE_CHAIN (*p);
1711 else
1713 tree chain = TREE_CHAIN (*p);
1714 *p = copy_node (*p);
1715 p = &TREE_CHAIN (*p);
1716 *p = chain;
1719 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1720 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1721 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1722 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1723 DECL_FUNCTION_VERSIONED (decl)
1724 = DECL_FUNCTION_VERSIONED (current_function_decl);
/* Propagate offloadability and mark target entry points.  */
1726 if (omp_maybe_offloaded_ctx (ctx))
1728 cgraph_node::get_create (decl)->offloadable = 1;
1729 if (ENABLE_OFFLOADING)
1730 g->have_offload = true;
1733 if (cgraph_node::get_create (decl)->offloadable
1734 && !lookup_attribute ("omp declare target",
1735 DECL_ATTRIBUTES (current_function_decl)))
1737 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1738 ? "omp target entrypoint"
1739 : "omp declare target");
1740 DECL_ATTRIBUTES (decl)
1741 = tree_cons (get_identifier (target_attr),
1742 NULL_TREE, DECL_ATTRIBUTES (decl));
/* void result.  */
1745 t = build_decl (DECL_SOURCE_LOCATION (decl),
1746 RESULT_DECL, NULL_TREE, void_type_node);
1747 DECL_ARTIFICIAL (t) = 1;
1748 DECL_IGNORED_P (t) = 1;
1749 DECL_CONTEXT (t) = decl;
1750 DECL_RESULT (decl) = t;
/* First argument: pointer to the incoming data block (.omp_data_i);
   for the non-copy function this becomes ctx->receiver_decl.  */
1752 tree data_name = get_identifier (".omp_data_i");
1753 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1754 ptr_type_node);
1755 DECL_ARTIFICIAL (t) = 1;
1756 DECL_NAMELESS (t) = 1;
1757 DECL_ARG_TYPE (t) = ptr_type_node;
1758 DECL_CONTEXT (t) = current_function_decl;
1759 TREE_USED (t) = 1;
1760 TREE_READONLY (t) = 1;
1761 DECL_ARGUMENTS (decl) = t;
1762 if (!task_copy)
1763 ctx->receiver_decl = t;
1764 else
1766 t = build_decl (DECL_SOURCE_LOCATION (decl),
1767 PARM_DECL, get_identifier (".omp_data_o"),
1768 ptr_type_node);
1769 DECL_ARTIFICIAL (t) = 1;
1770 DECL_NAMELESS (t) = 1;
1771 DECL_ARG_TYPE (t) = ptr_type_node;
1772 DECL_CONTEXT (t) = current_function_decl;
1773 TREE_USED (t) = 1;
1774 TREE_ADDRESSABLE (t) = 1;
1775 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1776 DECL_ARGUMENTS (decl) = t;
1779 /* Allocate memory for the function structure. The call to
1780 allocate_struct_function clobbers CFUN, so we need to restore
1781 it afterward. */
1782 push_struct_function (decl);
1783 cfun->function_end_locus = gimple_location (ctx->stmt);
1784 init_tree_ssa (cfun);
1785 pop_cfun ();
1788 /* Callback for walk_gimple_seq. Check if combined parallel
1789 contains gimple_omp_for_combined_into_p OMP_FOR. */
1791 tree
1792 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1793 bool *handled_ops_p,
1794 struct walk_stmt_info *wi)
1796 gimple *stmt = gsi_stmt (*gsi_p);
1798 *handled_ops_p = true;
1799 switch (gimple_code (stmt))
1801 WALK_SUBSTMTS;
1803 case GIMPLE_OMP_FOR:
1804 if (gimple_omp_for_combined_into_p (stmt)
1805 && gimple_omp_for_kind (stmt)
1806 == *(const enum gf_mask *) (wi->info))
1808 wi->info = stmt;
1809 return integer_zero_node;
1811 break;
1812 default:
1813 break;
1815 return NULL;
1818 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1820 static void
1821 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1822 omp_context *outer_ctx)
1824 struct walk_stmt_info wi;
1826 memset (&wi, 0, sizeof (wi));
1827 wi.val_only = true;
1828 wi.info = (void *) &msk;
1829 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
/* omp_find_combined_for overwrites wi.info with the matching inner
   GIMPLE_OMP_FOR, so an unchanged pointer means no match.  */
1830 if (wi.info != (void *) &msk)
1832 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1833 struct omp_for_data fd;
1834 omp_extract_for_data (for_stmt, &fd, NULL);
1835 /* We need two temporaries with fd.loop.v type (istart/iend)
1836 and then (fd.collapse - 1) temporaries with the same
1837 type for count2 ... countN-1 vars if not constant. */
1838 size_t count = 2, i;
1839 tree type = fd.iter_type;
1840 if (fd.collapse > 1
1841 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1843 count += fd.collapse - 1;
1844 /* If there are lastprivate clauses on the inner
1845 GIMPLE_OMP_FOR, add one more temporaries for the total number
1846 of iterations (product of count1 ... countN-1). */
1847 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1848 OMP_CLAUSE_LASTPRIVATE))
1849 count++;
1850 else if (msk == GF_OMP_FOR_KIND_FOR
1851 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1852 OMP_CLAUSE_LASTPRIVATE))
1853 count++;
/* Prepend COUNT fresh _looptemp_ clauses to the taskreg stmt.  */
1855 for (i = 0; i < count; i++)
1857 tree temp = create_tmp_var (type);
1858 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1859 insert_decl_map (&outer_ctx->cb, temp, temp);
1860 OMP_CLAUSE_DECL (c) = temp;
1861 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1862 gimple_omp_taskreg_set_clauses (stmt, c);
/* A taskloop with reductions additionally needs one _reductemp_.  */
1865 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1866 && omp_find_clause (gimple_omp_task_clauses (stmt),
1867 OMP_CLAUSE_REDUCTION))
1869 tree type = build_pointer_type (pointer_sized_int_node);
1870 tree temp = create_tmp_var (type);
1871 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1872 insert_decl_map (&outer_ctx->cb, temp, temp);
1873 OMP_CLAUSE_DECL (c) = temp;
1874 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1875 gimple_omp_task_set_clauses (stmt, c);
1879 /* Scan an OpenMP parallel directive. */
1881 static void
1882 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1884 omp_context *ctx;
1885 tree name;
1886 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1888 /* Ignore parallel directives with empty bodies, unless there
1889 are copyin clauses. */
1890 if (optimize > 0
1891 && empty_body_p (gimple_omp_body (stmt))
1892 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1893 OMP_CLAUSE_COPYIN) == NULL)
1895 gsi_replace (gsi, gimple_build_nop (), false);
1896 return;
1899 if (gimple_omp_parallel_combined_p (stmt))
1900 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
/* If any reduction clause is a task reduction, prepend a single
   _reductemp_ clause holding a fresh temporary.  */
1901 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1902 OMP_CLAUSE_REDUCTION);
1903 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1904 if (OMP_CLAUSE_REDUCTION_TASK (c))
1906 tree type = build_pointer_type (pointer_sized_int_node);
1907 tree temp = create_tmp_var (type);
1908 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1909 if (outer_ctx)
1910 insert_decl_map (&outer_ctx->cb, temp, temp);
1911 OMP_CLAUSE_DECL (c) = temp;
1912 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1913 gimple_omp_parallel_set_clauses (stmt, c);
1914 break;
1916 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1917 break;
/* Create the context and the .omp_data_s record type that carries
   the shared data into the child function.  */
1919 ctx = new_omp_context (stmt, outer_ctx);
1920 taskreg_contexts.safe_push (ctx);
1921 if (taskreg_nesting_level > 1)
1922 ctx->is_nested = true;
1923 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1924 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1925 name = create_tmp_var_name (".omp_data_s");
1926 name = build_decl (gimple_location (stmt),
1927 TYPE_DECL, name, ctx->record_type);
1928 DECL_ARTIFICIAL (name) = 1;
1929 DECL_NAMELESS (name) = 1;
1930 TYPE_NAME (ctx->record_type) = name;
1931 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1932 if (!gimple_omp_parallel_grid_phony (stmt))
1934 create_omp_child_function (ctx, false);
1935 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1938 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1939 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* If nothing was installed in the record, drop it entirely.  */
1941 if (TYPE_FIELDS (ctx->record_type) == NULL)
1942 ctx->record_type = ctx->receiver_decl = NULL;
1945 /* Scan an OpenMP task directive. */
1947 static void
1948 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1950 omp_context *ctx;
1951 tree name, t;
1952 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1954 /* Ignore task directives with empty bodies, unless they have depend
1955 clause. */
1956 if (optimize > 0
1957 && gimple_omp_body (stmt)
1958 && empty_body_p (gimple_omp_body (stmt))
1959 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1961 gsi_replace (gsi, gimple_build_nop (), false);
1962 return;
1965 if (gimple_omp_task_taskloop_p (stmt))
1966 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1968 ctx = new_omp_context (stmt, outer_ctx);
/* A bare taskwait (with depend) needs no child function or record.  */
1970 if (gimple_omp_task_taskwait_p (stmt))
1972 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1973 return;
1976 taskreg_contexts.safe_push (ctx);
1977 if (taskreg_nesting_level > 1)
1978 ctx->is_nested = true;
1979 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1980 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1981 name = create_tmp_var_name (".omp_data_s");
1982 name = build_decl (gimple_location (stmt),
1983 TYPE_DECL, name, ctx->record_type);
1984 DECL_ARTIFICIAL (name) = 1;
1985 DECL_NAMELESS (name) = 1;
1986 TYPE_NAME (ctx->record_type) = name;
1987 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1988 create_omp_child_function (ctx, false);
1989 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1991 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
/* If a sender record was created, also name it and build the task
   copy function (task_copy = true).  */
1993 if (ctx->srecord_type)
1995 name = create_tmp_var_name (".omp_data_a");
1996 name = build_decl (gimple_location (stmt),
1997 TYPE_DECL, name, ctx->srecord_type);
1998 DECL_ARTIFICIAL (name) = 1;
1999 DECL_NAMELESS (name) = 1;
2000 TYPE_NAME (ctx->srecord_type) = name;
2001 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2002 create_omp_child_function (ctx, true);
2005 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* Empty record: drop it and report trivial size/alignment.  */
2007 if (TYPE_FIELDS (ctx->record_type) == NULL)
2009 ctx->record_type = ctx->receiver_decl = NULL;
2010 t = build_int_cst (long_integer_type_node, 0);
2011 gimple_omp_task_set_arg_size (stmt, t);
2012 t = build_int_cst (long_integer_type_node, 1);
2013 gimple_omp_task_set_arg_align (stmt, t);
2017 /* Helper function for finish_taskreg_scan, called through walk_tree.
2018 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2019 tree, replace it in the expression. */
2021 static tree
2022 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2024 if (VAR_P (*tp))
2026 omp_context *ctx = (omp_context *) data;
2027 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2028 if (t != *tp)
2030 if (DECL_HAS_VALUE_EXPR_P (t))
2031 t = unshare_expr (DECL_VALUE_EXPR (t));
2032 *tp = t;
2034 *walk_subtrees = 0;
2036 else if (IS_TYPE_OR_DECL_P (*tp))
2037 *walk_subtrees = 0;
2038 return NULL_TREE;
2041 /* If any decls have been made addressable during scan_omp,
2042 adjust their fields if needed, and layout record types
2043 of parallel/task constructs. */
2045 static void
2046 finish_taskreg_scan (omp_context *ctx)
2048 if (ctx->record_type == NULL_TREE)
2049 return;
2051 /* If any task_shared_vars were needed, verify all
2052 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2053 statements if use_pointer_for_field hasn't changed
2054 because of that. If it did, update field types now. */
2055 if (task_shared_vars)
2057 tree c;
2059 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2060 c; c = OMP_CLAUSE_CHAIN (c))
2061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2062 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2064 tree decl = OMP_CLAUSE_DECL (c);
2066 /* Global variables don't need to be copied,
2067 the receiver side will use them directly. */
2068 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2069 continue;
2070 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2071 || !use_pointer_for_field (decl, ctx))
2072 continue;
2073 tree field = lookup_field (decl, ctx);
2074 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2075 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2076 continue;
2077 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2078 TREE_THIS_VOLATILE (field) = 0;
2079 DECL_USER_ALIGN (field) = 0;
2080 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2081 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2082 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2083 if (ctx->srecord_type)
2085 tree sfield = lookup_sfield (decl, ctx);
2086 TREE_TYPE (sfield) = TREE_TYPE (field);
2087 TREE_THIS_VOLATILE (sfield) = 0;
2088 DECL_USER_ALIGN (sfield) = 0;
2089 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2090 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2091 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2096 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2098 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2099 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2100 if (c)
2102 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2103 expects to find it at the start of data. */
2104 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2105 tree *p = &TYPE_FIELDS (ctx->record_type);
2106 while (*p)
2107 if (*p == f)
2109 *p = DECL_CHAIN (*p);
2110 break;
2112 else
2113 p = &DECL_CHAIN (*p);
2114 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2115 TYPE_FIELDS (ctx->record_type) = f;
2117 layout_type (ctx->record_type);
2118 fixup_child_record_type (ctx);
2120 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2122 layout_type (ctx->record_type);
2123 fixup_child_record_type (ctx);
2125 else
2127 location_t loc = gimple_location (ctx->stmt);
2128 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2129 /* Move VLA fields to the end. */
2130 p = &TYPE_FIELDS (ctx->record_type);
2131 while (*p)
2132 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2133 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2135 *q = *p;
2136 *p = TREE_CHAIN (*p);
2137 TREE_CHAIN (*q) = NULL_TREE;
2138 q = &TREE_CHAIN (*q);
2140 else
2141 p = &DECL_CHAIN (*p);
2142 *p = vla_fields;
2143 if (gimple_omp_task_taskloop_p (ctx->stmt))
2145 /* Move fields corresponding to first and second _looptemp_
2146 clause first. There are filled by GOMP_taskloop
2147 and thus need to be in specific positions. */
2148 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2149 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2150 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2151 OMP_CLAUSE__LOOPTEMP_);
2152 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2153 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2154 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2155 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2156 p = &TYPE_FIELDS (ctx->record_type);
2157 while (*p)
2158 if (*p == f1 || *p == f2 || *p == f3)
2159 *p = DECL_CHAIN (*p);
2160 else
2161 p = &DECL_CHAIN (*p);
2162 DECL_CHAIN (f1) = f2;
2163 if (c3)
2165 DECL_CHAIN (f2) = f3;
2166 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2168 else
2169 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2170 TYPE_FIELDS (ctx->record_type) = f1;
2171 if (ctx->srecord_type)
2173 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2174 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2175 if (c3)
2176 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2177 p = &TYPE_FIELDS (ctx->srecord_type);
2178 while (*p)
2179 if (*p == f1 || *p == f2 || *p == f3)
2180 *p = DECL_CHAIN (*p);
2181 else
2182 p = &DECL_CHAIN (*p);
2183 DECL_CHAIN (f1) = f2;
2184 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2185 if (c3)
2187 DECL_CHAIN (f2) = f3;
2188 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2190 else
2191 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2192 TYPE_FIELDS (ctx->srecord_type) = f1;
2195 layout_type (ctx->record_type);
2196 fixup_child_record_type (ctx);
2197 if (ctx->srecord_type)
2198 layout_type (ctx->srecord_type);
2199 tree t = fold_convert_loc (loc, long_integer_type_node,
2200 TYPE_SIZE_UNIT (ctx->record_type));
2201 if (TREE_CODE (t) != INTEGER_CST)
2203 t = unshare_expr (t);
2204 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2206 gimple_omp_task_set_arg_size (ctx->stmt, t);
2207 t = build_int_cst (long_integer_type_node,
2208 TYPE_ALIGN_UNIT (ctx->record_type));
2209 gimple_omp_task_set_arg_align (ctx->stmt, t);
2213 /* Find the enclosing offload context. */
2215 static omp_context *
2216 enclosing_target_ctx (omp_context *ctx)
2218 for (; ctx; ctx = ctx->outer)
2219 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2220 break;
2222 return ctx;
2225 /* Return true if ctx is part of an oacc kernels region. */
2227 static bool
2228 ctx_in_oacc_kernels_region (omp_context *ctx)
2230 for (;ctx != NULL; ctx = ctx->outer)
2232 gimple *stmt = ctx->stmt;
2233 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2234 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2235 return true;
2238 return false;
2241 /* Check the parallelism clauses inside a kernels regions.
2242 Until kernels handling moves to use the same loop indirection
2243 scheme as parallel, we need to do this checking early. */
/* Returns the union of GOMP_DIM_MASK bits (gang/worker/vector) used
   by STMT's loop and all enclosing loops in CTX's chain.  Recursive
   calls pass STMT == NULL, which computes the mask for an outer level
   without emitting diagnostics there.  */
2245 static unsigned
2246 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2248 bool checking = true;
2249 unsigned outer_mask = 0;
2250 unsigned this_mask = 0;
2251 bool has_seq = false, has_auto = false;
/* First accumulate the parallelism already claimed by enclosing
   contexts.  */
2253 if (ctx->outer)
2254 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2255 if (!stmt)
2257 checking = false;
2258 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2259 return outer_mask;
2260 stmt = as_a <gomp_for *> (ctx->stmt);
/* Collect gang/worker/vector/seq/auto clauses present on this loop.  */
2263 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2265 switch (OMP_CLAUSE_CODE (c))
2267 case OMP_CLAUSE_GANG:
2268 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2269 break;
2270 case OMP_CLAUSE_WORKER:
2271 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2272 break;
2273 case OMP_CLAUSE_VECTOR:
2274 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2275 break;
2276 case OMP_CLAUSE_SEQ:
2277 has_seq = true;
2278 break;
2279 case OMP_CLAUSE_AUTO:
2280 has_auto = true;
2281 break;
2282 default:
2283 break;
/* Diagnostics are emitted only at the non-recursive (STMT != NULL)
   invocation.  */
2287 if (checking)
2289 if (has_seq && (this_mask || has_auto))
2290 error_at (gimple_location (stmt), "%<seq%> overrides other"
2291 " OpenACC loop specifiers");
2292 else if (has_auto && this_mask)
2293 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2294 " OpenACC loop specifiers");
2296 if (this_mask & outer_mask)
2297 error_at (gimple_location (stmt), "inner loop uses same"
2298 " OpenACC parallelism as containing loop");
2301 return outer_mask | this_mask;
2304 /* Scan a GIMPLE_OMP_FOR. */
/* Create a new omp_context for STMT nested in OUTER_CTX, perform
   OpenACC-specific clause checking and cleanup, then scan the
   clauses, pre-body, loop control operands and body.  Returns the
   newly created context.  */
2306 static omp_context *
2307 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2309 omp_context *ctx;
2310 size_t i;
2311 tree clauses = gimple_omp_for_clauses (stmt);
2313 ctx = new_omp_context (stmt, outer_ctx);
2315 if (is_gimple_omp_oacc (stmt))
2317 omp_context *tgt = enclosing_target_ctx (outer_ctx);
/* In an OpenACC parallel region (or when orphaned, i.e. no
   enclosing target), gang/worker/vector clauses may not carry an
   argument.  */
2319 if (!tgt || is_oacc_parallel (tgt))
2320 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2322 char const *check = NULL;
2324 switch (OMP_CLAUSE_CODE (c))
2326 case OMP_CLAUSE_GANG:
2327 check = "gang";
2328 break;
2330 case OMP_CLAUSE_WORKER:
2331 check = "worker";
2332 break;
2334 case OMP_CLAUSE_VECTOR:
2335 check = "vector";
2336 break;
2338 default:
2339 break;
2342 if (check && OMP_CLAUSE_OPERAND (c, 0))
2343 error_at (gimple_location (stmt),
2344 "argument not permitted on %qs clause in"
2345 " OpenACC %<parallel%>", check);
2348 if (tgt && is_oacc_kernels (tgt))
2350 /* Strip out reductions, as they are not handled yet. */
2351 tree *prev_ptr = &clauses;
2353 while (tree probe = *prev_ptr)
2355 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2357 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2358 *prev_ptr = *next_ptr;
2359 else
2360 prev_ptr = next_ptr;
/* Store back the possibly-shortened clause list.  */
2363 gimple_omp_for_set_clauses (stmt, clauses);
2364 check_oacc_kernel_gwv (stmt, ctx);
2368 scan_sharing_clauses (clauses, ctx);
2370 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
/* Scan the index, bounds and increment of each collapsed loop.  */
2371 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2373 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2374 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2375 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2376 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2378 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2379 return ctx;
2382 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
/* Replaces STMT at *GSI with a GIMPLE_BIND that dispatches at run
   time on the result of IFN_GOMP_USE_SIMT: one branch runs a copy of
   the loop marked with an artificial _simt_ clause, the other runs
   the original loop.  Both copies are then scanned in OUTER_CTX and
   linked via the copy's simt_stmt field.  */
2384 static void
2385 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2386 omp_context *outer_ctx)
2388 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2389 gsi_replace (gsi, bind, false);
2390 gimple_seq seq = NULL;
/* cond = IFN_GOMP_USE_SIMT ();  decides which branch runs.  */
2391 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2392 tree cond = create_tmp_var_raw (integer_type_node);
2393 DECL_CONTEXT (cond) = current_function_decl;
2394 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2395 gimple_bind_set_vars (bind, cond);
2396 gimple_call_set_lhs (g, cond);
2397 gimple_seq_add_stmt (&seq, g);
/* lab1: SIMT branch, lab2: SIMD branch, lab3: join point.  */
2398 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2399 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2400 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2401 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2402 gimple_seq_add_stmt (&seq, g);
2403 g = gimple_build_label (lab1);
2404 gimple_seq_add_stmt (&seq, g);
/* Deep-copy the loop and prepend an OMP_CLAUSE__SIMT_ marker to the
   copy's clause list.  */
2405 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2406 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2407 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2408 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2409 gimple_omp_for_set_clauses (new_stmt, clause);
2410 gimple_seq_add_stmt (&seq, new_stmt);
2411 g = gimple_build_goto (lab3);
2412 gimple_seq_add_stmt (&seq, g);
2413 g = gimple_build_label (lab2);
2414 gimple_seq_add_stmt (&seq, g);
2415 gimple_seq_add_stmt (&seq, stmt);
2416 g = gimple_build_label (lab3);
2417 gimple_seq_add_stmt (&seq, g);
2418 gimple_bind_set_body (bind, seq);
2419 update_stmt (bind);
2420 scan_omp_for (new_stmt, outer_ctx);
2421 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2424 /* Scan an OpenMP sections directive. */
2426 static void
2427 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2429 omp_context *ctx;
2431 ctx = new_omp_context (stmt, outer_ctx);
2432 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2433 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2436 /* Scan an OpenMP single directive. */
2438 static void
2439 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2441 omp_context *ctx;
2442 tree name;
2444 ctx = new_omp_context (stmt, outer_ctx);
2445 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2446 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2447 name = create_tmp_var_name (".omp_copy_s");
2448 name = build_decl (gimple_location (stmt),
2449 TYPE_DECL, name, ctx->record_type);
2450 TYPE_NAME (ctx->record_type) = name;
2452 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2453 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2455 if (TYPE_FIELDS (ctx->record_type) == NULL)
2456 ctx->record_type = NULL;
2457 else
2458 layout_type (ctx->record_type);
2461 /* Scan a GIMPLE_OMP_TARGET. */
/* Build a context and the ".omp_data_t" receiver record type for
   STMT; for offloaded regions also create the child function.  The
   clauses and body are scanned into the new context.  */
2463 static void
2464 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2466 omp_context *ctx;
2467 tree name;
2468 bool offloaded = is_gimple_omp_offloaded (stmt);
2469 tree clauses = gimple_omp_target_clauses (stmt);
2471 ctx = new_omp_context (stmt, outer_ctx);
2472 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2473 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2474 name = create_tmp_var_name (".omp_data_t");
2475 name = build_decl (gimple_location (stmt),
2476 TYPE_DECL, name, ctx->record_type);
2477 DECL_ARTIFICIAL (name) = 1;
2478 DECL_NAMELESS (name) = 1;
2479 TYPE_NAME (ctx->record_type) = name;
2480 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2482 if (offloaded)
2484 create_omp_child_function (ctx, false);
2485 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2488 scan_sharing_clauses (clauses, ctx);
2489 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* No mapped data at all: drop the record type and receiver decl.  */
2491 if (TYPE_FIELDS (ctx->record_type) == NULL)
2492 ctx->record_type = ctx->receiver_decl = NULL;
2493 else
/* The field list was built up in reverse; nreverse restores
   declaration order before layout.  */
2495 TYPE_FIELDS (ctx->record_type)
2496 = nreverse (TYPE_FIELDS (ctx->record_type));
2497 if (flag_checking)
/* Verify all fields share the alignment of the first one.  */
2499 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2500 for (tree field = TYPE_FIELDS (ctx->record_type);
2501 field;
2502 field = DECL_CHAIN (field))
2503 gcc_assert (DECL_ALIGN (field) == align);
2505 layout_type (ctx->record_type);
2506 if (offloaded)
2507 fixup_child_record_type (ctx);
2511 /* Scan an OpenMP teams directive. */
/* Non-host teams only need their clauses and body scanned.  Host
   teams are outlined: a ".omp_data_s" record type and a child
   function are created, and the context is pushed onto
   taskreg_contexts for later finalization.  */
2513 static void
2514 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2516 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2518 if (!gimple_omp_teams_host (stmt))
2520 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2521 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2522 return;
/* Host teams path: record the context for post-scan fixups.  */
2524 taskreg_contexts.safe_push (ctx);
2525 gcc_assert (taskreg_nesting_level == 1);
2526 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2527 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2528 tree name = create_tmp_var_name (".omp_data_s");
2529 name = build_decl (gimple_location (stmt),
2530 TYPE_DECL, name, ctx->record_type);
2531 DECL_ARTIFICIAL (name) = 1;
2532 DECL_NAMELESS (name) = 1;
2533 TYPE_NAME (ctx->record_type) = name;
2534 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2535 create_omp_child_function (ctx, false);
2536 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2538 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2539 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* Nothing shared: no receiver record is needed.  */
2541 if (TYPE_FIELDS (ctx->record_type) == NULL)
2542 ctx->record_type = ctx->receiver_decl = NULL;
2545 /* Check nesting restrictions. */
/* Return true if STMT may validly appear at its current nesting
   position given the innermost enclosing context CTX (NULL when STMT
   is outermost); otherwise emit a diagnostic and return false.  */
2546 static bool
2547 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2549 tree c;
2551 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2552 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2553 the original copy of its contents. */
2554 return true;
2556 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2557 inside an OpenACC CTX. */
2558 if (!(is_gimple_omp (stmt)
2559 && is_gimple_omp_oacc (stmt))
2560 /* Except for atomic codes that we share with OpenMP. */
2561 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2562 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2564 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2566 error_at (gimple_location (stmt),
2567 "non-OpenACC construct inside of OpenACC routine");
2568 return false;
2570 else
2571 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2572 if (is_gimple_omp (octx->stmt)
2573 && is_gimple_omp_oacc (octx->stmt))
2575 error_at (gimple_location (stmt),
2576 "non-OpenACC construct inside of OpenACC region");
2577 return false;
2581 if (ctx != NULL)
/* A scan region directly inside a for loop is checked against the
   enclosing loop's context instead.  */
2583 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2584 && ctx->outer
2585 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2586 ctx = ctx->outer;
/* Inside a simd region only ordered simd, atomic and scan are
   permitted.  */
2587 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2588 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2590 c = NULL_TREE;
2591 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2593 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2594 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2596 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2597 && (ctx->outer == NULL
2598 || !gimple_omp_for_combined_into_p (ctx->stmt)
2599 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2600 || (gimple_omp_for_kind (ctx->outer->stmt)
2601 != GF_OMP_FOR_KIND_FOR)
2602 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2604 error_at (gimple_location (stmt),
2605 "%<ordered simd threads%> must be closely "
2606 "nested inside of %<for simd%> region");
2607 return false;
2609 return true;
2612 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2613 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2614 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2615 return true;
2616 error_at (gimple_location (stmt),
2617 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2618 " or %<#pragma omp atomic%> may not be nested inside"
2619 " %<simd%> region");
2620 return false;
2622 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2624 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2625 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2626 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2627 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2629 error_at (gimple_location (stmt),
2630 "only %<distribute%> or %<parallel%> regions are "
2631 "allowed to be strictly nested inside %<teams%> "
2632 "region");
2633 return false;
2637 switch (gimple_code (stmt))
2639 case GIMPLE_OMP_FOR:
2640 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2641 return true;
2642 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2644 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2646 error_at (gimple_location (stmt),
2647 "%<distribute%> region must be strictly nested "
2648 "inside %<teams%> construct");
2649 return false;
2651 return true;
2653 /* We split taskloop into task and nested taskloop in it. */
2654 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2655 return true;
/* An OpenACC loop must be inside an OpenACC loop, an OpenACC
   parallel/kernels target, or an OpenACC routine.  */
2656 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2658 bool ok = false;
2660 if (ctx)
2661 switch (gimple_code (ctx->stmt))
2663 case GIMPLE_OMP_FOR:
2664 ok = (gimple_omp_for_kind (ctx->stmt)
2665 == GF_OMP_FOR_KIND_OACC_LOOP);
2666 break;
2668 case GIMPLE_OMP_TARGET:
2669 switch (gimple_omp_target_kind (ctx->stmt))
2671 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2672 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2673 ok = true;
2674 break;
2676 default:
2677 break;
2680 default:
2681 break;
2683 else if (oacc_get_fn_attrib (current_function_decl))
2684 ok = true;
2685 if (!ok)
2687 error_at (gimple_location (stmt),
2688 "OpenACC loop directive must be associated with"
2689 " an OpenACC compute region");
2690 return false;
2693 /* FALLTHRU */
2694 case GIMPLE_CALL:
/* For GOMP_cancel / GOMP_cancellation_point, argument 0 selects the
   cancelled construct: 1 parallel, 2 for, 4 sections, 8 taskgroup.  */
2695 if (is_gimple_call (stmt)
2696 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2697 == BUILT_IN_GOMP_CANCEL
2698 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2699 == BUILT_IN_GOMP_CANCELLATION_POINT))
2701 const char *bad = NULL;
2702 const char *kind = NULL;
2703 const char *construct
2704 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2705 == BUILT_IN_GOMP_CANCEL)
2706 ? "#pragma omp cancel"
2707 : "#pragma omp cancellation point";
2708 if (ctx == NULL)
2710 error_at (gimple_location (stmt), "orphaned %qs construct",
2711 construct);
2712 return false;
2714 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2715 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2716 : 0)
2718 case 1:
2719 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2720 bad = "#pragma omp parallel";
2721 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2722 == BUILT_IN_GOMP_CANCEL
2723 && !integer_zerop (gimple_call_arg (stmt, 1)))
2724 ctx->cancellable = true;
2725 kind = "parallel";
2726 break;
2727 case 2:
2728 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2729 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2730 bad = "#pragma omp for";
2731 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2732 == BUILT_IN_GOMP_CANCEL
2733 && !integer_zerop (gimple_call_arg (stmt, 1)))
2735 ctx->cancellable = true;
2736 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2737 OMP_CLAUSE_NOWAIT))
2738 warning_at (gimple_location (stmt), 0,
2739 "%<#pragma omp cancel for%> inside "
2740 "%<nowait%> for construct");
2741 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2742 OMP_CLAUSE_ORDERED))
2743 warning_at (gimple_location (stmt), 0,
2744 "%<#pragma omp cancel for%> inside "
2745 "%<ordered%> for construct");
2747 kind = "for";
2748 break;
2749 case 4:
2750 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2751 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2752 bad = "#pragma omp sections";
2753 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2754 == BUILT_IN_GOMP_CANCEL
2755 && !integer_zerop (gimple_call_arg (stmt, 1)))
2757 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2759 ctx->cancellable = true;
2760 if (omp_find_clause (gimple_omp_sections_clauses
2761 (ctx->stmt),
2762 OMP_CLAUSE_NOWAIT))
2763 warning_at (gimple_location (stmt), 0,
2764 "%<#pragma omp cancel sections%> inside "
2765 "%<nowait%> sections construct");
2767 else
/* Inside an individual section; the cancellable sections region is
   the parent context.  */
2769 gcc_assert (ctx->outer
2770 && gimple_code (ctx->outer->stmt)
2771 == GIMPLE_OMP_SECTIONS);
2772 ctx->outer->cancellable = true;
2773 if (omp_find_clause (gimple_omp_sections_clauses
2774 (ctx->outer->stmt),
2775 OMP_CLAUSE_NOWAIT))
2776 warning_at (gimple_location (stmt), 0,
2777 "%<#pragma omp cancel sections%> inside "
2778 "%<nowait%> sections construct");
2781 kind = "sections";
2782 break;
2783 case 8:
2784 if (!is_task_ctx (ctx)
2785 && (!is_taskloop_ctx (ctx)
2786 || ctx->outer == NULL
2787 || !is_task_ctx (ctx->outer)))
2788 bad = "#pragma omp task";
2789 else
2791 for (omp_context *octx = ctx->outer;
2792 octx; octx = octx->outer)
2794 switch (gimple_code (octx->stmt))
2796 case GIMPLE_OMP_TASKGROUP:
2797 break;
2798 case GIMPLE_OMP_TARGET:
2799 if (gimple_omp_target_kind (octx->stmt)
2800 != GF_OMP_TARGET_KIND_REGION)
2801 continue;
2802 /* FALLTHRU */
2803 case GIMPLE_OMP_PARALLEL:
2804 case GIMPLE_OMP_TEAMS:
2805 error_at (gimple_location (stmt),
2806 "%<%s taskgroup%> construct not closely "
2807 "nested inside of %<taskgroup%> region",
2808 construct);
2809 return false;
2810 case GIMPLE_OMP_TASK:
2811 if (gimple_omp_task_taskloop_p (octx->stmt)
2812 && octx->outer
2813 && is_taskloop_ctx (octx->outer))
2815 tree clauses
2816 = gimple_omp_for_clauses (octx->outer->stmt);
2817 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2818 break;
2820 continue;
2821 default:
2822 continue;
2824 break;
2826 ctx->cancellable = true;
2828 kind = "taskgroup";
2829 break;
2830 default:
2831 error_at (gimple_location (stmt), "invalid arguments");
2832 return false;
2834 if (bad)
2836 error_at (gimple_location (stmt),
2837 "%<%s %s%> construct not closely nested inside of %qs",
2838 construct, kind, bad);
2839 return false;
2842 /* FALLTHRU */
2843 case GIMPLE_OMP_SECTIONS:
2844 case GIMPLE_OMP_SINGLE:
2845 for (; ctx != NULL; ctx = ctx->outer)
2846 switch (gimple_code (ctx->stmt))
2848 case GIMPLE_OMP_FOR:
2849 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2850 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2851 break;
2852 /* FALLTHRU */
2853 case GIMPLE_OMP_SECTIONS:
2854 case GIMPLE_OMP_SINGLE:
2855 case GIMPLE_OMP_ORDERED:
2856 case GIMPLE_OMP_MASTER:
2857 case GIMPLE_OMP_TASK:
2858 case GIMPLE_OMP_CRITICAL:
2859 if (is_gimple_call (stmt))
/* Here STMT is a builtin call; only barriers are restricted.  */
2861 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2862 != BUILT_IN_GOMP_BARRIER)
2863 return true;
2864 error_at (gimple_location (stmt),
2865 "barrier region may not be closely nested inside "
2866 "of work-sharing, %<critical%>, %<ordered%>, "
2867 "%<master%>, explicit %<task%> or %<taskloop%> "
2868 "region");
2869 return false;
2871 error_at (gimple_location (stmt),
2872 "work-sharing region may not be closely nested inside "
2873 "of work-sharing, %<critical%>, %<ordered%>, "
2874 "%<master%>, explicit %<task%> or %<taskloop%> region");
2875 return false;
2876 case GIMPLE_OMP_PARALLEL:
2877 case GIMPLE_OMP_TEAMS:
2878 return true;
2879 case GIMPLE_OMP_TARGET:
2880 if (gimple_omp_target_kind (ctx->stmt)
2881 == GF_OMP_TARGET_KIND_REGION)
2882 return true;
2883 break;
2884 default:
2885 break;
2887 break;
2888 case GIMPLE_OMP_MASTER:
2889 for (; ctx != NULL; ctx = ctx->outer)
2890 switch (gimple_code (ctx->stmt))
2892 case GIMPLE_OMP_FOR:
2893 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2894 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2895 break;
2896 /* FALLTHRU */
2897 case GIMPLE_OMP_SECTIONS:
2898 case GIMPLE_OMP_SINGLE:
2899 case GIMPLE_OMP_TASK:
2900 error_at (gimple_location (stmt),
2901 "%<master%> region may not be closely nested inside "
2902 "of work-sharing, explicit %<task%> or %<taskloop%> "
2903 "region");
2904 return false;
2905 case GIMPLE_OMP_PARALLEL:
2906 case GIMPLE_OMP_TEAMS:
2907 return true;
2908 case GIMPLE_OMP_TARGET:
2909 if (gimple_omp_target_kind (ctx->stmt)
2910 == GF_OMP_TARGET_KIND_REGION)
2911 return true;
2912 break;
2913 default:
2914 break;
2916 break;
2917 case GIMPLE_OMP_TASK:
/* depend(source)/depend(sink:...) are only valid on ordered.  */
2918 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2919 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2920 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2921 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2923 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2924 error_at (OMP_CLAUSE_LOCATION (c),
2925 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2926 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2927 return false;
2929 break;
2930 case GIMPLE_OMP_ORDERED:
2931 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2932 c; c = OMP_CLAUSE_CHAIN (c))
2934 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2936 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2937 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2938 continue;
2940 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2941 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2942 || kind == OMP_CLAUSE_DEPEND_SINK)
2944 tree oclause;
2945 /* Look for containing ordered(N) loop. */
2946 if (ctx == NULL
2947 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2948 || (oclause
2949 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2950 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2952 error_at (OMP_CLAUSE_LOCATION (c),
2953 "%<ordered%> construct with %<depend%> clause "
2954 "must be closely nested inside an %<ordered%> "
2955 "loop");
2956 return false;
2958 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2960 error_at (OMP_CLAUSE_LOCATION (c),
2961 "%<ordered%> construct with %<depend%> clause "
2962 "must be closely nested inside a loop with "
2963 "%<ordered%> clause with a parameter");
2964 return false;
2967 else
2969 error_at (OMP_CLAUSE_LOCATION (c),
2970 "invalid depend kind in omp %<ordered%> %<depend%>");
2971 return false;
2974 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2975 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2977 /* ordered simd must be closely nested inside of simd region,
2978 and simd region must not encounter constructs other than
2979 ordered simd, therefore ordered simd may be either orphaned,
2980 or ctx->stmt must be simd. The latter case is handled already
2981 earlier. */
2982 if (ctx != NULL)
2984 error_at (gimple_location (stmt),
2985 "%<ordered%> %<simd%> must be closely nested inside "
2986 "%<simd%> region");
2987 return false;
2990 for (; ctx != NULL; ctx = ctx->outer)
2991 switch (gimple_code (ctx->stmt))
2993 case GIMPLE_OMP_CRITICAL:
2994 case GIMPLE_OMP_TASK:
2995 case GIMPLE_OMP_ORDERED:
2996 ordered_in_taskloop:
2997 error_at (gimple_location (stmt),
2998 "%<ordered%> region may not be closely nested inside "
2999 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3000 "%<taskloop%> region");
3001 return false;
3002 case GIMPLE_OMP_FOR:
3003 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3004 goto ordered_in_taskloop;
3005 tree o;
3006 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3007 OMP_CLAUSE_ORDERED);
3008 if (o == NULL)
3010 error_at (gimple_location (stmt),
3011 "%<ordered%> region must be closely nested inside "
3012 "a loop region with an %<ordered%> clause");
3013 return false;
3015 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3016 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3018 error_at (gimple_location (stmt),
3019 "%<ordered%> region without %<depend%> clause may "
3020 "not be closely nested inside a loop region with "
3021 "an %<ordered%> clause with a parameter");
3022 return false;
3024 return true;
3025 case GIMPLE_OMP_TARGET:
3026 if (gimple_omp_target_kind (ctx->stmt)
3027 != GF_OMP_TARGET_KIND_REGION)
3028 break;
3029 /* FALLTHRU */
3030 case GIMPLE_OMP_PARALLEL:
3031 case GIMPLE_OMP_TEAMS:
3032 error_at (gimple_location (stmt),
3033 "%<ordered%> region must be closely nested inside "
3034 "a loop region with an %<ordered%> clause");
3035 return false;
3036 default:
3037 break;
3039 break;
3040 case GIMPLE_OMP_CRITICAL:
/* A critical region may not appear (at any depth) inside another
   critical region with the same name.  */
3042 tree this_stmt_name
3043 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3044 for (; ctx != NULL; ctx = ctx->outer)
3045 if (gomp_critical *other_crit
3046 = dyn_cast <gomp_critical *> (ctx->stmt))
3047 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3049 error_at (gimple_location (stmt),
3050 "%<critical%> region may not be nested inside "
3051 "a %<critical%> region with the same name");
3052 return false;
3055 break;
3056 case GIMPLE_OMP_TEAMS:
3057 if (ctx == NULL)
3058 break;
3059 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3060 || (gimple_omp_target_kind (ctx->stmt)
3061 != GF_OMP_TARGET_KIND_REGION))
3063 /* Teams construct can appear either strictly nested inside of
3064 target construct with no intervening stmts, or can be encountered
3065 only by initial task (so must not appear inside any OpenMP
3066 construct. */
3067 error_at (gimple_location (stmt),
3068 "%<teams%> construct must be closely nested inside of "
3069 "%<target%> construct or not nested in any OpenMP "
3070 "construct");
3071 return false;
3073 break;
3074 case GIMPLE_OMP_TARGET:
3075 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3076 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3077 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3078 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3080 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3081 error_at (OMP_CLAUSE_LOCATION (c),
3082 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3083 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3084 return false;
3086 if (is_gimple_omp_offloaded (stmt)
3087 && oacc_get_fn_attrib (cfun->decl) != NULL)
3089 error_at (gimple_location (stmt),
3090 "OpenACC region inside of OpenACC routine, nested "
3091 "parallelism not supported yet");
3092 return false;
3094 for (; ctx != NULL; ctx = ctx->outer)
3096 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3098 if (is_gimple_omp (stmt)
3099 && is_gimple_omp_oacc (stmt)
3100 && is_gimple_omp (ctx->stmt))
3102 error_at (gimple_location (stmt),
3103 "OpenACC construct inside of non-OpenACC region");
3104 return false;
3106 continue;
/* Both STMT and CTX are target constructs; name them for the
   diagnostics below.  */
3109 const char *stmt_name, *ctx_stmt_name;
3110 switch (gimple_omp_target_kind (stmt))
3112 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3113 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3114 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3115 case GF_OMP_TARGET_KIND_ENTER_DATA:
3116 stmt_name = "target enter data"; break;
3117 case GF_OMP_TARGET_KIND_EXIT_DATA:
3118 stmt_name = "target exit data"; break;
3119 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3120 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3121 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3122 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3123 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3124 stmt_name = "enter/exit data"; break;
3125 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3126 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3127 break;
3128 default: gcc_unreachable ();
3130 switch (gimple_omp_target_kind (ctx->stmt))
3132 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3133 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3134 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3135 ctx_stmt_name = "parallel"; break;
3136 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3137 ctx_stmt_name = "kernels"; break;
3138 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3139 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3140 ctx_stmt_name = "host_data"; break;
3141 default: gcc_unreachable ();
3144 /* OpenACC/OpenMP mismatch? */
3145 if (is_gimple_omp_oacc (stmt)
3146 != is_gimple_omp_oacc (ctx->stmt))
3148 error_at (gimple_location (stmt),
3149 "%s %qs construct inside of %s %qs region",
3150 (is_gimple_omp_oacc (stmt)
3151 ? "OpenACC" : "OpenMP"), stmt_name,
3152 (is_gimple_omp_oacc (ctx->stmt)
3153 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3154 return false;
3156 if (is_gimple_omp_offloaded (ctx->stmt))
3158 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3159 if (is_gimple_omp_oacc (ctx->stmt))
3161 error_at (gimple_location (stmt),
3162 "%qs construct inside of %qs region",
3163 stmt_name, ctx_stmt_name);
3164 return false;
3166 else
3168 warning_at (gimple_location (stmt), 0,
3169 "%qs construct inside of %qs region",
3170 stmt_name, ctx_stmt_name);
3174 break;
3175 default:
3176 break;
3178 return true;
3182 /* Helper function scan_omp.
3184 Callback for walk_tree or operators in walk_gimple_stmt used to
3185 scan for OMP directives in TP. */
static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its mapping in this context's copy
	     body data, if any.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	/* Types are remapped in place.  */
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  /* Not a decl: keep walking into operands, and if the tree's
	     type was remapped, propagate the new type.  */
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    /* INTEGER_CSTs are shared; build a fresh constant
		       of the remapped type instead of mutating it.  */
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3232 /* Return true if FNDECL is a setjmp or a longjmp. */
3234 static bool
3235 setjmp_or_longjmp_p (const_tree fndecl)
3237 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3238 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3239 return true;
3241 tree declname = DECL_NAME (fndecl);
3242 if (!declname)
3243 return false;
3244 const char *name = IDENTIFIER_POINTER (declname);
3245 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3249 /* Helper function for scan_omp.
3251 Callback for walk_gimple_stmt used to scan for OMP directives in
3252 the current statement in GSI. */
static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  Statements that violate them are
     replaced by a GIMPLE_NOP below.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp are not allowed inside simd regions.  */
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  /* Calls to certain libgomp builtins are also subject to the
	     nesting restrictions.  */
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  /* Dispatch to the per-construct scanner.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A simd loop that may end up offloaded to a SIMT target gets
	 special scanning when a SIMT vectorization factor exists.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record whether the scan directive is inclusive or exclusive on
	 the enclosing context before scanning the body like the other
	 simple constructs below.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  /* Host teams behave like a task region for nesting-level
	     accounting.  */
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Let the walker descend into the bind body, but first map every
	   bind-local variable to itself so remapping leaves it alone.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3399 /* Scan all the statements starting at the current statement. CTX
3400 contains context information about the OMP directives and
3401 clauses found during the scan. */
3403 static void
3404 scan_omp (gimple_seq *body_p, omp_context *ctx)
3406 location_t saved_location;
3407 struct walk_stmt_info wi;
3409 memset (&wi, 0, sizeof (wi));
3410 wi.info = ctx;
3411 wi.want_locations = true;
3413 saved_location = input_location;
3414 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3415 input_location = saved_location;
3418 /* Re-gimplification and code generation routines. */
3420 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3421 of BIND if in a method. */
3423 static void
3424 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3426 if (DECL_ARGUMENTS (current_function_decl)
3427 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3428 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3429 == POINTER_TYPE))
3431 tree vars = gimple_bind_vars (bind);
3432 for (tree *pvar = &vars; *pvar; )
3433 if (omp_member_access_dummy_var (*pvar))
3434 *pvar = DECL_CHAIN (*pvar);
3435 else
3436 pvar = &DECL_CHAIN (*pvar);
3437 gimple_bind_set_vars (bind, vars);
3441 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3442 block and its subblocks. */
3444 static void
3445 remove_member_access_dummy_vars (tree block)
3447 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3448 if (omp_member_access_dummy_var (*pvar))
3449 *pvar = DECL_CHAIN (*pvar);
3450 else
3451 pvar = &DECL_CHAIN (*pvar);
3453 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3454 remove_member_access_dummy_vars (block);
3457 /* If a context was created for STMT when it was scanned, return it. */
3459 static omp_context *
3460 maybe_lookup_ctx (gimple *stmt)
3462 splay_tree_node n;
3463 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3464 return n ? (omp_context *) n->value : NULL;
3468 /* Find the mapping for DECL in CTX or the immediately enclosing
3469 context that has a mapping for DECL.
3471 If CTX is a nested parallel directive, we may have to use the decl
3472 mappings created in CTX's parent context. Suppose that we have the
3473 following parallel nesting (variable UIDs showed for clarity):
3475 iD.1562 = 0;
3476 #omp parallel shared(iD.1562) -> outer parallel
3477 iD.1562 = iD.1562 + 1;
3479 #omp parallel shared (iD.1562) -> inner parallel
3480 iD.1562 = iD.1562 - 1;
3482 Each parallel structure will create a distinct .omp_data_s structure
3483 for copying iD.1562 in/out of the directive:
3485 outer parallel .omp_data_s.1.i -> iD.1562
3486 inner parallel .omp_data_s.2.i -> iD.1562
3488 A shared variable mapping will produce a copy-out operation before
3489 the parallel directive and a copy-in operation after it. So, in
3490 this case we would have:
3492 iD.1562 = 0;
3493 .omp_data_o.1.i = iD.1562;
3494 #omp parallel shared(iD.1562) -> outer parallel
3495 .omp_data_i.1 = &.omp_data_o.1
3496 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3498 .omp_data_o.2.i = iD.1562; -> **
3499 #omp parallel shared(iD.1562) -> inner parallel
3500 .omp_data_i.2 = &.omp_data_o.2
3501 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3504 ** This is a problem. The symbol iD.1562 cannot be referenced
3505 inside the body of the outer parallel region. But since we are
3506 emitting this copy operation while expanding the inner parallel
3507 directive, we need to access the CTX structure of the outer
3508 parallel directive to get the correct mapping:
3510 .omp_data_o.2.i = .omp_data_i.1->i
3512 Since there may be other workshare or parallel directives enclosing
3513 the parallel directive, it may be necessary to walk up the context
3514 parent chain. This is not a problem in general because nested
3515 parallelism happens only rarely. */
3517 static tree
3518 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3520 tree t;
3521 omp_context *up;
3523 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3524 t = maybe_lookup_decl (decl, up);
3526 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3528 return t ? t : decl;
3532 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3533 in outer contexts. */
3535 static tree
3536 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3538 tree t = NULL;
3539 omp_context *up;
3541 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3542 t = maybe_lookup_decl (decl, up);
3544 return t ? t : decl;
3548 /* Construct the initialization value for reduction operation OP. */
tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Additive / or-like operations start from zero.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* Multiplicative / and-like operations start from one.  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* Bitwise AND starts from all-ones.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* MAX starts from the smallest representable value: -inf when the
	 FP type honors infinities, otherwise the most negative finite
	 value; for integral/pointer types, the type's minimum.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* MIN is symmetric: start from the largest representable value.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
3626 /* Construct the initialization value for reduction CLAUSE. */
3628 tree
3629 omp_reduction_init (tree clause, tree type)
3631 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3632 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3635 /* Return alignment to be assumed for var in CLAUSE, which should be
3636 OMP_CLAUSE_ALIGNED. */
static tree
omp_clause_aligned_alignment (tree clause)
{
  /* An explicit alignment on the clause wins.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  /* Find the widest vector size the target can autovectorize with.  */
  auto_vector_sizes sizes;
  targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
  poly_uint64 vs = 0;
  for (unsigned int i = 0; i < sizes.length (); ++i)
    vs = ordered_max (vs, sizes[i]);
  /* Paired scalar/vector mode classes: for each scalar class walked
     below, classes[i + 1] is the corresponding vector class.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	/* Ask the target for its preferred SIMD mode for this scalar
	   mode, then widen it (doubling) up to the vector size VS.  */
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	while (maybe_ne (vs, 0U)
	       && known_lt (GET_MODE_SIZE (vmode), vs)
	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();

	/* Build the corresponding vector type and skip it unless the
	   frontend type round-trips exactly to these modes.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
				       GET_MODE_SIZE (mode));
	type = build_vector_type (type, nelts);
	if (TYPE_MODE (type) != vmode)
	  continue;
	/* Track the largest alignment (in bytes) seen so far.  */
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
3682 /* This structure is part of the interface between lower_rec_simd_input_clauses
3683 and lower_rec_input_clauses. */
struct omplow_simd_context {
  /* Zero-initialize everything: tree/gimple_seq members become NULL and
     max_vf becomes 0, meaning "not yet computed".  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Index variable used to subscript the per-lane "omp simd array"s.  */
  tree idx;
  /* Variable holding the current SIMD lane.  */
  tree lane;
  /* Variable holding the last lane, used for inscan reductions.  */
  tree lastlane;
  /* Addresses of SIMT-privatized variables (plus a leading NULL_TREE
     simduid placeholder) collected by lower_rec_simd_input_clauses.  */
  vec<tree, va_heap> simt_eargs;
  /* Clobber statements releasing SIMT-privatized variables.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 until computed lazily in
     lower_rec_simd_input_clauses.  */
  poly_uint64_pod max_vf;
  /* True when lowering a SIMT rather than a SIMD region.  */
  bool is_simt;
};
3696 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3697 privatization. */
static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* Lazily compute the maximum vectorization factor on first call
     (max_vf == 0 means "not yet computed").  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* A safelen clause caps the vectorization factor; a safelen
	     that is not a (positive) constant forces max_vf = 1.  */
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* Create the shared index and lane variables once.  */
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  /* No vectorization possible: tell the caller to use the scalar path.  */
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* SIMT: registers stay as-is; addressable variables get a private
	 temporary whose address is passed to the SIMT entry sequence and
	 which is clobbered in the SIMT exit sequence.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_constructor (type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* SIMD: privatize NEW_VAR into a max_vf-element "omp simd array"
	 indexed by sctx->idx (IVAR) and sctx->lane (LVAR).  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Make uses of NEW_VAR resolve to the current-lane element.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
3813 /* Helper function of lower_rec_input_clauses. For a reference
3814 in simd reduction, add an underlying variable it will reference. */
3816 static void
3817 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3819 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3820 if (TREE_CONSTANT (z))
3822 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3823 get_name (new_vard));
3824 gimple_add_tmp_var (z);
3825 TREE_ADDRESSABLE (z) = 1;
3826 z = build_fold_addr_expr_loc (loc, z);
3827 gimplify_assign (new_vard, z, ilist);
3831 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
3832 code to emit (type) (tskred_temp[idx]). */
3834 static tree
3835 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3836 unsigned idx)
3838 unsigned HOST_WIDE_INT sz
3839 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3840 tree r = build2 (MEM_REF, pointer_sized_int_node,
3841 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3842 idx * sz));
3843 tree v = create_tmp_var (pointer_sized_int_node);
3844 gimple *g = gimple_build_assign (v, r);
3845 gimple_seq_add_stmt (ilist, g);
3846 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3848 v = create_tmp_var (type);
3849 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3850 gimple_seq_add_stmt (ilist, g);
3852 return v;
3855 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3856 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3857 private variables. Initialization statements go in ILIST, while calls
3858 to destructors go in DLIST. */
3860 static void
3861 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3862 omp_context *ctx, struct omp_for_data *fd)
3864 tree c, copyin_seq, x, ptr;
3865 bool copyin_by_ref = false;
3866 bool lastprivate_firstprivate = false;
3867 bool reduction_omp_orig_ref = false;
3868 int pass;
3869 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3870 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3871 omplow_simd_context sctx = omplow_simd_context ();
3872 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3873 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3874 gimple_seq llist[4] = { };
3875 tree nonconst_simd_if = NULL_TREE;
3877 copyin_seq = NULL;
3878 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3880 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3881 with data sharing clauses referencing variable sized vars. That
3882 is unnecessarily hard to support and very unlikely to result in
3883 vectorized code anyway. */
3884 if (is_simd)
3885 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3886 switch (OMP_CLAUSE_CODE (c))
3888 case OMP_CLAUSE_LINEAR:
3889 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3890 sctx.max_vf = 1;
3891 /* FALLTHRU */
3892 case OMP_CLAUSE_PRIVATE:
3893 case OMP_CLAUSE_FIRSTPRIVATE:
3894 case OMP_CLAUSE_LASTPRIVATE:
3895 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3896 sctx.max_vf = 1;
3897 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3899 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3900 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3901 sctx.max_vf = 1;
3903 break;
3904 case OMP_CLAUSE_REDUCTION:
3905 case OMP_CLAUSE_IN_REDUCTION:
3906 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3907 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3908 sctx.max_vf = 1;
3909 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3911 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3912 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3913 sctx.max_vf = 1;
3915 break;
3916 case OMP_CLAUSE_IF:
3917 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
3918 sctx.max_vf = 1;
3919 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
3920 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
3921 break;
3922 case OMP_CLAUSE_SIMDLEN:
3923 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
3924 sctx.max_vf = 1;
3925 break;
3926 case OMP_CLAUSE__CONDTEMP_:
3927 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
3928 if (sctx.is_simt)
3929 sctx.max_vf = 1;
3930 break;
3931 default:
3932 continue;
3935 /* Add a placeholder for simduid. */
3936 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3937 sctx.simt_eargs.safe_push (NULL_TREE);
3939 unsigned task_reduction_cnt = 0;
3940 unsigned task_reduction_cntorig = 0;
3941 unsigned task_reduction_cnt_full = 0;
3942 unsigned task_reduction_cntorig_full = 0;
3943 unsigned task_reduction_other_cnt = 0;
3944 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3945 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3946 /* Do all the fixed sized types in the first pass, and the variable sized
3947 types in the second pass. This makes sure that the scalar arguments to
3948 the variable sized types are processed before we use them in the
3949 variable sized operations. For task reductions we use 4 passes, in the
3950 first two we ignore them, in the third one gather arguments for
3951 GOMP_task_reduction_remap call and in the last pass actually handle
3952 the task reductions. */
3953 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3954 ? 4 : 2); ++pass)
3956 if (pass == 2 && task_reduction_cnt)
3958 tskred_atype
3959 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3960 + task_reduction_cntorig);
3961 tskred_avar = create_tmp_var_raw (tskred_atype);
3962 gimple_add_tmp_var (tskred_avar);
3963 TREE_ADDRESSABLE (tskred_avar) = 1;
3964 task_reduction_cnt_full = task_reduction_cnt;
3965 task_reduction_cntorig_full = task_reduction_cntorig;
3967 else if (pass == 3 && task_reduction_cnt)
3969 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3970 gimple *g
3971 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3972 size_int (task_reduction_cntorig),
3973 build_fold_addr_expr (tskred_avar));
3974 gimple_seq_add_stmt (ilist, g);
3976 if (pass == 3 && task_reduction_other_cnt)
3978 /* For reduction clauses, build
3979 tskred_base = (void *) tskred_temp[2]
3980 + omp_get_thread_num () * tskred_temp[1]
3981 or if tskred_temp[1] is known to be constant, that constant
3982 directly. This is the start of the private reduction copy block
3983 for the current thread. */
3984 tree v = create_tmp_var (integer_type_node);
3985 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3986 gimple *g = gimple_build_call (x, 0);
3987 gimple_call_set_lhs (g, v);
3988 gimple_seq_add_stmt (ilist, g);
3989 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3990 tskred_temp = OMP_CLAUSE_DECL (c);
3991 if (is_taskreg_ctx (ctx))
3992 tskred_temp = lookup_decl (tskred_temp, ctx);
3993 tree v2 = create_tmp_var (sizetype);
3994 g = gimple_build_assign (v2, NOP_EXPR, v);
3995 gimple_seq_add_stmt (ilist, g);
3996 if (ctx->task_reductions[0])
3997 v = fold_convert (sizetype, ctx->task_reductions[0]);
3998 else
3999 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4000 tree v3 = create_tmp_var (sizetype);
4001 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4002 gimple_seq_add_stmt (ilist, g);
4003 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4004 tskred_base = create_tmp_var (ptr_type_node);
4005 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4006 gimple_seq_add_stmt (ilist, g);
4008 task_reduction_cnt = 0;
4009 task_reduction_cntorig = 0;
4010 task_reduction_other_cnt = 0;
4011 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4013 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4014 tree var, new_var;
4015 bool by_ref;
4016 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4017 bool task_reduction_p = false;
4018 bool task_reduction_needs_orig_p = false;
4019 tree cond = NULL_TREE;
4021 switch (c_kind)
4023 case OMP_CLAUSE_PRIVATE:
4024 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4025 continue;
4026 break;
4027 case OMP_CLAUSE_SHARED:
4028 /* Ignore shared directives in teams construct inside
4029 of target construct. */
4030 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4031 && !is_host_teams_ctx (ctx))
4032 continue;
4033 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4035 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4036 || is_global_var (OMP_CLAUSE_DECL (c)));
4037 continue;
4039 case OMP_CLAUSE_FIRSTPRIVATE:
4040 case OMP_CLAUSE_COPYIN:
4041 break;
4042 case OMP_CLAUSE_LINEAR:
4043 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4044 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4045 lastprivate_firstprivate = true;
4046 break;
4047 case OMP_CLAUSE_REDUCTION:
4048 case OMP_CLAUSE_IN_REDUCTION:
4049 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4051 task_reduction_p = true;
4052 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4054 task_reduction_other_cnt++;
4055 if (pass == 2)
4056 continue;
4058 else
4059 task_reduction_cnt++;
4060 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4062 var = OMP_CLAUSE_DECL (c);
4063 /* If var is a global variable that isn't privatized
4064 in outer contexts, we don't need to look up the
4065 original address, it is always the address of the
4066 global variable itself. */
4067 if (!DECL_P (var)
4068 || omp_is_reference (var)
4069 || !is_global_var
4070 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4072 task_reduction_needs_orig_p = true;
4073 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4074 task_reduction_cntorig++;
4078 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4079 reduction_omp_orig_ref = true;
4080 break;
4081 case OMP_CLAUSE__REDUCTEMP_:
4082 if (!is_taskreg_ctx (ctx))
4083 continue;
4084 /* FALLTHRU */
4085 case OMP_CLAUSE__LOOPTEMP_:
4086 /* Handle _looptemp_/_reductemp_ clauses only on
4087 parallel/task. */
4088 if (fd)
4089 continue;
4090 break;
4091 case OMP_CLAUSE_LASTPRIVATE:
4092 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4094 lastprivate_firstprivate = true;
4095 if (pass != 0 || is_taskloop_ctx (ctx))
4096 continue;
4098 /* Even without corresponding firstprivate, if
4099 decl is Fortran allocatable, it needs outer var
4100 reference. */
4101 else if (pass == 0
4102 && lang_hooks.decls.omp_private_outer_ref
4103 (OMP_CLAUSE_DECL (c)))
4104 lastprivate_firstprivate = true;
4105 break;
4106 case OMP_CLAUSE_ALIGNED:
4107 if (pass != 1)
4108 continue;
4109 var = OMP_CLAUSE_DECL (c);
4110 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4111 && !is_global_var (var))
4113 new_var = maybe_lookup_decl (var, ctx);
4114 if (new_var == NULL_TREE)
4115 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4116 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4117 tree alarg = omp_clause_aligned_alignment (c);
4118 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4119 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4120 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4121 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4122 gimplify_and_add (x, ilist);
4124 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4125 && is_global_var (var))
4127 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4128 new_var = lookup_decl (var, ctx);
4129 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4130 t = build_fold_addr_expr_loc (clause_loc, t);
4131 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4132 tree alarg = omp_clause_aligned_alignment (c);
4133 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4134 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4135 t = fold_convert_loc (clause_loc, ptype, t);
4136 x = create_tmp_var (ptype);
4137 t = build2 (MODIFY_EXPR, ptype, x, t);
4138 gimplify_and_add (t, ilist);
4139 t = build_simple_mem_ref_loc (clause_loc, x);
4140 SET_DECL_VALUE_EXPR (new_var, t);
4141 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4143 continue;
4144 case OMP_CLAUSE__CONDTEMP_:
4145 if (is_parallel_ctx (ctx)
4146 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4147 break;
4148 continue;
4149 default:
4150 continue;
4153 if (task_reduction_p != (pass >= 2))
4154 continue;
4156 new_var = var = OMP_CLAUSE_DECL (c);
4157 if ((c_kind == OMP_CLAUSE_REDUCTION
4158 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4159 && TREE_CODE (var) == MEM_REF)
4161 var = TREE_OPERAND (var, 0);
4162 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4163 var = TREE_OPERAND (var, 0);
4164 if (TREE_CODE (var) == INDIRECT_REF
4165 || TREE_CODE (var) == ADDR_EXPR)
4166 var = TREE_OPERAND (var, 0);
4167 if (is_variable_sized (var))
4169 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4170 var = DECL_VALUE_EXPR (var);
4171 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4172 var = TREE_OPERAND (var, 0);
4173 gcc_assert (DECL_P (var));
4175 new_var = var;
4177 if (c_kind != OMP_CLAUSE_COPYIN)
4178 new_var = lookup_decl (var, ctx);
4180 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4182 if (pass != 0)
4183 continue;
4185 /* C/C++ array section reductions. */
4186 else if ((c_kind == OMP_CLAUSE_REDUCTION
4187 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4188 && var != OMP_CLAUSE_DECL (c))
4190 if (pass == 0)
4191 continue;
4193 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4194 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4196 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4198 tree b = TREE_OPERAND (orig_var, 1);
4199 b = maybe_lookup_decl (b, ctx);
4200 if (b == NULL)
4202 b = TREE_OPERAND (orig_var, 1);
4203 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4205 if (integer_zerop (bias))
4206 bias = b;
4207 else
4209 bias = fold_convert_loc (clause_loc,
4210 TREE_TYPE (b), bias);
4211 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4212 TREE_TYPE (b), b, bias);
4214 orig_var = TREE_OPERAND (orig_var, 0);
4216 if (pass == 2)
4218 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4219 if (is_global_var (out)
4220 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4221 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4222 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4223 != POINTER_TYPE)))
4224 x = var;
4225 else
4227 bool by_ref = use_pointer_for_field (var, NULL);
4228 x = build_receiver_ref (var, by_ref, ctx);
4229 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4230 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4231 == POINTER_TYPE))
4232 x = build_fold_addr_expr (x);
4234 if (TREE_CODE (orig_var) == INDIRECT_REF)
4235 x = build_simple_mem_ref (x);
4236 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4238 if (var == TREE_OPERAND (orig_var, 0))
4239 x = build_fold_addr_expr (x);
4241 bias = fold_convert (sizetype, bias);
4242 x = fold_convert (ptr_type_node, x);
4243 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4244 TREE_TYPE (x), x, bias);
4245 unsigned cnt = task_reduction_cnt - 1;
4246 if (!task_reduction_needs_orig_p)
4247 cnt += (task_reduction_cntorig_full
4248 - task_reduction_cntorig);
4249 else
4250 cnt = task_reduction_cntorig - 1;
4251 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4252 size_int (cnt), NULL_TREE, NULL_TREE);
4253 gimplify_assign (r, x, ilist);
4254 continue;
4257 if (TREE_CODE (orig_var) == INDIRECT_REF
4258 || TREE_CODE (orig_var) == ADDR_EXPR)
4259 orig_var = TREE_OPERAND (orig_var, 0);
4260 tree d = OMP_CLAUSE_DECL (c);
4261 tree type = TREE_TYPE (d);
4262 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4263 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4264 const char *name = get_name (orig_var);
4265 if (pass == 3)
4267 tree xv = create_tmp_var (ptr_type_node);
4268 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4270 unsigned cnt = task_reduction_cnt - 1;
4271 if (!task_reduction_needs_orig_p)
4272 cnt += (task_reduction_cntorig_full
4273 - task_reduction_cntorig);
4274 else
4275 cnt = task_reduction_cntorig - 1;
4276 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4277 size_int (cnt), NULL_TREE, NULL_TREE);
4279 gimple *g = gimple_build_assign (xv, x);
4280 gimple_seq_add_stmt (ilist, g);
4282 else
4284 unsigned int idx = *ctx->task_reduction_map->get (c);
4285 tree off;
4286 if (ctx->task_reductions[1 + idx])
4287 off = fold_convert (sizetype,
4288 ctx->task_reductions[1 + idx]);
4289 else
4290 off = task_reduction_read (ilist, tskred_temp, sizetype,
4291 7 + 3 * idx + 1);
4292 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4293 tskred_base, off);
4294 gimple_seq_add_stmt (ilist, g);
4296 x = fold_convert (build_pointer_type (boolean_type_node),
4297 xv);
4298 if (TREE_CONSTANT (v))
4299 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4300 TYPE_SIZE_UNIT (type));
4301 else
4303 tree t = maybe_lookup_decl (v, ctx);
4304 if (t)
4305 v = t;
4306 else
4307 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4308 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4309 fb_rvalue);
4310 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4311 TREE_TYPE (v), v,
4312 build_int_cst (TREE_TYPE (v), 1));
4313 t = fold_build2_loc (clause_loc, MULT_EXPR,
4314 TREE_TYPE (v), t,
4315 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4316 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4318 cond = create_tmp_var (TREE_TYPE (x));
4319 gimplify_assign (cond, x, ilist);
4320 x = xv;
4322 else if (TREE_CONSTANT (v))
4324 x = create_tmp_var_raw (type, name);
4325 gimple_add_tmp_var (x);
4326 TREE_ADDRESSABLE (x) = 1;
4327 x = build_fold_addr_expr_loc (clause_loc, x);
4329 else
4331 tree atmp
4332 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4333 tree t = maybe_lookup_decl (v, ctx);
4334 if (t)
4335 v = t;
4336 else
4337 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4338 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4339 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4340 TREE_TYPE (v), v,
4341 build_int_cst (TREE_TYPE (v), 1));
4342 t = fold_build2_loc (clause_loc, MULT_EXPR,
4343 TREE_TYPE (v), t,
4344 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4345 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4346 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4349 tree ptype = build_pointer_type (TREE_TYPE (type));
4350 x = fold_convert_loc (clause_loc, ptype, x);
4351 tree y = create_tmp_var (ptype, name);
4352 gimplify_assign (y, x, ilist);
4353 x = y;
4354 tree yb = y;
4356 if (!integer_zerop (bias))
4358 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4359 bias);
4360 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4362 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4363 pointer_sized_int_node, yb, bias);
4364 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4365 yb = create_tmp_var (ptype, name);
4366 gimplify_assign (yb, x, ilist);
4367 x = yb;
4370 d = TREE_OPERAND (d, 0);
4371 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4372 d = TREE_OPERAND (d, 0);
4373 if (TREE_CODE (d) == ADDR_EXPR)
4375 if (orig_var != var)
4377 gcc_assert (is_variable_sized (orig_var));
4378 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4380 gimplify_assign (new_var, x, ilist);
4381 tree new_orig_var = lookup_decl (orig_var, ctx);
4382 tree t = build_fold_indirect_ref (new_var);
4383 DECL_IGNORED_P (new_var) = 0;
4384 TREE_THIS_NOTRAP (t) = 1;
4385 SET_DECL_VALUE_EXPR (new_orig_var, t);
4386 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4388 else
4390 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4391 build_int_cst (ptype, 0));
4392 SET_DECL_VALUE_EXPR (new_var, x);
4393 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4396 else
4398 gcc_assert (orig_var == var);
4399 if (TREE_CODE (d) == INDIRECT_REF)
4401 x = create_tmp_var (ptype, name);
4402 TREE_ADDRESSABLE (x) = 1;
4403 gimplify_assign (x, yb, ilist);
4404 x = build_fold_addr_expr_loc (clause_loc, x);
4406 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4407 gimplify_assign (new_var, x, ilist);
4409 /* GOMP_taskgroup_reduction_register memsets the whole
4410 array to zero. If the initializer is zero, we don't
4411 need to initialize it again, just mark it as ever
4412 used unconditionally, i.e. cond = true. */
4413 if (cond
4414 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4415 && initializer_zerop (omp_reduction_init (c,
4416 TREE_TYPE (type))))
4418 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4419 boolean_true_node);
4420 gimple_seq_add_stmt (ilist, g);
4421 continue;
4423 tree end = create_artificial_label (UNKNOWN_LOCATION);
4424 if (cond)
4426 gimple *g;
4427 if (!is_parallel_ctx (ctx))
4429 tree condv = create_tmp_var (boolean_type_node);
4430 g = gimple_build_assign (condv,
4431 build_simple_mem_ref (cond));
4432 gimple_seq_add_stmt (ilist, g);
4433 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4434 g = gimple_build_cond (NE_EXPR, condv,
4435 boolean_false_node, end, lab1);
4436 gimple_seq_add_stmt (ilist, g);
4437 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4439 g = gimple_build_assign (build_simple_mem_ref (cond),
4440 boolean_true_node);
4441 gimple_seq_add_stmt (ilist, g);
4444 tree y1 = create_tmp_var (ptype);
4445 gimplify_assign (y1, y, ilist);
4446 tree i2 = NULL_TREE, y2 = NULL_TREE;
4447 tree body2 = NULL_TREE, end2 = NULL_TREE;
4448 tree y3 = NULL_TREE, y4 = NULL_TREE;
4449 if (task_reduction_needs_orig_p)
4451 y3 = create_tmp_var (ptype);
4452 tree ref;
4453 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4454 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4455 size_int (task_reduction_cnt_full
4456 + task_reduction_cntorig - 1),
4457 NULL_TREE, NULL_TREE);
4458 else
4460 unsigned int idx = *ctx->task_reduction_map->get (c);
4461 ref = task_reduction_read (ilist, tskred_temp, ptype,
4462 7 + 3 * idx);
4464 gimplify_assign (y3, ref, ilist);
4466 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4468 if (pass != 3)
4470 y2 = create_tmp_var (ptype);
4471 gimplify_assign (y2, y, ilist);
4473 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4475 tree ref = build_outer_var_ref (var, ctx);
4476 /* For ref build_outer_var_ref already performs this. */
4477 if (TREE_CODE (d) == INDIRECT_REF)
4478 gcc_assert (omp_is_reference (var));
4479 else if (TREE_CODE (d) == ADDR_EXPR)
4480 ref = build_fold_addr_expr (ref);
4481 else if (omp_is_reference (var))
4482 ref = build_fold_addr_expr (ref);
4483 ref = fold_convert_loc (clause_loc, ptype, ref);
4484 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4485 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4487 y3 = create_tmp_var (ptype);
4488 gimplify_assign (y3, unshare_expr (ref), ilist);
4490 if (is_simd)
4492 y4 = create_tmp_var (ptype);
4493 gimplify_assign (y4, ref, dlist);
4497 tree i = create_tmp_var (TREE_TYPE (v));
4498 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4499 tree body = create_artificial_label (UNKNOWN_LOCATION);
4500 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4501 if (y2)
4503 i2 = create_tmp_var (TREE_TYPE (v));
4504 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4505 body2 = create_artificial_label (UNKNOWN_LOCATION);
4506 end2 = create_artificial_label (UNKNOWN_LOCATION);
4507 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4509 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4511 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4512 tree decl_placeholder
4513 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4514 SET_DECL_VALUE_EXPR (decl_placeholder,
4515 build_simple_mem_ref (y1));
4516 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4517 SET_DECL_VALUE_EXPR (placeholder,
4518 y3 ? build_simple_mem_ref (y3)
4519 : error_mark_node);
4520 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4521 x = lang_hooks.decls.omp_clause_default_ctor
4522 (c, build_simple_mem_ref (y1),
4523 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4524 if (x)
4525 gimplify_and_add (x, ilist);
4526 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4528 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4529 lower_omp (&tseq, ctx);
4530 gimple_seq_add_seq (ilist, tseq);
4532 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4533 if (is_simd)
4535 SET_DECL_VALUE_EXPR (decl_placeholder,
4536 build_simple_mem_ref (y2));
4537 SET_DECL_VALUE_EXPR (placeholder,
4538 build_simple_mem_ref (y4));
4539 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4540 lower_omp (&tseq, ctx);
4541 gimple_seq_add_seq (dlist, tseq);
4542 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4544 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4545 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4546 if (y2)
4548 x = lang_hooks.decls.omp_clause_dtor
4549 (c, build_simple_mem_ref (y2));
4550 if (x)
4551 gimplify_and_add (x, dlist);
4554 else
4556 x = omp_reduction_init (c, TREE_TYPE (type));
4557 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4559 /* reduction(-:var) sums up the partial results, so it
4560 acts identically to reduction(+:var). */
4561 if (code == MINUS_EXPR)
4562 code = PLUS_EXPR;
4564 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4565 if (is_simd)
4567 x = build2 (code, TREE_TYPE (type),
4568 build_simple_mem_ref (y4),
4569 build_simple_mem_ref (y2));
4570 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4573 gimple *g
4574 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4575 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4576 gimple_seq_add_stmt (ilist, g);
4577 if (y3)
4579 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4580 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4581 gimple_seq_add_stmt (ilist, g);
4583 g = gimple_build_assign (i, PLUS_EXPR, i,
4584 build_int_cst (TREE_TYPE (i), 1));
4585 gimple_seq_add_stmt (ilist, g);
4586 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4587 gimple_seq_add_stmt (ilist, g);
4588 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4589 if (y2)
4591 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4592 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4593 gimple_seq_add_stmt (dlist, g);
4594 if (y4)
4596 g = gimple_build_assign
4597 (y4, POINTER_PLUS_EXPR, y4,
4598 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4599 gimple_seq_add_stmt (dlist, g);
4601 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4602 build_int_cst (TREE_TYPE (i2), 1));
4603 gimple_seq_add_stmt (dlist, g);
4604 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4605 gimple_seq_add_stmt (dlist, g);
4606 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4608 continue;
4610 else if (pass == 2)
4612 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4613 x = var;
4614 else
4616 bool by_ref = use_pointer_for_field (var, ctx);
4617 x = build_receiver_ref (var, by_ref, ctx);
4619 if (!omp_is_reference (var))
4620 x = build_fold_addr_expr (x);
4621 x = fold_convert (ptr_type_node, x);
4622 unsigned cnt = task_reduction_cnt - 1;
4623 if (!task_reduction_needs_orig_p)
4624 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4625 else
4626 cnt = task_reduction_cntorig - 1;
4627 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4628 size_int (cnt), NULL_TREE, NULL_TREE);
4629 gimplify_assign (r, x, ilist);
4630 continue;
4632 else if (pass == 3)
4634 tree type = TREE_TYPE (new_var);
4635 if (!omp_is_reference (var))
4636 type = build_pointer_type (type);
4637 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4639 unsigned cnt = task_reduction_cnt - 1;
4640 if (!task_reduction_needs_orig_p)
4641 cnt += (task_reduction_cntorig_full
4642 - task_reduction_cntorig);
4643 else
4644 cnt = task_reduction_cntorig - 1;
4645 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4646 size_int (cnt), NULL_TREE, NULL_TREE);
4648 else
4650 unsigned int idx = *ctx->task_reduction_map->get (c);
4651 tree off;
4652 if (ctx->task_reductions[1 + idx])
4653 off = fold_convert (sizetype,
4654 ctx->task_reductions[1 + idx]);
4655 else
4656 off = task_reduction_read (ilist, tskred_temp, sizetype,
4657 7 + 3 * idx + 1);
4658 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4659 tskred_base, off);
4661 x = fold_convert (type, x);
4662 tree t;
4663 if (omp_is_reference (var))
4665 gimplify_assign (new_var, x, ilist);
4666 t = new_var;
4667 new_var = build_simple_mem_ref (new_var);
4669 else
4671 t = create_tmp_var (type);
4672 gimplify_assign (t, x, ilist);
4673 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4674 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4676 t = fold_convert (build_pointer_type (boolean_type_node), t);
4677 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4678 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4679 cond = create_tmp_var (TREE_TYPE (t));
4680 gimplify_assign (cond, t, ilist);
4682 else if (is_variable_sized (var))
4684 /* For variable sized types, we need to allocate the
4685 actual storage here. Call alloca and store the
4686 result in the pointer decl that we created elsewhere. */
4687 if (pass == 0)
4688 continue;
4690 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4692 gcall *stmt;
4693 tree tmp, atmp;
4695 ptr = DECL_VALUE_EXPR (new_var);
4696 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4697 ptr = TREE_OPERAND (ptr, 0);
4698 gcc_assert (DECL_P (ptr));
4699 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4701 /* void *tmp = __builtin_alloca */
4702 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4703 stmt = gimple_build_call (atmp, 2, x,
4704 size_int (DECL_ALIGN (var)));
4705 tmp = create_tmp_var_raw (ptr_type_node);
4706 gimple_add_tmp_var (tmp);
4707 gimple_call_set_lhs (stmt, tmp);
4709 gimple_seq_add_stmt (ilist, stmt);
4711 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4712 gimplify_assign (ptr, x, ilist);
4715 else if (omp_is_reference (var)
4716 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4717 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4719 /* For references that are being privatized for Fortran,
4720 allocate new backing storage for the new pointer
4721 variable. This allows us to avoid changing all the
4722 code that expects a pointer to something that expects
4723 a direct variable. */
4724 if (pass == 0)
4725 continue;
4727 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4728 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4730 x = build_receiver_ref (var, false, ctx);
4731 x = build_fold_addr_expr_loc (clause_loc, x);
4733 else if (TREE_CONSTANT (x))
4735 /* For reduction in SIMD loop, defer adding the
4736 initialization of the reference, because if we decide
4737 to use SIMD array for it, the initilization could cause
4738 expansion ICE. Ditto for other privatization clauses. */
4739 if (is_simd)
4740 x = NULL_TREE;
4741 else
4743 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4744 get_name (var));
4745 gimple_add_tmp_var (x);
4746 TREE_ADDRESSABLE (x) = 1;
4747 x = build_fold_addr_expr_loc (clause_loc, x);
4750 else
4752 tree atmp
4753 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4754 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4755 tree al = size_int (TYPE_ALIGN (rtype));
4756 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4759 if (x)
4761 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4762 gimplify_assign (new_var, x, ilist);
4765 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4767 else if ((c_kind == OMP_CLAUSE_REDUCTION
4768 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4769 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4771 if (pass == 0)
4772 continue;
4774 else if (pass != 0)
4775 continue;
4777 switch (OMP_CLAUSE_CODE (c))
4779 case OMP_CLAUSE_SHARED:
4780 /* Ignore shared directives in teams construct inside
4781 target construct. */
4782 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4783 && !is_host_teams_ctx (ctx))
4784 continue;
4785 /* Shared global vars are just accessed directly. */
4786 if (is_global_var (new_var))
4787 break;
4788 /* For taskloop firstprivate/lastprivate, represented
4789 as firstprivate and shared clause on the task, new_var
4790 is the firstprivate var. */
4791 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4792 break;
4793 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4794 needs to be delayed until after fixup_child_record_type so
4795 that we get the correct type during the dereference. */
4796 by_ref = use_pointer_for_field (var, ctx);
4797 x = build_receiver_ref (var, by_ref, ctx);
4798 SET_DECL_VALUE_EXPR (new_var, x);
4799 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4801 /* ??? If VAR is not passed by reference, and the variable
4802 hasn't been initialized yet, then we'll get a warning for
4803 the store into the omp_data_s structure. Ideally, we'd be
4804 able to notice this and not store anything at all, but
4805 we're generating code too early. Suppress the warning. */
4806 if (!by_ref)
4807 TREE_NO_WARNING (var) = 1;
4808 break;
4810 case OMP_CLAUSE__CONDTEMP_:
4811 if (is_parallel_ctx (ctx))
4813 x = build_receiver_ref (var, false, ctx);
4814 SET_DECL_VALUE_EXPR (new_var, x);
4815 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4817 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
4819 x = build_zero_cst (TREE_TYPE (var));
4820 goto do_private;
4822 break;
4824 case OMP_CLAUSE_LASTPRIVATE:
4825 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4826 break;
4827 /* FALLTHRU */
4829 case OMP_CLAUSE_PRIVATE:
4830 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4831 x = build_outer_var_ref (var, ctx);
4832 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4834 if (is_task_ctx (ctx))
4835 x = build_receiver_ref (var, false, ctx);
4836 else
4837 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4839 else
4840 x = NULL;
4841 do_private:
4842 tree nx;
4843 nx = lang_hooks.decls.omp_clause_default_ctor
4844 (c, unshare_expr (new_var), x);
4845 if (is_simd)
4847 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4848 if ((TREE_ADDRESSABLE (new_var) || nx || y
4849 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4850 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
4851 || omp_is_reference (var))
4852 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4853 ivar, lvar))
4855 if (omp_is_reference (var))
4857 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4858 tree new_vard = TREE_OPERAND (new_var, 0);
4859 gcc_assert (DECL_P (new_vard));
4860 SET_DECL_VALUE_EXPR (new_vard,
4861 build_fold_addr_expr (lvar));
4862 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4865 if (nx)
4866 x = lang_hooks.decls.omp_clause_default_ctor
4867 (c, unshare_expr (ivar), x);
4868 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
4870 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
4871 unshare_expr (ivar), x);
4872 nx = x;
4874 if (nx && x)
4875 gimplify_and_add (x, &llist[0]);
4876 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4877 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
4879 tree v = new_var;
4880 if (!DECL_P (v))
4882 gcc_assert (TREE_CODE (v) == MEM_REF);
4883 v = TREE_OPERAND (v, 0);
4884 gcc_assert (DECL_P (v));
4886 v = *ctx->lastprivate_conditional_map->get (v);
4887 tree t = create_tmp_var (TREE_TYPE (v));
4888 tree z = build_zero_cst (TREE_TYPE (v));
4889 tree orig_v
4890 = build_outer_var_ref (var, ctx,
4891 OMP_CLAUSE_LASTPRIVATE);
4892 gimple_seq_add_stmt (dlist,
4893 gimple_build_assign (t, z));
4894 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
4895 tree civar = DECL_VALUE_EXPR (v);
4896 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
4897 civar = unshare_expr (civar);
4898 TREE_OPERAND (civar, 1) = sctx.idx;
4899 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
4900 unshare_expr (civar));
4901 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
4902 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
4903 orig_v, unshare_expr (ivar)));
4904 tree cond = build2 (LT_EXPR, boolean_type_node, t,
4905 civar);
4906 x = build3 (COND_EXPR, void_type_node, cond, x,
4907 void_node);
4908 gimple_seq tseq = NULL;
4909 gimplify_and_add (x, &tseq);
4910 if (ctx->outer)
4911 lower_omp (&tseq, ctx->outer);
4912 gimple_seq_add_seq (&llist[1], tseq);
4914 if (y)
4916 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4917 if (y)
4918 gimplify_and_add (y, &llist[1]);
4920 break;
4922 if (omp_is_reference (var))
4924 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4925 tree new_vard = TREE_OPERAND (new_var, 0);
4926 gcc_assert (DECL_P (new_vard));
4927 tree type = TREE_TYPE (TREE_TYPE (new_vard));
4928 x = TYPE_SIZE_UNIT (type);
4929 if (TREE_CONSTANT (x))
4931 x = create_tmp_var_raw (type, get_name (var));
4932 gimple_add_tmp_var (x);
4933 TREE_ADDRESSABLE (x) = 1;
4934 x = build_fold_addr_expr_loc (clause_loc, x);
4935 x = fold_convert_loc (clause_loc,
4936 TREE_TYPE (new_vard), x);
4937 gimplify_assign (new_vard, x, ilist);
4941 if (nx)
4942 gimplify_and_add (nx, ilist);
4943 /* FALLTHRU */
4945 do_dtor:
4946 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4947 if (x)
4948 gimplify_and_add (x, dlist);
4949 break;
4951 case OMP_CLAUSE_LINEAR:
4952 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4953 goto do_firstprivate;
4954 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4955 x = NULL;
4956 else
4957 x = build_outer_var_ref (var, ctx);
4958 goto do_private;
4960 case OMP_CLAUSE_FIRSTPRIVATE:
4961 if (is_task_ctx (ctx))
4963 if ((omp_is_reference (var)
4964 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4965 || is_variable_sized (var))
4966 goto do_dtor;
4967 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4968 ctx))
4969 || use_pointer_for_field (var, NULL))
4971 x = build_receiver_ref (var, false, ctx);
4972 SET_DECL_VALUE_EXPR (new_var, x);
4973 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4974 goto do_dtor;
4977 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4978 && omp_is_reference (var))
4980 x = build_outer_var_ref (var, ctx);
4981 gcc_assert (TREE_CODE (x) == MEM_REF
4982 && integer_zerop (TREE_OPERAND (x, 1)));
4983 x = TREE_OPERAND (x, 0);
4984 x = lang_hooks.decls.omp_clause_copy_ctor
4985 (c, unshare_expr (new_var), x);
4986 gimplify_and_add (x, ilist);
4987 goto do_dtor;
4989 do_firstprivate:
4990 x = build_outer_var_ref (var, ctx);
4991 if (is_simd)
4993 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4994 && gimple_omp_for_combined_into_p (ctx->stmt))
4996 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4997 tree stept = TREE_TYPE (t);
4998 tree ct = omp_find_clause (clauses,
4999 OMP_CLAUSE__LOOPTEMP_);
5000 gcc_assert (ct);
5001 tree l = OMP_CLAUSE_DECL (ct);
5002 tree n1 = fd->loop.n1;
5003 tree step = fd->loop.step;
5004 tree itype = TREE_TYPE (l);
5005 if (POINTER_TYPE_P (itype))
5006 itype = signed_type_for (itype);
5007 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5008 if (TYPE_UNSIGNED (itype)
5009 && fd->loop.cond_code == GT_EXPR)
5010 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5011 fold_build1 (NEGATE_EXPR, itype, l),
5012 fold_build1 (NEGATE_EXPR,
5013 itype, step));
5014 else
5015 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5016 t = fold_build2 (MULT_EXPR, stept,
5017 fold_convert (stept, l), t);
5019 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5021 if (omp_is_reference (var))
5023 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5024 tree new_vard = TREE_OPERAND (new_var, 0);
5025 gcc_assert (DECL_P (new_vard));
5026 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5027 nx = TYPE_SIZE_UNIT (type);
5028 if (TREE_CONSTANT (nx))
5030 nx = create_tmp_var_raw (type,
5031 get_name (var));
5032 gimple_add_tmp_var (nx);
5033 TREE_ADDRESSABLE (nx) = 1;
5034 nx = build_fold_addr_expr_loc (clause_loc,
5035 nx);
5036 nx = fold_convert_loc (clause_loc,
5037 TREE_TYPE (new_vard),
5038 nx);
5039 gimplify_assign (new_vard, nx, ilist);
5043 x = lang_hooks.decls.omp_clause_linear_ctor
5044 (c, new_var, x, t);
5045 gimplify_and_add (x, ilist);
5046 goto do_dtor;
5049 if (POINTER_TYPE_P (TREE_TYPE (x)))
5050 x = fold_build2 (POINTER_PLUS_EXPR,
5051 TREE_TYPE (x), x, t);
5052 else
5053 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5056 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5057 || TREE_ADDRESSABLE (new_var)
5058 || omp_is_reference (var))
5059 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5060 ivar, lvar))
5062 if (omp_is_reference (var))
5064 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5065 tree new_vard = TREE_OPERAND (new_var, 0);
5066 gcc_assert (DECL_P (new_vard));
5067 SET_DECL_VALUE_EXPR (new_vard,
5068 build_fold_addr_expr (lvar));
5069 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5071 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5073 tree iv = create_tmp_var (TREE_TYPE (new_var));
5074 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5075 gimplify_and_add (x, ilist);
5076 gimple_stmt_iterator gsi
5077 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5078 gassign *g
5079 = gimple_build_assign (unshare_expr (lvar), iv);
5080 gsi_insert_before_without_update (&gsi, g,
5081 GSI_SAME_STMT);
5082 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5083 enum tree_code code = PLUS_EXPR;
5084 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5085 code = POINTER_PLUS_EXPR;
5086 g = gimple_build_assign (iv, code, iv, t);
5087 gsi_insert_before_without_update (&gsi, g,
5088 GSI_SAME_STMT);
5089 break;
5091 x = lang_hooks.decls.omp_clause_copy_ctor
5092 (c, unshare_expr (ivar), x);
5093 gimplify_and_add (x, &llist[0]);
5094 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5095 if (x)
5096 gimplify_and_add (x, &llist[1]);
5097 break;
5099 if (omp_is_reference (var))
5101 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5102 tree new_vard = TREE_OPERAND (new_var, 0);
5103 gcc_assert (DECL_P (new_vard));
5104 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5105 nx = TYPE_SIZE_UNIT (type);
5106 if (TREE_CONSTANT (nx))
5108 nx = create_tmp_var_raw (type, get_name (var));
5109 gimple_add_tmp_var (nx);
5110 TREE_ADDRESSABLE (nx) = 1;
5111 nx = build_fold_addr_expr_loc (clause_loc, nx);
5112 nx = fold_convert_loc (clause_loc,
5113 TREE_TYPE (new_vard), nx);
5114 gimplify_assign (new_vard, nx, ilist);
5118 x = lang_hooks.decls.omp_clause_copy_ctor
5119 (c, unshare_expr (new_var), x);
5120 gimplify_and_add (x, ilist);
5121 goto do_dtor;
5123 case OMP_CLAUSE__LOOPTEMP_:
5124 case OMP_CLAUSE__REDUCTEMP_:
5125 gcc_assert (is_taskreg_ctx (ctx));
5126 x = build_outer_var_ref (var, ctx);
5127 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5128 gimplify_and_add (x, ilist);
5129 break;
5131 case OMP_CLAUSE_COPYIN:
5132 by_ref = use_pointer_for_field (var, NULL);
5133 x = build_receiver_ref (var, by_ref, ctx);
5134 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5135 append_to_statement_list (x, &copyin_seq);
5136 copyin_by_ref |= by_ref;
5137 break;
5139 case OMP_CLAUSE_REDUCTION:
5140 case OMP_CLAUSE_IN_REDUCTION:
5141 /* OpenACC reductions are initialized using the
5142 GOACC_REDUCTION internal function. */
5143 if (is_gimple_omp_oacc (ctx->stmt))
5144 break;
5145 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5147 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5148 gimple *tseq;
5149 tree ptype = TREE_TYPE (placeholder);
5150 if (cond)
5152 x = error_mark_node;
5153 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5154 && !task_reduction_needs_orig_p)
5155 x = var;
5156 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5158 tree pptype = build_pointer_type (ptype);
5159 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5160 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5161 size_int (task_reduction_cnt_full
5162 + task_reduction_cntorig - 1),
5163 NULL_TREE, NULL_TREE);
5164 else
5166 unsigned int idx
5167 = *ctx->task_reduction_map->get (c);
5168 x = task_reduction_read (ilist, tskred_temp,
5169 pptype, 7 + 3 * idx);
5171 x = fold_convert (pptype, x);
5172 x = build_simple_mem_ref (x);
5175 else
5177 x = build_outer_var_ref (var, ctx);
5179 if (omp_is_reference (var)
5180 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5181 x = build_fold_addr_expr_loc (clause_loc, x);
5183 SET_DECL_VALUE_EXPR (placeholder, x);
5184 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5185 tree new_vard = new_var;
5186 if (omp_is_reference (var))
5188 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5189 new_vard = TREE_OPERAND (new_var, 0);
5190 gcc_assert (DECL_P (new_vard));
5192 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5193 if (is_simd
5194 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5195 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5196 rvarp = &rvar;
5197 if (is_simd
5198 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5199 ivar, lvar, rvarp,
5200 &rvar2))
5202 if (new_vard == new_var)
5204 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5205 SET_DECL_VALUE_EXPR (new_var, ivar);
5207 else
5209 SET_DECL_VALUE_EXPR (new_vard,
5210 build_fold_addr_expr (ivar));
5211 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5213 x = lang_hooks.decls.omp_clause_default_ctor
5214 (c, unshare_expr (ivar),
5215 build_outer_var_ref (var, ctx));
5216 if (rvarp)
5218 if (x)
5220 gimplify_and_add (x, &llist[0]);
5222 tree ivar2 = unshare_expr (lvar);
5223 TREE_OPERAND (ivar2, 1) = sctx.idx;
5224 x = lang_hooks.decls.omp_clause_default_ctor
5225 (c, ivar2, build_outer_var_ref (var, ctx));
5226 gimplify_and_add (x, &llist[0]);
5228 if (rvar2)
5230 x = lang_hooks.decls.omp_clause_default_ctor
5231 (c, unshare_expr (rvar2),
5232 build_outer_var_ref (var, ctx));
5233 gimplify_and_add (x, &llist[0]);
5236 /* For types that need construction, add another
5237 private var which will be default constructed
5238 and optionally initialized with
5239 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5240 loop we want to assign this value instead of
5241 constructing and destructing it in each
5242 iteration. */
5243 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5244 gimple_add_tmp_var (nv);
5245 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5246 ? rvar2
5247 : ivar, 0),
5248 nv);
5249 x = lang_hooks.decls.omp_clause_default_ctor
5250 (c, nv, build_outer_var_ref (var, ctx));
5251 gimplify_and_add (x, ilist);
5253 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5255 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5256 x = DECL_VALUE_EXPR (new_vard);
5257 tree vexpr = nv;
5258 if (new_vard != new_var)
5259 vexpr = build_fold_addr_expr (nv);
5260 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5261 lower_omp (&tseq, ctx);
5262 SET_DECL_VALUE_EXPR (new_vard, x);
5263 gimple_seq_add_seq (ilist, tseq);
5264 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5267 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5268 if (x)
5269 gimplify_and_add (x, dlist);
5272 tree ref = build_outer_var_ref (var, ctx);
5273 x = unshare_expr (ivar);
5274 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5275 ref);
5276 gimplify_and_add (x, &llist[0]);
5278 ref = build_outer_var_ref (var, ctx);
5279 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5280 rvar);
5281 gimplify_and_add (x, &llist[3]);
5283 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5284 if (new_vard == new_var)
5285 SET_DECL_VALUE_EXPR (new_var, lvar);
5286 else
5287 SET_DECL_VALUE_EXPR (new_vard,
5288 build_fold_addr_expr (lvar));
5290 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5291 if (x)
5292 gimplify_and_add (x, &llist[1]);
5294 tree ivar2 = unshare_expr (lvar);
5295 TREE_OPERAND (ivar2, 1) = sctx.idx;
5296 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5297 if (x)
5298 gimplify_and_add (x, &llist[1]);
5300 if (rvar2)
5302 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5303 if (x)
5304 gimplify_and_add (x, &llist[1]);
5306 break;
5308 if (x)
5309 gimplify_and_add (x, &llist[0]);
5310 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5312 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5313 lower_omp (&tseq, ctx);
5314 gimple_seq_add_seq (&llist[0], tseq);
5316 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5317 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5318 lower_omp (&tseq, ctx);
5319 gimple_seq_add_seq (&llist[1], tseq);
5320 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5321 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5322 if (new_vard == new_var)
5323 SET_DECL_VALUE_EXPR (new_var, lvar);
5324 else
5325 SET_DECL_VALUE_EXPR (new_vard,
5326 build_fold_addr_expr (lvar));
5327 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5328 if (x)
5329 gimplify_and_add (x, &llist[1]);
5330 break;
5332 /* If this is a reference to constant size reduction var
5333 with placeholder, we haven't emitted the initializer
5334 for it because it is undesirable if SIMD arrays are used.
5335 But if they aren't used, we need to emit the deferred
5336 initialization now. */
5337 else if (omp_is_reference (var) && is_simd)
5338 handle_simd_reference (clause_loc, new_vard, ilist);
5340 tree lab2 = NULL_TREE;
5341 if (cond)
5343 gimple *g;
5344 if (!is_parallel_ctx (ctx))
5346 tree condv = create_tmp_var (boolean_type_node);
5347 tree m = build_simple_mem_ref (cond);
5348 g = gimple_build_assign (condv, m);
5349 gimple_seq_add_stmt (ilist, g);
5350 tree lab1
5351 = create_artificial_label (UNKNOWN_LOCATION);
5352 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5353 g = gimple_build_cond (NE_EXPR, condv,
5354 boolean_false_node,
5355 lab2, lab1);
5356 gimple_seq_add_stmt (ilist, g);
5357 gimple_seq_add_stmt (ilist,
5358 gimple_build_label (lab1));
5360 g = gimple_build_assign (build_simple_mem_ref (cond),
5361 boolean_true_node);
5362 gimple_seq_add_stmt (ilist, g);
5364 x = lang_hooks.decls.omp_clause_default_ctor
5365 (c, unshare_expr (new_var),
5366 cond ? NULL_TREE
5367 : build_outer_var_ref (var, ctx));
5368 if (x)
5369 gimplify_and_add (x, ilist);
5371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5372 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5374 if (x || (!is_simd
5375 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5377 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5378 gimple_add_tmp_var (nv);
5379 ctx->cb.decl_map->put (new_vard, nv);
5380 x = lang_hooks.decls.omp_clause_default_ctor
5381 (c, nv, build_outer_var_ref (var, ctx));
5382 if (x)
5383 gimplify_and_add (x, ilist);
5384 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5386 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5387 tree vexpr = nv;
5388 if (new_vard != new_var)
5389 vexpr = build_fold_addr_expr (nv);
5390 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5391 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5392 lower_omp (&tseq, ctx);
5393 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5394 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5395 gimple_seq_add_seq (ilist, tseq);
5397 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5398 if (is_simd && ctx->scan_exclusive)
5400 tree nv2
5401 = create_tmp_var_raw (TREE_TYPE (new_var));
5402 gimple_add_tmp_var (nv2);
5403 ctx->cb.decl_map->put (nv, nv2);
5404 x = lang_hooks.decls.omp_clause_default_ctor
5405 (c, nv2, build_outer_var_ref (var, ctx));
5406 gimplify_and_add (x, ilist);
5407 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5408 if (x)
5409 gimplify_and_add (x, dlist);
5411 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5412 if (x)
5413 gimplify_and_add (x, dlist);
5415 else if (is_simd
5416 && ctx->scan_exclusive
5417 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5419 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5420 gimple_add_tmp_var (nv2);
5421 ctx->cb.decl_map->put (new_vard, nv2);
5422 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5423 if (x)
5424 gimplify_and_add (x, dlist);
5426 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5427 goto do_dtor;
5430 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5432 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5433 lower_omp (&tseq, ctx);
5434 gimple_seq_add_seq (ilist, tseq);
5436 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5437 if (is_simd)
5439 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5440 lower_omp (&tseq, ctx);
5441 gimple_seq_add_seq (dlist, tseq);
5442 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5444 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5445 if (cond)
5447 if (lab2)
5448 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5449 break;
5451 goto do_dtor;
5453 else
5455 x = omp_reduction_init (c, TREE_TYPE (new_var));
5456 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5457 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5459 if (cond)
5461 gimple *g;
5462 tree lab2 = NULL_TREE;
5463 /* GOMP_taskgroup_reduction_register memsets the whole
5464 array to zero. If the initializer is zero, we don't
5465 need to initialize it again, just mark it as ever
5466 used unconditionally, i.e. cond = true. */
5467 if (initializer_zerop (x))
5469 g = gimple_build_assign (build_simple_mem_ref (cond),
5470 boolean_true_node);
5471 gimple_seq_add_stmt (ilist, g);
5472 break;
5475 /* Otherwise, emit
5476 if (!cond) { cond = true; new_var = x; } */
5477 if (!is_parallel_ctx (ctx))
5479 tree condv = create_tmp_var (boolean_type_node);
5480 tree m = build_simple_mem_ref (cond);
5481 g = gimple_build_assign (condv, m);
5482 gimple_seq_add_stmt (ilist, g);
5483 tree lab1
5484 = create_artificial_label (UNKNOWN_LOCATION);
5485 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5486 g = gimple_build_cond (NE_EXPR, condv,
5487 boolean_false_node,
5488 lab2, lab1);
5489 gimple_seq_add_stmt (ilist, g);
5490 gimple_seq_add_stmt (ilist,
5491 gimple_build_label (lab1));
5493 g = gimple_build_assign (build_simple_mem_ref (cond),
5494 boolean_true_node);
5495 gimple_seq_add_stmt (ilist, g);
5496 gimplify_assign (new_var, x, ilist);
5497 if (lab2)
5498 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5499 break;
5502 /* reduction(-:var) sums up the partial results, so it
5503 acts identically to reduction(+:var). */
5504 if (code == MINUS_EXPR)
5505 code = PLUS_EXPR;
5507 tree new_vard = new_var;
5508 if (is_simd && omp_is_reference (var))
5510 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5511 new_vard = TREE_OPERAND (new_var, 0);
5512 gcc_assert (DECL_P (new_vard));
5514 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5515 if (is_simd
5516 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5517 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5518 rvarp = &rvar;
5519 if (is_simd
5520 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5521 ivar, lvar, rvarp,
5522 &rvar2))
5524 if (new_vard != new_var)
5526 SET_DECL_VALUE_EXPR (new_vard,
5527 build_fold_addr_expr (lvar));
5528 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5531 tree ref = build_outer_var_ref (var, ctx);
5533 if (rvarp)
5535 gimplify_assign (ivar, ref, &llist[0]);
5536 ref = build_outer_var_ref (var, ctx);
5537 gimplify_assign (ref, rvar, &llist[3]);
5538 break;
5541 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5543 if (sctx.is_simt)
5545 if (!simt_lane)
5546 simt_lane = create_tmp_var (unsigned_type_node);
5547 x = build_call_expr_internal_loc
5548 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5549 TREE_TYPE (ivar), 2, ivar, simt_lane);
5550 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5551 gimplify_assign (ivar, x, &llist[2]);
5553 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5554 ref = build_outer_var_ref (var, ctx);
5555 gimplify_assign (ref, x, &llist[1]);
5558 else
5560 if (omp_is_reference (var) && is_simd)
5561 handle_simd_reference (clause_loc, new_vard, ilist);
5562 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5563 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5564 break;
5565 gimplify_assign (new_var, x, ilist);
5566 if (is_simd)
5568 tree ref = build_outer_var_ref (var, ctx);
5570 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5571 ref = build_outer_var_ref (var, ctx);
5572 gimplify_assign (ref, x, dlist);
5576 break;
5578 default:
5579 gcc_unreachable ();
5583 if (tskred_avar)
5585 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5586 TREE_THIS_VOLATILE (clobber) = 1;
5587 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5590 if (known_eq (sctx.max_vf, 1U))
5592 sctx.is_simt = false;
5593 if (ctx->lastprivate_conditional_map)
5595 if (gimple_omp_for_combined_into_p (ctx->stmt))
5597 /* Signal to lower_omp_1 that it should use parent context. */
5598 ctx->combined_into_simd_safelen0 = true;
5599 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5600 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5601 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5603 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5604 tree *v
5605 = ctx->lastprivate_conditional_map->get (o);
5606 tree po = lookup_decl (OMP_CLAUSE_DECL (c), ctx->outer);
5607 tree *pv
5608 = ctx->outer->lastprivate_conditional_map->get (po);
5609 *v = *pv;
5612 else
5614 /* When not vectorized, treat lastprivate(conditional:) like
5615 normal lastprivate, as there will be just one simd lane
5616 writing the privatized variable. */
5617 delete ctx->lastprivate_conditional_map;
5618 ctx->lastprivate_conditional_map = NULL;
5623 if (nonconst_simd_if)
5625 if (sctx.lane == NULL_TREE)
5627 sctx.idx = create_tmp_var (unsigned_type_node);
5628 sctx.lane = create_tmp_var (unsigned_type_node);
5630 /* FIXME: For now. */
5631 sctx.is_simt = false;
5634 if (sctx.lane || sctx.is_simt)
5636 uid = create_tmp_var (ptr_type_node, "simduid");
5637 /* Don't want uninit warnings on simduid, it is always uninitialized,
5638 but we use it not for the value, but for the DECL_UID only. */
5639 TREE_NO_WARNING (uid) = 1;
5640 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5641 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5642 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5643 gimple_omp_for_set_clauses (ctx->stmt, c);
5645 /* Emit calls denoting privatized variables and initializing a pointer to
5646 structure that holds private variables as fields after ompdevlow pass. */
5647 if (sctx.is_simt)
5649 sctx.simt_eargs[0] = uid;
5650 gimple *g
5651 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5652 gimple_call_set_lhs (g, uid);
5653 gimple_seq_add_stmt (ilist, g);
5654 sctx.simt_eargs.release ();
5656 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5657 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5658 gimple_call_set_lhs (g, simtrec);
5659 gimple_seq_add_stmt (ilist, g);
5661 if (sctx.lane)
5663 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
5664 2 + (nonconst_simd_if != NULL),
5665 uid, integer_zero_node,
5666 nonconst_simd_if);
5667 gimple_call_set_lhs (g, sctx.lane);
5668 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5669 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5670 g = gimple_build_assign (sctx.lane, INTEGER_CST,
5671 build_int_cst (unsigned_type_node, 0));
5672 gimple_seq_add_stmt (ilist, g);
5673 if (sctx.lastlane)
5675 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5676 2, uid, sctx.lane);
5677 gimple_call_set_lhs (g, sctx.lastlane);
5678 gimple_seq_add_stmt (dlist, g);
5679 gimple_seq_add_seq (dlist, llist[3]);
5681 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5682 if (llist[2])
5684 tree simt_vf = create_tmp_var (unsigned_type_node);
5685 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5686 gimple_call_set_lhs (g, simt_vf);
5687 gimple_seq_add_stmt (dlist, g);
5689 tree t = build_int_cst (unsigned_type_node, 1);
5690 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5691 gimple_seq_add_stmt (dlist, g);
5693 t = build_int_cst (unsigned_type_node, 0);
5694 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5695 gimple_seq_add_stmt (dlist, g);
5697 tree body = create_artificial_label (UNKNOWN_LOCATION);
5698 tree header = create_artificial_label (UNKNOWN_LOCATION);
5699 tree end = create_artificial_label (UNKNOWN_LOCATION);
5700 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5701 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5703 gimple_seq_add_seq (dlist, llist[2]);
5705 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5706 gimple_seq_add_stmt (dlist, g);
5708 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5709 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5710 gimple_seq_add_stmt (dlist, g);
5712 gimple_seq_add_stmt (dlist, gimple_build_label (end));
5714 for (int i = 0; i < 2; i++)
5715 if (llist[i])
5717 tree vf = create_tmp_var (unsigned_type_node);
5718 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5719 gimple_call_set_lhs (g, vf);
5720 gimple_seq *seq = i == 0 ? ilist : dlist;
5721 gimple_seq_add_stmt (seq, g);
5722 tree t = build_int_cst (unsigned_type_node, 0);
5723 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5724 gimple_seq_add_stmt (seq, g);
5725 tree body = create_artificial_label (UNKNOWN_LOCATION);
5726 tree header = create_artificial_label (UNKNOWN_LOCATION);
5727 tree end = create_artificial_label (UNKNOWN_LOCATION);
5728 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5729 gimple_seq_add_stmt (seq, gimple_build_label (body));
5730 gimple_seq_add_seq (seq, llist[i]);
5731 t = build_int_cst (unsigned_type_node, 1);
5732 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5733 gimple_seq_add_stmt (seq, g);
5734 gimple_seq_add_stmt (seq, gimple_build_label (header));
5735 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5736 gimple_seq_add_stmt (seq, g);
5737 gimple_seq_add_stmt (seq, gimple_build_label (end));
5740 if (sctx.is_simt)
5742 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5743 gimple *g
5744 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5745 gimple_seq_add_stmt (dlist, g);
5748 /* The copyin sequence is not to be executed by the main thread, since
5749 that would result in self-copies. Perhaps not visible to scalars,
5750 but it certainly is to C++ operator=. */
5751 if (copyin_seq)
5753 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5755 x = build2 (NE_EXPR, boolean_type_node, x,
5756 build_int_cst (TREE_TYPE (x), 0));
5757 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5758 gimplify_and_add (x, ilist);
5761 /* If any copyin variable is passed by reference, we must ensure the
5762 master thread doesn't modify it before it is copied over in all
5763 threads. Similarly for variables in both firstprivate and
5764 lastprivate clauses we need to ensure the lastprivate copying
5765 happens after firstprivate copying in all threads. And similarly
5766 for UDRs if initializer expression refers to omp_orig. */
5767 if (copyin_by_ref || lastprivate_firstprivate
5768 || (reduction_omp_orig_ref
5769 && !ctx->scan_inclusive
5770 && !ctx->scan_exclusive))
5772 /* Don't add any barrier for #pragma omp simd or
5773 #pragma omp distribute. */
5774 if (!is_task_ctx (ctx)
5775 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5776 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5777 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5780 /* If max_vf is non-zero, then we can use only a vectorization factor
5781 up to the max_vf we chose. So stick it into the safelen clause. */
5782 if (maybe_ne (sctx.max_vf, 0U))
5784 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5785 OMP_CLAUSE_SAFELEN);
5786 poly_uint64 safe_len;
5787 if (c == NULL_TREE
5788 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5789 && maybe_gt (safe_len, sctx.max_vf)))
5791 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5792 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5793 sctx.max_vf);
5794 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5795 gimple_omp_for_set_clauses (ctx->stmt, c);
5800 /* Create temporary variables for lastprivate(conditional:) implementation
5801 in context CTX with CLAUSES. */
5803 static void
5804 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
/* Per-construct state built lazily the first time a conditional
   lastprivate clause is seen.  */
5806 tree iter_type = NULL_TREE;
5807 tree cond_ptr = NULL_TREE;
5808 tree iter_var = NULL_TREE;
/* SIMD constructs reuse the _condtemp_ clauses the front end/earlier
   lowering already attached; other constructs allocate them here.  */
5809 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5810 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
5811 tree next = *clauses;
5812 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
5813 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5814 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5816 if (is_simd)
/* For SIMD, each conditional lastprivate has a matching _CONDTEMP_
   clause already present; NEXT is advanced past each one consumed so
   the pairing stays in clause order.  */
5818 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
5819 gcc_assert (cc);
5820 if (iter_type == NULL_TREE)
/* First conditional clause: create the shared iteration-counter
   temporary and prepend a _CONDTEMP_ clause marked as the ITER one.  */
5822 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
5823 iter_var = create_tmp_var_raw (iter_type);
5824 DECL_CONTEXT (iter_var) = current_function_decl;
5825 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
5826 DECL_CHAIN (iter_var) = ctx->block_vars;
5827 ctx->block_vars = iter_var;
5828 tree c3
5829 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
5830 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
5831 OMP_CLAUSE_DECL (c3) = iter_var;
5832 OMP_CLAUSE_CHAIN (c3) = *clauses;
5833 *clauses = c3;
5834 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
5836 next = OMP_CLAUSE_CHAIN (cc);
/* Map the privatized decl O to its per-variable condition temp V.  */
5837 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5838 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
5839 ctx->lastprivate_conditional_map->put (o, v);
5840 continue;
/* Non-SIMD path: lazily compute the counter type from the loop's
   iteration variable type (or plain unsigned for sections).  */
5842 if (iter_type == NULL)
5844 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
5846 struct omp_for_data fd;
5847 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
5848 NULL);
5849 iter_type = unsigned_type_for (fd.iter_type);
5851 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
5852 iter_type = unsigned_type_node;
/* Reuse an existing _CONDTEMP_ clause's decl from the outer context
   as the condition buffer pointer if one is present ...  */
5853 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
5854 if (c2)
5856 cond_ptr
5857 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
5858 OMP_CLAUSE_DECL (c2) = cond_ptr;
5860 else
/* ... otherwise allocate a pointer-to-counter temp and prepend a
   fresh _CONDTEMP_ clause carrying it.  */
5862 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
5863 DECL_CONTEXT (cond_ptr) = current_function_decl;
5864 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
5865 DECL_CHAIN (cond_ptr) = ctx->block_vars;
5866 ctx->block_vars = cond_ptr;
5867 c2 = build_omp_clause (UNKNOWN_LOCATION,
5868 OMP_CLAUSE__CONDTEMP_);
5869 OMP_CLAUSE_DECL (c2) = cond_ptr;
5870 OMP_CLAUSE_CHAIN (c2) = *clauses;
5871 *clauses = c2;
/* Shared iteration counter, recorded in a second _CONDTEMP_ clause
   chained right after C2 and flagged with __CONDTEMP__ITER.  */
5873 iter_var = create_tmp_var_raw (iter_type);
5874 DECL_CONTEXT (iter_var) = current_function_decl;
5875 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
5876 DECL_CHAIN (iter_var) = ctx->block_vars;
5877 ctx->block_vars = iter_var;
5878 tree c3
5879 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
5880 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
5881 OMP_CLAUSE_DECL (c3) = iter_var;
5882 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
5883 OMP_CLAUSE_CHAIN (c2) = c3;
5884 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
/* Per-variable condition temp V: records the highest iteration that
   assigned the variable (used later by lower_lastprivate_clauses).  */
5886 tree v = create_tmp_var_raw (iter_type);
5887 DECL_CONTEXT (v) = current_function_decl;
5888 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
5889 DECL_CHAIN (v) = ctx->block_vars;
5890 ctx->block_vars = v;
5891 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5892 ctx->lastprivate_conditional_map->put (o, v);
5897 /* Generate code to implement the LASTPRIVATE clauses. This is used for
5898 both parallel and workshare constructs. PREDICATE may be NULL if it's
5899 always true. BODY_P is the sequence to insert early initialization
5900 if needed, STMT_LIST is where the non-conditional lastprivate handling
5901 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
5902 section. */
5904 static void
5905 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
5906 gimple_seq *stmt_list, gimple_seq *cstmt_list,
5907 omp_context *ctx)
5909 tree x, c, label = NULL, orig_clauses = clauses;
5910 bool par_clauses = false;
5911 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
/* Running byte/element offset into the conditional-lastprivate buffer.  */
5912 unsigned HOST_WIDE_INT conditional_off = 0;
5914 /* Early exit if there are no lastprivate or linear clauses. */
5915 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
5916 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
5917 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
5918 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
5919 break;
5920 if (clauses == NULL)
5922 /* If this was a workshare clause, see if it had been combined
5923 with its parallel. In that case, look for the clauses on the
5924 parallel statement itself. */
5925 if (is_parallel_ctx (ctx))
5926 return;
5928 ctx = ctx->outer;
5929 if (ctx == NULL || !is_parallel_ctx (ctx))
5930 return;
5932 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5933 OMP_CLAUSE_LASTPRIVATE)
5934 if (clauses == NULL)
5935 return;
5936 par_clauses = true;
/* Detect SIMT lowering and the simduid decl on SIMD loops; both change
   how the "last" lane's value is extracted below.  */
5939 bool maybe_simt = false;
5940 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5941 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5943 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
5944 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
5945 if (simduid)
5946 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
5949 if (predicate)
/* Guard the whole copy-out with "if (PREDICATE)"; LABEL is the join
   point emitted at the end of this function.  */
5951 gcond *stmt;
5952 tree label_true, arm1, arm2;
5953 enum tree_code pred_code = TREE_CODE (predicate);
5955 label = create_artificial_label (UNKNOWN_LOCATION);
5956 label_true = create_artificial_label (UNKNOWN_LOCATION);
5957 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
5959 arm1 = TREE_OPERAND (predicate, 0);
5960 arm2 = TREE_OPERAND (predicate, 1);
5961 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5962 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
5964 else
/* Non-comparison predicate: test it against false.  */
5966 arm1 = predicate;
5967 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5968 arm2 = boolean_false_node;
5969 pred_code = NE_EXPR;
5971 if (maybe_simt)
/* Under SIMT the predicate may hold in any lane; vote across lanes
   so all lanes agree on whether to run the copy-out.  */
5973 c = build2 (pred_code, boolean_type_node, arm1, arm2);
5974 c = fold_convert (integer_type_node, c);
5975 simtcond = create_tmp_var (integer_type_node);
5976 gimplify_assign (simtcond, c, stmt_list);
5977 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
5978 1, simtcond);
5979 c = create_tmp_var (integer_type_node);
5980 gimple_call_set_lhs (g, c);
5981 gimple_seq_add_stmt (stmt_list, g);
5982 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
5983 label_true, label);
5985 else
5986 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
5987 gimple_seq_add_stmt (stmt_list, stmt);
5988 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
5991 tree cond_ptr = NULL_TREE;
5992 for (c = clauses; c ;)
5994 tree var, new_var;
5995 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
/* Conditional lastprivate copy-out goes into CSTMT_LIST (run inside a
   critical section); everything else into STMT_LIST.  */
5996 gimple_seq *this_stmt_list = stmt_list;
5997 tree lab2 = NULL_TREE;
5999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6000 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6001 && ctx->lastprivate_conditional_map
6002 && !ctx->combined_into_simd_safelen0)
6004 gcc_assert (body_p);
/* On SIMD loops with a simduid the conditional handling is done
   elsewhere; skip to the next clause.  */
6005 if (simduid)
6006 goto next;
6007 if (cond_ptr == NULL_TREE)
6009 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6010 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6012 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6013 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6014 tree v = *ctx->lastprivate_conditional_map->get (o);
/* Zero-init the per-thread "iteration that last wrote" counter.  */
6015 gimplify_assign (v, build_zero_cst (type), body_p);
6016 this_stmt_list = cstmt_list;
/* MEM is this variable's slot in the shared condition buffer; the
   buffer may be addressed through a pointer or as an array.  */
6017 tree mem;
6018 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6020 mem = build2 (MEM_REF, type, cond_ptr,
6021 build_int_cst (TREE_TYPE (cond_ptr),
6022 conditional_off));
6023 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6025 else
6026 mem = build4 (ARRAY_REF, type, cond_ptr,
6027 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6028 tree mem2 = copy_node (mem);
6029 gimple_seq seq = NULL;
6030 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6031 gimple_seq_add_stmt (this_stmt_list, seq);
/* Emit: if (v > *slot) { *slot = v; <copy-out below>; } so the
   thread with the highest writing iteration wins.  */
6032 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6033 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6034 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6035 gimple_seq_add_stmt (this_stmt_list, g);
6036 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6037 gimplify_assign (mem2, v, this_stmt_list);
6040 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6041 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6042 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6044 var = OMP_CLAUSE_DECL (c);
6045 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6046 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6047 && is_taskloop_ctx (ctx))
/* taskloop firstprivate+lastprivate: the private copy lives in
   the enclosing task context.  */
6049 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6050 new_var = lookup_decl (var, ctx->outer);
6052 else
6054 new_var = lookup_decl (var, ctx);
6055 /* Avoid uninitialized warnings for lastprivate and
6056 for linear iterators. */
6057 if (predicate
6058 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6059 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6060 TREE_NO_WARNING (new_var) = 1;
6063 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
/* SIMD-lowered variable backed by an "omp simd array": the final
   value is ARRAY[lastlane], with LASTLANE computed once on demand
   via IFN_GOMP_SIMD_LAST_LANE and shared by all such clauses.  */
6065 tree val = DECL_VALUE_EXPR (new_var);
6066 if (TREE_CODE (val) == ARRAY_REF
6067 && VAR_P (TREE_OPERAND (val, 0))
6068 && lookup_attribute ("omp simd array",
6069 DECL_ATTRIBUTES (TREE_OPERAND (val,
6070 0))))
6072 if (lastlane == NULL)
6074 lastlane = create_tmp_var (unsigned_type_node);
6075 gcall *g
6076 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6077 2, simduid,
6078 TREE_OPERAND (val, 1));
6079 gimple_call_set_lhs (g, lastlane);
6080 gimple_seq_add_stmt (this_stmt_list, g);
6082 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6083 TREE_OPERAND (val, 0), lastlane,
6084 NULL_TREE, NULL_TREE);
6085 TREE_THIS_NOTRAP (new_var) = 1;
6088 else if (maybe_simt)
/* SIMT: fetch the value from the last active lane with an index
   exchange; SIMTLAST is computed once via IFN_GOMP_SIMT_LAST_LANE.  */
6090 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6091 ? DECL_VALUE_EXPR (new_var)
6092 : new_var);
6093 if (simtlast == NULL)
6095 simtlast = create_tmp_var (unsigned_type_node);
6096 gcall *g = gimple_build_call_internal
6097 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6098 gimple_call_set_lhs (g, simtlast);
6099 gimple_seq_add_stmt (this_stmt_list, g);
6101 x = build_call_expr_internal_loc
6102 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6103 TREE_TYPE (val), 2, val, simtlast);
6104 new_var = unshare_expr (new_var);
6105 gimplify_assign (new_var, x, this_stmt_list);
6106 new_var = unshare_expr (new_var);
/* Flush any deferred lastprivate/linear update statements (e.g.
   class copy assignment) before the final copy-out.  */
6109 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6110 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6112 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6113 gimple_seq_add_seq (this_stmt_list,
6114 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6115 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6117 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6118 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6120 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6121 gimple_seq_add_seq (this_stmt_list,
6122 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6123 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
/* X is the destination (outer/original variable) of the copy-out.  */
6126 x = NULL_TREE;
6127 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6128 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
6130 gcc_checking_assert (is_taskloop_ctx (ctx));
6131 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6132 ctx->outer->outer);
6133 if (is_global_var (ovar))
6134 x = ovar;
6136 if (!x)
6137 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6138 if (omp_is_reference (var))
6139 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6140 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6141 gimplify_and_add (x, this_stmt_list);
6143 if (lab2)
6144 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6147 next:
6148 c = OMP_CLAUSE_CHAIN (c);
6149 if (c == NULL && !par_clauses)
6151 /* If this was a workshare clause, see if it had been combined
6152 with its parallel. In that case, continue looking for the
6153 clauses also on the parallel statement itself. */
6154 if (is_parallel_ctx (ctx))
6155 break;
6157 ctx = ctx->outer;
6158 if (ctx == NULL || !is_parallel_ctx (ctx))
6159 break;
6161 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6162 OMP_CLAUSE_LASTPRIVATE);
6163 par_clauses = true;
6167 if (label)
6168 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6171 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6172 (which might be a placeholder). INNER is true if this is an inner
6173 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6174 join markers. Generate the before-loop forking sequence in
6175 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6176 general form of these sequences is
6178 GOACC_REDUCTION_SETUP
6179 GOACC_FORK
6180 GOACC_REDUCTION_INIT
6182 GOACC_REDUCTION_FINI
6183 GOACC_JOIN
6184 GOACC_REDUCTION_TEARDOWN. */
6186 static void
6187 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6188 gcall *fork, gcall *join, gimple_seq *fork_seq,
6189 gimple_seq *join_seq, omp_context *ctx)
6191 gimple_seq before_fork = NULL;
6192 gimple_seq after_fork = NULL;
6193 gimple_seq before_join = NULL;
6194 gimple_seq after_join = NULL;
6195 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6196 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6197 unsigned offset = 0;
6199 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6200 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6202 tree orig = OMP_CLAUSE_DECL (c);
6203 tree var = maybe_lookup_decl (orig, ctx);
6204 tree ref_to_res = NULL_TREE;
6205 tree incoming, outgoing, v1, v2, v3;
6206 bool is_private = false;
6208 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6209 if (rcode == MINUS_EXPR)
6210 rcode = PLUS_EXPR;
6211 else if (rcode == TRUTH_ANDIF_EXPR)
6212 rcode = BIT_AND_EXPR;
6213 else if (rcode == TRUTH_ORIF_EXPR)
6214 rcode = BIT_IOR_EXPR;
6215 tree op = build_int_cst (unsigned_type_node, rcode);
6217 if (!var)
6218 var = orig;
6220 incoming = outgoing = var;
6222 if (!inner)
6224 /* See if an outer construct also reduces this variable. */
6225 omp_context *outer = ctx;
6227 while (omp_context *probe = outer->outer)
6229 enum gimple_code type = gimple_code (probe->stmt);
6230 tree cls;
6232 switch (type)
6234 case GIMPLE_OMP_FOR:
6235 cls = gimple_omp_for_clauses (probe->stmt);
6236 break;
6238 case GIMPLE_OMP_TARGET:
6239 if (gimple_omp_target_kind (probe->stmt)
6240 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6241 goto do_lookup;
6243 cls = gimple_omp_target_clauses (probe->stmt);
6244 break;
6246 default:
6247 goto do_lookup;
6250 outer = probe;
6251 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6252 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6253 && orig == OMP_CLAUSE_DECL (cls))
6255 incoming = outgoing = lookup_decl (orig, probe);
6256 goto has_outer_reduction;
6258 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6259 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6260 && orig == OMP_CLAUSE_DECL (cls))
6262 is_private = true;
6263 goto do_lookup;
6267 do_lookup:
6268 /* This is the outermost construct with this reduction,
6269 see if there's a mapping for it. */
6270 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6271 && maybe_lookup_field (orig, outer) && !is_private)
6273 ref_to_res = build_receiver_ref (orig, false, outer);
6274 if (omp_is_reference (orig))
6275 ref_to_res = build_simple_mem_ref (ref_to_res);
6277 tree type = TREE_TYPE (var);
6278 if (POINTER_TYPE_P (type))
6279 type = TREE_TYPE (type);
6281 outgoing = var;
6282 incoming = omp_reduction_init_op (loc, rcode, type);
6284 else
6286 /* Try to look at enclosing contexts for reduction var,
6287 use original if no mapping found. */
6288 tree t = NULL_TREE;
6289 omp_context *c = ctx->outer;
6290 while (c && !t)
6292 t = maybe_lookup_decl (orig, c);
6293 c = c->outer;
6295 incoming = outgoing = (t ? t : orig);
6298 has_outer_reduction:;
6301 if (!ref_to_res)
6302 ref_to_res = integer_zero_node;
6304 if (omp_is_reference (orig))
6306 tree type = TREE_TYPE (var);
6307 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6309 if (!inner)
6311 tree x = create_tmp_var (TREE_TYPE (type), id);
6312 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6315 v1 = create_tmp_var (type, id);
6316 v2 = create_tmp_var (type, id);
6317 v3 = create_tmp_var (type, id);
6319 gimplify_assign (v1, var, fork_seq);
6320 gimplify_assign (v2, var, fork_seq);
6321 gimplify_assign (v3, var, fork_seq);
6323 var = build_simple_mem_ref (var);
6324 v1 = build_simple_mem_ref (v1);
6325 v2 = build_simple_mem_ref (v2);
6326 v3 = build_simple_mem_ref (v3);
6327 outgoing = build_simple_mem_ref (outgoing);
6329 if (!TREE_CONSTANT (incoming))
6330 incoming = build_simple_mem_ref (incoming);
6332 else
6333 v1 = v2 = v3 = var;
6335 /* Determine position in reduction buffer, which may be used
6336 by target. The parser has ensured that this is not a
6337 variable-sized type. */
6338 fixed_size_mode mode
6339 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6340 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6341 offset = (offset + align - 1) & ~(align - 1);
6342 tree off = build_int_cst (sizetype, offset);
6343 offset += GET_MODE_SIZE (mode);
6345 if (!init_code)
6347 init_code = build_int_cst (integer_type_node,
6348 IFN_GOACC_REDUCTION_INIT);
6349 fini_code = build_int_cst (integer_type_node,
6350 IFN_GOACC_REDUCTION_FINI);
6351 setup_code = build_int_cst (integer_type_node,
6352 IFN_GOACC_REDUCTION_SETUP);
6353 teardown_code = build_int_cst (integer_type_node,
6354 IFN_GOACC_REDUCTION_TEARDOWN);
6357 tree setup_call
6358 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6359 TREE_TYPE (var), 6, setup_code,
6360 unshare_expr (ref_to_res),
6361 incoming, level, op, off);
6362 tree init_call
6363 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6364 TREE_TYPE (var), 6, init_code,
6365 unshare_expr (ref_to_res),
6366 v1, level, op, off);
6367 tree fini_call
6368 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6369 TREE_TYPE (var), 6, fini_code,
6370 unshare_expr (ref_to_res),
6371 v2, level, op, off);
6372 tree teardown_call
6373 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6374 TREE_TYPE (var), 6, teardown_code,
6375 ref_to_res, v3, level, op, off);
6377 gimplify_assign (v1, setup_call, &before_fork);
6378 gimplify_assign (v2, init_call, &after_fork);
6379 gimplify_assign (v3, fini_call, &before_join);
6380 gimplify_assign (outgoing, teardown_call, &after_join);
6383 /* Now stitch things together. */
6384 gimple_seq_add_seq (fork_seq, before_fork);
6385 if (fork)
6386 gimple_seq_add_stmt (fork_seq, fork);
6387 gimple_seq_add_seq (fork_seq, after_fork);
6389 gimple_seq_add_seq (join_seq, before_join);
6390 if (join)
6391 gimple_seq_add_stmt (join_seq, join);
6392 gimple_seq_add_seq (join_seq, after_join);
6395 /* Generate code to implement the REDUCTION clauses, append it
6396 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6397 that should be emitted also inside of the critical section,
6398 in that case clear *CLIST afterwards, otherwise leave it as is
6399 and let the caller emit it itself. */
6401 static void
6402 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6403 gimple_seq *clist, omp_context *ctx)
6405 gimple_seq sub_seq = NULL;
6406 gimple *stmt;
6407 tree x, c;
6408 int count = 0;
6410 /* OpenACC loop reductions are handled elsewhere. */
6411 if (is_gimple_omp_oacc (ctx->stmt))
6412 return;
6414 /* SIMD reductions are handled in lower_rec_input_clauses. */
6415 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6416 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
6417 return;
6419 /* inscan reductions are handled elsewhere. */
6420 if (ctx->scan_inclusive || ctx->scan_exclusive)
6421 return;
6423 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6424 update in that case, otherwise use a lock. */
6425 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6426 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6427 && !OMP_CLAUSE_REDUCTION_TASK (c))
6429 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6430 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6432 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6433 count = -1;
6434 break;
6436 count++;
6439 if (count == 0)
6440 return;
/* Emit one merge operation per reduction clause.  The merges are
   collected in SUB_SEQ and wrapped in GOMP_atomic_start/end below,
   unless the single-scalar-clause OMP_ATOMIC shortcut applies.  */
6442 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6444 tree var, ref, new_var, orig_var;
6445 enum tree_code code;
6446 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6448 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6449 || OMP_CLAUSE_REDUCTION_TASK (c))
6450 continue;
6452 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6453 orig_var = var = OMP_CLAUSE_DECL (c);
/* An array-section reduction is represented as a MEM_REF; peel it
   back to the underlying base declaration.  */
6454 if (TREE_CODE (var) == MEM_REF)
6456 var = TREE_OPERAND (var, 0);
6457 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6458 var = TREE_OPERAND (var, 0);
6459 if (TREE_CODE (var) == ADDR_EXPR)
6460 var = TREE_OPERAND (var, 0);
6461 else
6463 /* If this is a pointer or referenced based array
6464 section, the var could be private in the outer
6465 context e.g. on orphaned loop construct. Pretend this
6466 is private variable's outer reference. */
6467 ccode = OMP_CLAUSE_PRIVATE;
6468 if (TREE_CODE (var) == INDIRECT_REF)
6469 var = TREE_OPERAND (var, 0);
6471 orig_var = var;
6472 if (is_variable_sized (var))
6474 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6475 var = DECL_VALUE_EXPR (var);
6476 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6477 var = TREE_OPERAND (var, 0);
6478 gcc_assert (DECL_P (var));
6481 new_var = lookup_decl (var, ctx);
6482 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6483 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6484 ref = build_outer_var_ref (var, ctx, ccode);
6485 code = OMP_CLAUSE_REDUCTION_CODE (c);
6487 /* reduction(-:var) sums up the partial results, so it acts
6488 identically to reduction(+:var). */
6489 if (code == MINUS_EXPR)
6490 code = PLUS_EXPR;
/* Exactly one scalar reduction clause: a single OMP_ATOMIC update
   is cheaper than a lock, and no other clause needs processing.  */
6492 if (count == 1)
6494 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6496 addr = save_expr (addr);
6497 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6498 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6499 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6500 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6501 gimplify_and_add (x, stmt_seqp);
6502 return;
/* Array-section reduction: emit a loop that walks the private and
   outer arrays in lockstep and merges element by element.  */
6504 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6506 tree d = OMP_CLAUSE_DECL (c);
6507 tree type = TREE_TYPE (d);
6508 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6509 tree i = create_tmp_var (TREE_TYPE (v));
6510 tree ptype = build_pointer_type (TREE_TYPE (type));
6511 tree bias = TREE_OPERAND (d, 1);
6512 d = TREE_OPERAND (d, 0);
6513 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6515 tree b = TREE_OPERAND (d, 1);
6516 b = maybe_lookup_decl (b, ctx);
6517 if (b == NULL)
6519 b = TREE_OPERAND (d, 1);
6520 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6522 if (integer_zerop (bias))
6523 bias = b;
6524 else
6526 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6527 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6528 TREE_TYPE (b), b, bias);
6530 d = TREE_OPERAND (d, 0);
6532 /* For ref build_outer_var_ref already performs this, so
6533 only new_var needs a dereference. */
6534 if (TREE_CODE (d) == INDIRECT_REF)
6536 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6537 gcc_assert (omp_is_reference (var) && var == orig_var);
6539 else if (TREE_CODE (d) == ADDR_EXPR)
6541 if (orig_var == var)
6543 new_var = build_fold_addr_expr (new_var);
6544 ref = build_fold_addr_expr (ref);
6547 else
6549 gcc_assert (orig_var == var);
6550 if (omp_is_reference (var))
6551 ref = build_fold_addr_expr (ref);
6553 if (DECL_P (v))
6555 tree t = maybe_lookup_decl (v, ctx);
6556 if (t)
6557 v = t;
6558 else
6559 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6560 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6562 if (!integer_zerop (bias))
6564 bias = fold_convert_loc (clause_loc, sizetype, bias);
6565 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6566 TREE_TYPE (new_var), new_var,
6567 unshare_expr (bias));
6568 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6569 TREE_TYPE (ref), ref, bias);
6571 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6572 ref = fold_convert_loc (clause_loc, ptype, ref);
6573 tree m = create_tmp_var (ptype);
6574 gimplify_assign (m, new_var, stmt_seqp);
6575 new_var = m;
6576 m = create_tmp_var (ptype);
6577 gimplify_assign (m, ref, stmt_seqp);
6578 ref = m;
6579 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6580 tree body = create_artificial_label (UNKNOWN_LOCATION);
6581 tree end = create_artificial_label (UNKNOWN_LOCATION);
6582 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6583 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6584 tree out = build_simple_mem_ref_loc (clause_loc, ref);
/* User-defined reduction: splice in the combiner with the
   placeholders bound to the current elements.  */
6585 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6587 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6588 tree decl_placeholder
6589 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6590 SET_DECL_VALUE_EXPR (placeholder, out);
6591 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6592 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6593 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6594 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6595 gimple_seq_add_seq (&sub_seq,
6596 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6597 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6598 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6599 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6601 else
6603 x = build2 (code, TREE_TYPE (out), out, priv);
6604 out = unshare_expr (out);
6605 gimplify_assign (out, x, &sub_seq);
/* Advance both element pointers and the index, then loop while
   the index is within the section bounds.  */
6607 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6608 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6609 gimple_seq_add_stmt (&sub_seq, g);
6610 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6611 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6612 gimple_seq_add_stmt (&sub_seq, g);
6613 g = gimple_build_assign (i, PLUS_EXPR, i,
6614 build_int_cst (TREE_TYPE (i), 1));
6615 gimple_seq_add_stmt (&sub_seq, g);
6616 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6617 gimple_seq_add_stmt (&sub_seq, g);
6618 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6620 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6622 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6624 if (omp_is_reference (var)
6625 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6626 TREE_TYPE (ref)))
6627 ref = build_fold_addr_expr_loc (clause_loc, ref);
6628 SET_DECL_VALUE_EXPR (placeholder, ref);
6629 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6630 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6631 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6632 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6633 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6635 else
6637 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6638 ref = build_outer_var_ref (var, ctx);
6639 gimplify_assign (ref, x, &sub_seq);
/* Guard the collected merge statements with GOMP_atomic_start/end,
   including any caller-supplied CLIST statements.  */
6643 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6645 gimple_seq_add_stmt (stmt_seqp, stmt);
6647 gimple_seq_add_seq (stmt_seqp, sub_seq);
6649 if (clist)
6651 gimple_seq_add_seq (stmt_seqp, *clist);
6652 *clist = NULL;
6655 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
6657 gimple_seq_add_stmt (stmt_seqp, stmt);
6661 /* Generate code to implement the COPYPRIVATE clauses. */
6663 static void
6664 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6665 omp_context *ctx)
6667 tree c;
6669 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6671 tree var, new_var, ref, x;
6672 bool by_ref;
6673 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6675 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6676 continue;
6678 var = OMP_CLAUSE_DECL (c);
6679 by_ref = use_pointer_for_field (var, NULL);
6681 ref = build_sender_ref (var, ctx);
6682 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6683 if (by_ref)
6685 x = build_fold_addr_expr_loc (clause_loc, new_var);
6686 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6688 gimplify_assign (ref, x, slist);
6690 ref = build_receiver_ref (var, false, ctx);
6691 if (by_ref)
6693 ref = fold_convert_loc (clause_loc,
6694 build_pointer_type (TREE_TYPE (new_var)),
6695 ref);
6696 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6698 if (omp_is_reference (var))
6700 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6701 ref = build_simple_mem_ref_loc (clause_loc, ref);
6702 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6704 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6705 gimplify_and_add (x, rlist);
6710 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6711 and REDUCTION from the sender (aka parent) side. */
6713 static void
6714 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
6715 omp_context *ctx)
6717 tree c, t;
6718 int ignored_looptemp = 0;
6719 bool is_taskloop = false;
6721 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
6722 by GOMP_taskloop. */
6723 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
6725 ignored_looptemp = 2;
6726 is_taskloop = true;
6729 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6731 tree val, ref, x, var;
6732 bool by_ref, do_in = false, do_out = false;
6733 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
/* Only the clause kinds below move data between parent and child;
   everything else is skipped.  */
6735 switch (OMP_CLAUSE_CODE (c))
6737 case OMP_CLAUSE_PRIVATE:
6738 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6739 break;
6740 continue;
6741 case OMP_CLAUSE_FIRSTPRIVATE:
6742 case OMP_CLAUSE_COPYIN:
6743 case OMP_CLAUSE_LASTPRIVATE:
6744 case OMP_CLAUSE_IN_REDUCTION:
6745 case OMP_CLAUSE__REDUCTEMP_:
6746 break;
6747 case OMP_CLAUSE_REDUCTION:
6748 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
6749 continue;
6750 break;
6751 case OMP_CLAUSE_SHARED:
6752 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6753 break;
6754 continue;
6755 case OMP_CLAUSE__LOOPTEMP_:
6756 if (ignored_looptemp)
6758 ignored_looptemp--;
6759 continue;
6761 break;
6762 default:
6763 continue;
6766 val = OMP_CLAUSE_DECL (c);
/* For array-section (in_)reductions, strip the MEM_REF wrapper to
   get at the base declaration.  */
6767 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6768 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
6769 && TREE_CODE (val) == MEM_REF)
6771 val = TREE_OPERAND (val, 0);
6772 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
6773 val = TREE_OPERAND (val, 0);
6774 if (TREE_CODE (val) == INDIRECT_REF
6775 || TREE_CODE (val) == ADDR_EXPR)
6776 val = TREE_OPERAND (val, 0);
6777 if (is_variable_sized (val))
6778 continue;
6781 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6782 outer taskloop region. */
6783 omp_context *ctx_for_o = ctx;
6784 if (is_taskloop
6785 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
6786 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6787 ctx_for_o = ctx->outer;
6789 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
/* Globals are directly visible in the child and need no copy, with
   the exceptions encoded below (COPYIN, certain task cases).  */
6791 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
6792 && is_global_var (var)
6793 && (val == OMP_CLAUSE_DECL (c)
6794 || !is_task_ctx (ctx)
6795 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
6796 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
6797 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
6798 != POINTER_TYPE)))))
6799 continue;
6801 t = omp_member_access_dummy_var (var);
6802 if (t)
6804 var = DECL_VALUE_EXPR (var);
6805 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
6806 if (o != t)
6807 var = unshare_and_remap (var, t, o);
6808 else
6809 var = unshare_expr (var);
6812 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
6814 /* Handle taskloop firstprivate/lastprivate, where the
6815 lastprivate on GIMPLE_OMP_TASK is represented as
6816 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6817 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
6818 x = omp_build_component_ref (ctx->sender_decl, f);
6819 if (use_pointer_for_field (val, ctx))
6820 var = build_fold_addr_expr (var);
6821 gimplify_assign (x, var, ilist);
6822 DECL_ABSTRACT_ORIGIN (f) = NULL;
6823 continue;
6826 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6827 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
6828 || val == OMP_CLAUSE_DECL (c))
6829 && is_variable_sized (val))
6830 continue;
6831 by_ref = use_pointer_for_field (val, NULL);
/* Decide whether VAL is copied into the child on entry (do_in),
   copied back out on exit (do_out), or both.  */
6833 switch (OMP_CLAUSE_CODE (c))
6835 case OMP_CLAUSE_FIRSTPRIVATE:
6836 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
6837 && !by_ref
6838 && is_task_ctx (ctx))
6839 TREE_NO_WARNING (var) = 1;
6840 do_in = true;
6841 break;
6843 case OMP_CLAUSE_PRIVATE:
6844 case OMP_CLAUSE_COPYIN:
6845 case OMP_CLAUSE__LOOPTEMP_:
6846 case OMP_CLAUSE__REDUCTEMP_:
6847 do_in = true;
6848 break;
6850 case OMP_CLAUSE_LASTPRIVATE:
6851 if (by_ref || omp_is_reference (val))
6853 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6854 continue;
6855 do_in = true;
6857 else
6859 do_out = true;
6860 if (lang_hooks.decls.omp_private_outer_ref (val))
6861 do_in = true;
6863 break;
6865 case OMP_CLAUSE_REDUCTION:
6866 case OMP_CLAUSE_IN_REDUCTION:
6867 do_in = true;
6868 if (val == OMP_CLAUSE_DECL (c))
6870 if (is_task_ctx (ctx))
6871 by_ref = use_pointer_for_field (val, ctx);
6872 else
6873 do_out = !(by_ref || omp_is_reference (val));
6875 else
6876 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
6877 break;
6879 default:
6880 gcc_unreachable ();
/* Copy into the sender record on entry and/or back out on exit.  */
6883 if (do_in)
6885 ref = build_sender_ref (val, ctx);
6886 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
6887 gimplify_assign (ref, x, ilist);
6888 if (is_task_ctx (ctx))
6889 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
6892 if (do_out)
6894 ref = build_sender_ref (val, ctx);
6895 gimplify_assign (var, ref, olist);
6900 /* Generate code to implement SHARED from the sender (aka parent)
6901 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6902 list things that got automatically shared. */
6904 static void
6905 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
6907 tree var, ovar, nvar, t, f, x, record_type;
6909 if (ctx->record_type == NULL)
6910 return;
6912 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
/* Walk the sender record's fields; each field whose abstract origin
   is a non-FIELD_DECL identifies a shared variable to transfer.  */
6913 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
6915 ovar = DECL_ABSTRACT_ORIGIN (f);
6916 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
6917 continue;
6919 nvar = maybe_lookup_decl (ovar, ctx);
6920 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
6921 continue;
6923 /* If CTX is a nested parallel directive. Find the immediately
6924 enclosing parallel or workshare construct that contains a
6925 mapping for OVAR. */
6926 var = lookup_decl_in_outer_ctx (ovar, ctx);
6928 t = omp_member_access_dummy_var (var);
6929 if (t)
6931 var = DECL_VALUE_EXPR (var);
6932 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
6933 if (o != t)
6934 var = unshare_and_remap (var, t, o);
6935 else
6936 var = unshare_expr (var);
/* Pass either the variable's address or its value, depending on
   how the field was laid out.  */
6939 if (use_pointer_for_field (ovar, ctx))
6941 x = build_sender_ref (ovar, ctx);
6942 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
6943 && TREE_TYPE (f) == TREE_TYPE (ovar))
6945 gcc_assert (is_parallel_ctx (ctx)
6946 && DECL_ARTIFICIAL (ovar));
6947 /* _condtemp_ clause. */
6948 var = build_constructor (TREE_TYPE (x), NULL);
6950 else
6951 var = build_fold_addr_expr (var);
6952 gimplify_assign (x, var, ilist);
6954 else
6956 x = build_sender_ref (ovar, ctx);
6957 gimplify_assign (x, var, ilist);
6959 if (!TREE_READONLY (var)
6960 /* We don't need to receive a new reference to a result
6961 or parm decl. In fact we may not store to it as we will
6962 invalidate any pending RSO and generate wrong gimple
6963 during inlining. */
6964 && !((TREE_CODE (var) == RESULT_DECL
6965 || TREE_CODE (var) == PARM_DECL)
6966 && DECL_BY_REFERENCE (var)))
6968 x = build_sender_ref (ovar, ctx);
6969 gimplify_assign (var, x, olist);
6975 /* Emit an OpenACC head marker call, encapsulating the partitioning and
6976 other information that must be processed by the target compiler.
6977 Return the maximum number of dimensions the associated loop might
6978 be partitioned over. */
6980 static unsigned
6981 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
6982 gimple_seq *seq, omp_context *ctx)
6984 unsigned levels = 0;
6985 unsigned tag = 0;
6986 tree gang_static = NULL_TREE;
6987 auto_vec<tree, 5> args;
6989 args.quick_push (build_int_cst
6990 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
6991 args.quick_push (ddvar);
/* Accumulate partitioning flags in TAG and count the explicitly
   requested partitioning levels.  */
6992 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6994 switch (OMP_CLAUSE_CODE (c))
6996 case OMP_CLAUSE_GANG:
6997 tag |= OLF_DIM_GANG;
6998 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
6999 /* static:* is represented by -1, and we can ignore it, as
7000 scheduling is always static. */
7001 if (gang_static && integer_minus_onep (gang_static))
7002 gang_static = NULL_TREE;
7003 levels++;
7004 break;
7006 case OMP_CLAUSE_WORKER:
7007 tag |= OLF_DIM_WORKER;
7008 levels++;
7009 break;
7011 case OMP_CLAUSE_VECTOR:
7012 tag |= OLF_DIM_VECTOR;
7013 levels++;
7014 break;
7016 case OMP_CLAUSE_SEQ:
7017 tag |= OLF_SEQ;
7018 break;
7020 case OMP_CLAUSE_AUTO:
7021 tag |= OLF_AUTO;
7022 break;
7024 case OMP_CLAUSE_INDEPENDENT:
7025 tag |= OLF_INDEPENDENT;
7026 break;
7028 case OMP_CLAUSE_TILE:
7029 tag |= OLF_TILE;
7030 break;
7032 default:
7033 continue;
7037 if (gang_static)
7039 if (DECL_P (gang_static))
7040 gang_static = build_outer_var_ref (gang_static, ctx);
7041 tag |= OLF_GANG_STATIC;
7044 /* In a parallel region, loops are implicitly INDEPENDENT. */
7045 omp_context *tgt = enclosing_target_ctx (ctx);
7046 if (!tgt || is_oacc_parallel (tgt))
7047 tag |= OLF_INDEPENDENT;
7049 if (tag & OLF_TILE)
7050 /* Tiling could use all 3 levels. */
7051 levels = 3;
7052 else
7054 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7055 Ensure at least one level, or 2 for possible auto
7056 partitioning */
7057 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7058 << OLF_DIM_BASE) | OLF_SEQ));
7060 if (levels < 1u + maybe_auto)
7061 levels = 1u + maybe_auto;
/* Emit the IFN_UNIQUE head-mark call with levels, tag and the
   optional gang static argument.  */
7064 args.quick_push (build_int_cst (integer_type_node, levels));
7065 args.quick_push (build_int_cst (integer_type_node, tag));
7066 if (gang_static)
7067 args.quick_push (gang_static);
7069 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7070 gimple_set_location (call, loc);
7071 gimple_set_lhs (call, ddvar);
7072 gimple_seq_add_stmt (seq, call);
7074 return levels;
7077 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7078 partitioning level of the enclosed region. */
7080 static void
7081 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7082 tree tofollow, gimple_seq *seq)
7084 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7085 : IFN_UNIQUE_OACC_TAIL_MARK);
7086 tree marker = build_int_cst (integer_type_node, marker_kind);
7087 int nargs = 2 + (tofollow != NULL_TREE);
7088 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7089 marker, ddvar, tofollow);
7090 gimple_set_location (call, loc);
7091 gimple_set_lhs (call, ddvar);
7092 gimple_seq_add_stmt (seq, call);
7095 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7096 the loop clauses, from which we extract reductions. Initialize
7097 HEAD and TAIL. */
7099 static void
7100 lower_oacc_head_tail (location_t loc, tree clauses,
7101 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7103 bool inner = false;
7104 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7105 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7107 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7108 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7109 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7111 gcc_assert (count);
/* Emit one fork/join pair per partitioning level; inner levels nest
   inside the outer ones by appending forks to HEAD and prepending
   joins to TAIL.  */
7112 for (unsigned done = 1; count; count--, done++)
7114 gimple_seq fork_seq = NULL;
7115 gimple_seq join_seq = NULL;
7117 tree place = build_int_cst (integer_type_node, -1);
7118 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7119 fork_kind, ddvar, place);
7120 gimple_set_location (fork, loc);
7121 gimple_set_lhs (fork, ddvar);
7123 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7124 join_kind, ddvar, place);
7125 gimple_set_location (join, loc);
7126 gimple_set_lhs (join, ddvar);
7128 /* Mark the beginning of this level sequence. */
7129 if (inner)
7130 lower_oacc_loop_marker (loc, ddvar, true,
7131 build_int_cst (integer_type_node, count),
7132 &fork_seq);
7133 lower_oacc_loop_marker (loc, ddvar, false,
7134 build_int_cst (integer_type_node, done),
7135 &join_seq);
7137 lower_oacc_reductions (loc, clauses, place, inner,
7138 fork, join, &fork_seq, &join_seq, ctx);
7140 /* Append this level to head. */
7141 gimple_seq_add_seq (head, fork_seq);
7142 /* Prepend it to tail. */
7143 gimple_seq_add_seq (&join_seq, *tail);
7144 *tail = join_seq;
7146 inner = true;
7149 /* Mark the end of the sequence. */
7150 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7151 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7154 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7155 catch handler and return it. This prevents programs from violating the
7156 structured block semantics with throws. */
7158 static gimple_seq
7159 maybe_catch_exception (gimple_seq body)
7161 gimple *g;
7162 tree decl;
7164 if (!flag_exceptions)
7165 return body;
7167 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7168 decl = lang_hooks.eh_protect_cleanup_actions ();
7169 else
7170 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7172 g = gimple_build_eh_must_not_throw (decl);
7173 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7174 GIMPLE_TRY_CATCH);
7176 return gimple_seq_alloc_with_stmt (g);
7180 /* Routines to lower OMP directives into OMP-GIMPLE. */
7182 /* If ctx is a worksharing context inside of a cancellable parallel
7183 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7184 and conditional branch to parallel's cancel_label to handle
7185 cancellation in the implicit barrier. */
7187 static void
7188 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7189 gimple_seq *body)
7191 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7192 if (gimple_omp_return_nowait_p (omp_return))
7193 return;
/* Walk outward looking for a cancellable parallel; stop at the first
   enclosing construct that is neither that nor a taskgroup.  */
7194 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7195 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7196 && outer->cancellable)
7198 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7199 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7200 tree lhs = create_tmp_var (c_bool_type);
7201 gimple_omp_return_set_lhs (omp_return, lhs);
7202 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
/* Branch to the parallel's cancel label when the barrier reports
   cancellation, otherwise fall through.  */
7203 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7204 fold_convert (c_bool_type,
7205 boolean_false_node),
7206 outer->cancel_label, fallthru_label);
7207 gimple_seq_add_stmt (body, g);
7208 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7210 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7211 return;
7214 /* Find the first task_reduction or reduction clause or return NULL
7215 if there are none. */
7217 static inline tree
7218 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7219 enum omp_clause_code ccode)
7221 while (1)
7223 clauses = omp_find_clause (clauses, ccode);
7224 if (clauses == NULL_TREE)
7225 return NULL_TREE;
7226 if (ccode != OMP_CLAUSE_REDUCTION
7227 || code == OMP_TASKLOOP
7228 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7229 return clauses;
7230 clauses = OMP_CLAUSE_CHAIN (clauses);
7234 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7235 gimple_seq *, gimple_seq *);
7237 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7238 CTX is the enclosing OMP context for the current statement. */
7240 static void
7241 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7243 tree block, control;
7244 gimple_stmt_iterator tgsi;
7245 gomp_sections *stmt;
7246 gimple *t;
7247 gbind *new_stmt, *bind;
7248 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7250 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7252 push_gimplify_context ();
7254 dlist = NULL;
7255 ilist = NULL;
/* If there are task reductions, prepend a _reductemp_ clause and
   build their init/fini sequences.  */
7257 tree rclauses
7258 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7259 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7260 tree rtmp = NULL_TREE;
7261 if (rclauses)
7263 tree type = build_pointer_type (pointer_sized_int_node);
7264 tree temp = create_tmp_var (type);
7265 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7266 OMP_CLAUSE_DECL (c) = temp;
7267 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7268 gimple_omp_sections_set_clauses (stmt, c);
7269 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7270 gimple_omp_sections_clauses (stmt),
7271 &ilist, &tred_dlist);
7272 rclauses = c;
7273 rtmp = make_ssa_name (type);
7274 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7277 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7278 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7280 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7281 &ilist, &dlist, ctx, NULL);
7283 control = create_tmp_var (unsigned_type_node, ".section");
7284 gimple_omp_sections_set_control (stmt, control);
/* Lower each section body in place; lastprivate handling is attached
   to the last section.  */
7286 new_body = gimple_omp_body (stmt);
7287 gimple_omp_set_body (stmt, NULL);
7288 tgsi = gsi_start (new_body);
7289 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7291 omp_context *sctx;
7292 gimple *sec_start;
7294 sec_start = gsi_stmt (tgsi);
7295 sctx = maybe_lookup_ctx (sec_start);
7296 gcc_assert (sctx);
7298 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7299 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7300 GSI_CONTINUE_LINKING);
7301 gimple_omp_set_body (sec_start, NULL);
7303 if (gsi_one_before_end_p (tgsi))
7305 gimple_seq l = NULL;
7306 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7307 &ilist, &l, &clist, ctx);
7308 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7309 gimple_omp_section_set_last (sec_start);
7312 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7313 GSI_CONTINUE_LINKING);
7316 block = make_node (BLOCK);
7317 bind = gimple_build_bind (NULL, new_body, block);
/* Reduction merges go after the sections body; if any conditional
   lastprivate statements were collected, emit them inside the same
   atomic region.  */
7319 olist = NULL;
7320 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7321 &clist, ctx);
7322 if (clist)
7324 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7325 gcall *g = gimple_build_call (fndecl, 0);
7326 gimple_seq_add_stmt (&olist, g);
7327 gimple_seq_add_seq (&olist, clist);
7328 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7329 g = gimple_build_call (fndecl, 0);
7330 gimple_seq_add_stmt (&olist, g);
7333 block = make_node (BLOCK);
7334 new_stmt = gimple_build_bind (NULL, NULL, block);
7335 gsi_replace (gsi_p, new_stmt, true);
7337 pop_gimplify_context (new_stmt);
7338 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7339 BLOCK_VARS (block) = gimple_bind_vars (bind);
7340 if (BLOCK_VARS (block))
7341 TREE_USED (block) = 1;
/* Assemble the final lowered body: input clauses, the sections
   statement, the switch, the bind, continue/return and cleanups.  */
7343 new_body = NULL;
7344 gimple_seq_add_seq (&new_body, ilist);
7345 gimple_seq_add_stmt (&new_body, stmt);
7346 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7347 gimple_seq_add_stmt (&new_body, bind);
7349 t = gimple_build_omp_continue (control, control);
7350 gimple_seq_add_stmt (&new_body, t);
7352 gimple_seq_add_seq (&new_body, olist);
7353 if (ctx->cancellable)
7354 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7355 gimple_seq_add_seq (&new_body, dlist);
7357 new_body = maybe_catch_exception (new_body);
7359 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7360 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7361 t = gimple_build_omp_return (nowait);
7362 gimple_seq_add_stmt (&new_body, t);
7363 gimple_seq_add_seq (&new_body, tred_dlist);
7364 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7366 if (rclauses)
7367 OMP_CLAUSE_DECL (rclauses) = rtmp;
7369 gimple_bind_set_body (new_stmt, new_body);
7373 /* A subroutine of lower_omp_single. Expand the simple form of
7374 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7376 if (GOMP_single_start ())
7377 BODY;
7378 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7380 FIXME. It may be better to delay expanding the logic of this until
7381 pass_expand_omp. The expanded logic may make the job more difficult
7382 to a synchronization analysis pass. */
7384 static void
7385 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7387 location_t loc = gimple_location (single_stmt);
7388 tree tlabel = create_artificial_label (loc);
7389 tree flabel = create_artificial_label (loc);
7390 gimple *call, *cond;
7391 tree lhs, decl;
7393 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7394 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7395 call = gimple_build_call (decl, 0);
7396 gimple_call_set_lhs (call, lhs);
7397 gimple_seq_add_stmt (pre_p, call);
7399 cond = gimple_build_cond (EQ_EXPR, lhs,
7400 fold_convert_loc (loc, TREE_TYPE (lhs),
7401 boolean_true_node),
7402 tlabel, flabel);
7403 gimple_seq_add_stmt (pre_p, cond);
7404 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7405 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7406 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7410 /* A subroutine of lower_omp_single. Expand the simple form of
7411 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7413 #pragma omp single copyprivate (a, b, c)
7415 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7418 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7420 BODY;
7421 copyout.a = a;
7422 copyout.b = b;
7423 copyout.c = c;
7424 GOMP_single_copy_end (&copyout);
7426 else
7428 a = copyout_p->a;
7429 b = copyout_p->b;
7430 c = copyout_p->c;
7432 GOMP_barrier ();
7435 FIXME. It may be better to delay expanding the logic of this until
7436 pass_expand_omp. The expanded logic may make the job more difficult
7437 to a synchronization analysis pass. */
7439 static void
7440 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7441 omp_context *ctx)
7443 tree ptr_type, t, l0, l1, l2, bfn_decl;
7444 gimple_seq copyin_seq;
7445 location_t loc = gimple_location (single_stmt);
7447 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7449 ptr_type = build_pointer_type (ctx->record_type);
7450 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7452 l0 = create_artificial_label (loc);
7453 l1 = create_artificial_label (loc);
7454 l2 = create_artificial_label (loc);
7456 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7457 t = build_call_expr_loc (loc, bfn_decl, 0);
7458 t = fold_convert_loc (loc, ptr_type, t);
7459 gimplify_assign (ctx->receiver_decl, t, pre_p);
7461 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7462 build_int_cst (ptr_type, 0));
7463 t = build3 (COND_EXPR, void_type_node, t,
7464 build_and_jump (&l0), build_and_jump (&l1));
7465 gimplify_and_add (t, pre_p);
7467 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7469 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7471 copyin_seq = NULL;
7472 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7473 &copyin_seq, ctx);
7475 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7476 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7477 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7478 gimplify_and_add (t, pre_p);
7480 t = build_and_jump (&l2);
7481 gimplify_and_add (t, pre_p);
7483 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7485 gimple_seq_add_seq (pre_p, copyin_seq);
7487 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7491 /* Expand code for an OpenMP single directive. */
7493 static void
7494 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7496 tree block;
7497 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7498 gbind *bind;
7499 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7501 push_gimplify_context ();
7503 block = make_node (BLOCK);
7504 bind = gimple_build_bind (NULL, NULL, block);
7505 gsi_replace (gsi_p, bind, true);
7506 bind_body = NULL;
7507 dlist = NULL;
7508 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7509 &bind_body, &dlist, ctx, NULL);
7510 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7512 gimple_seq_add_stmt (&bind_body, single_stmt);
7514 if (ctx->record_type)
7515 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7516 else
7517 lower_omp_single_simple (single_stmt, &bind_body);
7519 gimple_omp_set_body (single_stmt, NULL);
7521 gimple_seq_add_seq (&bind_body, dlist);
7523 bind_body = maybe_catch_exception (bind_body);
7525 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7526 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7527 gimple *g = gimple_build_omp_return (nowait);
7528 gimple_seq_add_stmt (&bind_body_tail, g);
7529 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
7530 if (ctx->record_type)
7532 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7533 tree clobber = build_constructor (ctx->record_type, NULL);
7534 TREE_THIS_VOLATILE (clobber) = 1;
7535 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7536 clobber), GSI_SAME_STMT);
7538 gimple_seq_add_seq (&bind_body, bind_body_tail);
7539 gimple_bind_set_body (bind, bind_body);
7541 pop_gimplify_context (bind);
7543 gimple_bind_append_vars (bind, ctx->block_vars);
7544 BLOCK_VARS (block) = ctx->block_vars;
7545 if (BLOCK_VARS (block))
7546 TREE_USED (block) = 1;
7550 /* Expand code for an OpenMP master directive. */
7552 static void
7553 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7555 tree block, lab = NULL, x, bfn_decl;
7556 gimple *stmt = gsi_stmt (*gsi_p);
7557 gbind *bind;
7558 location_t loc = gimple_location (stmt);
7559 gimple_seq tseq;
7561 push_gimplify_context ();
7563 block = make_node (BLOCK);
7564 bind = gimple_build_bind (NULL, NULL, block);
7565 gsi_replace (gsi_p, bind, true);
7566 gimple_bind_add_stmt (bind, stmt);
7568 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7569 x = build_call_expr_loc (loc, bfn_decl, 0);
7570 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7571 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7572 tseq = NULL;
7573 gimplify_and_add (x, &tseq);
7574 gimple_bind_add_seq (bind, tseq);
7576 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7577 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7578 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7579 gimple_omp_set_body (stmt, NULL);
7581 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7583 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7585 pop_gimplify_context (bind);
7587 gimple_bind_append_vars (bind, ctx->block_vars);
7588 BLOCK_VARS (block) = ctx->block_vars;
7591 /* Helper function for lower_omp_task_reductions. For a specific PASS
7592 find out the current clause it should be processed, or return false
7593 if all have been processed already. */
7595 static inline bool
7596 omp_task_reduction_iterate (int pass, enum tree_code code,
7597 enum omp_clause_code ccode, tree *c, tree *decl,
7598 tree *type, tree *next)
7600 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7602 if (ccode == OMP_CLAUSE_REDUCTION
7603 && code != OMP_TASKLOOP
7604 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7605 continue;
7606 *decl = OMP_CLAUSE_DECL (*c);
7607 *type = TREE_TYPE (*decl);
7608 if (TREE_CODE (*decl) == MEM_REF)
7610 if (pass != 1)
7611 continue;
7613 else
7615 if (omp_is_reference (*decl))
7616 *type = TREE_TYPE (*type);
7617 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7618 continue;
7620 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7621 return true;
7623 *decl = NULL_TREE;
7624 *type = NULL_TREE;
7625 *next = NULL_TREE;
7626 return false;
7629 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7630 OMP_TASKGROUP only with task modifier). Register mapping of those in
7631 START sequence and reducing them and unregister them in the END sequence. */
7633 static void
7634 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7635 gimple_seq *start, gimple_seq *end)
7637 enum omp_clause_code ccode
7638 = (code == OMP_TASKGROUP
7639 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
7640 tree cancellable = NULL_TREE;
7641 clauses = omp_task_reductions_find_first (clauses, code, ccode);
7642 if (clauses == NULL_TREE)
7643 return;
/* For worksharing constructs nested (possibly through taskgroups) in a
   cancellable parallel, remember that fact; CANCELLABLE is later
   replaced by the _reductemp_ decl used to test for cancellation.  */
7644 if (code == OMP_FOR || code == OMP_SECTIONS)
7646 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7647 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7648 && outer->cancellable)
7650 cancellable = error_mark_node;
7651 break;
7653 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7654 break;
/* Build a record type holding, per reduction, the privatized copy
   followed by a bool flag (see the reduction loop over *END below).  */
7656 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7657 tree *last = &TYPE_FIELDS (record_type);
7658 unsigned cnt = 0;
/* For cancellable constructs reserve a pointer and an int field at the
   start of the record.  */
7659 if (cancellable)
7661 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7662 ptr_type_node);
7663 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7664 integer_type_node);
7665 *last = field;
7666 DECL_CHAIN (field) = ifield;
7667 last = &DECL_CHAIN (ifield);
7668 DECL_CONTEXT (field) = record_type;
7669 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7670 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7671 DECL_CONTEXT (ifield) = record_type;
7672 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7673 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
/* Two passes, matching omp_task_reduction_iterate: pass 0 for
   constant-size types, pass 1 for variable-sized ones and array
   sections.  Each clause contributes a data field plus a bool field.  */
7675 for (int pass = 0; pass < 2; pass++)
7677 tree decl, type, next;
7678 for (tree c = clauses;
7679 omp_task_reduction_iterate (pass, code, ccode,
7680 &c, &decl, &type, &next); c = next)
7682 ++cnt;
7683 tree new_type = type;
7684 if (ctx->outer)
7685 new_type = remap_type (type, &ctx->outer->cb);
7686 tree field
7687 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7688 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7689 new_type);
7690 if (DECL_P (decl) && type == TREE_TYPE (decl))
7692 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7693 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7694 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7696 else
7697 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7698 DECL_CONTEXT (field) = record_type;
7699 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7700 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7701 *last = field;
7702 last = &DECL_CHAIN (field);
7703 tree bfield
7704 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7705 boolean_type_node);
7706 DECL_CONTEXT (bfield) = record_type;
7707 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7708 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7709 *last = bfield;
7710 last = &DECL_CHAIN (bfield);
7713 *last = NULL_TREE;
7714 layout_type (record_type);
7716 /* Build up an array which registers with the runtime all the reductions
7717 and deregisters them at the end. Format documented in libgomp/task.c. */
7718 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7719 tree avar = create_tmp_var_raw (atype);
7720 gimple_add_tmp_var (avar);
7721 TREE_ADDRESSABLE (avar) = 1;
7722 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7723 NULL_TREE, NULL_TREE);
7724 tree t = build_int_cst (pointer_sized_int_node, cnt);
7725 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7726 gimple_seq seq = NULL;
/* Element 1 is the per-thread chunk size: the record size rounded up
   to a multiple of CACHESZ bytes.  */
7727 tree sz = fold_convert (pointer_sized_int_node,
7728 TYPE_SIZE_UNIT (record_type));
7729 int cachesz = 64;
7730 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7731 build_int_cst (pointer_sized_int_node, cachesz - 1));
7732 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7733 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7734 ctx->task_reductions.create (1 + cnt);
7735 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7736 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7737 ? sz : NULL_TREE);
7738 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7739 gimple_seq_add_seq (start, seq);
7740 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7741 NULL_TREE, NULL_TREE);
7742 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7743 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7744 NULL_TREE, NULL_TREE);
7745 t = build_int_cst (pointer_sized_int_node,
7746 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7747 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7748 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7749 NULL_TREE, NULL_TREE);
7750 t = build_int_cst (pointer_sized_int_node, -1);
7751 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7752 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7753 NULL_TREE, NULL_TREE);
7754 t = build_int_cst (pointer_sized_int_node, 0);
7755 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7757 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7758 and for each task reduction checks a bool right after the private variable
7759 within that thread's chunk; if the bool is clear, it hasn't been
7760 initialized and thus isn't going to be reduced nor destructed, otherwise
7761 reduce and destruct it. */
7762 tree idx = create_tmp_var (size_type_node);
7763 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7764 tree num_thr_sz = create_tmp_var (size_type_node);
7765 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7766 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7767 tree lab3 = NULL_TREE;
7768 gimple *g;
7769 if (code == OMP_FOR || code == OMP_SECTIONS)
7771 /* For worksharing constructs, only perform it in the master thread,
7772 with the exception of cancelled implicit barriers - then only handle
7773 the current thread. */
7774 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7775 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7776 tree thr_num = create_tmp_var (integer_type_node);
7777 g = gimple_build_call (t, 0);
7778 gimple_call_set_lhs (g, thr_num);
7779 gimple_seq_add_stmt (end, g);
7780 if (cancellable)
7782 tree c;
7783 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7784 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7785 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7786 if (code == OMP_FOR)
7787 c = gimple_omp_for_clauses (ctx->stmt);
7788 else /* if (code == OMP_SECTIONS) */
7789 c = gimple_omp_sections_clauses (ctx->stmt);
7790 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7791 cancellable = c;
7792 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7793 lab5, lab6);
7794 gimple_seq_add_stmt (end, g);
7795 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7796 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7797 gimple_seq_add_stmt (end, g);
7798 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7799 build_one_cst (TREE_TYPE (idx)));
7800 gimple_seq_add_stmt (end, g);
7801 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7802 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7804 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7805 gimple_seq_add_stmt (end, g);
7806 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7808 if (code != OMP_PARALLEL)
7810 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7811 tree num_thr = create_tmp_var (integer_type_node);
7812 g = gimple_build_call (t, 0);
7813 gimple_call_set_lhs (g, num_thr);
7814 gimple_seq_add_stmt (end, g);
7815 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7816 gimple_seq_add_stmt (end, g);
7817 if (cancellable)
7818 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7820 else
7822 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7823 OMP_CLAUSE__REDUCTEMP_);
7824 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7825 t = fold_convert (size_type_node, t);
7826 gimplify_assign (num_thr_sz, t, end);
7828 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7829 NULL_TREE, NULL_TREE);
7830 tree data = create_tmp_var (pointer_sized_int_node);
7831 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7832 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7833 tree ptr;
7834 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7835 ptr = create_tmp_var (build_pointer_type (record_type));
7836 else
7837 ptr = create_tmp_var (ptr_type_node);
7838 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
/* Second walk over the clauses: fill in the runtime array entries in
   *START and emit the per-thread merge/destruction code into *END.
   Skip the two cancellation bookkeeping fields if present.  */
7840 tree field = TYPE_FIELDS (record_type);
7841 cnt = 0;
7842 if (cancellable)
7843 field = DECL_CHAIN (DECL_CHAIN (field));
7844 for (int pass = 0; pass < 2; pass++)
7846 tree decl, type, next;
7847 for (tree c = clauses;
7848 omp_task_reduction_iterate (pass, code, ccode,
7849 &c, &decl, &type, &next); c = next)
7851 tree var = decl, ref;
7852 if (TREE_CODE (decl) == MEM_REF)
7854 var = TREE_OPERAND (var, 0);
7855 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7856 var = TREE_OPERAND (var, 0);
7857 tree v = var;
7858 if (TREE_CODE (var) == ADDR_EXPR)
7859 var = TREE_OPERAND (var, 0);
7860 else if (TREE_CODE (var) == INDIRECT_REF)
7861 var = TREE_OPERAND (var, 0);
7862 tree orig_var = var;
7863 if (is_variable_sized (var))
7865 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7866 var = DECL_VALUE_EXPR (var);
7867 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7868 var = TREE_OPERAND (var, 0);
7869 gcc_assert (DECL_P (var));
7871 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7872 if (orig_var != var)
7873 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7874 else if (TREE_CODE (v) == ADDR_EXPR)
7875 t = build_fold_addr_expr (t);
7876 else if (TREE_CODE (v) == INDIRECT_REF)
7877 t = build_fold_indirect_ref (t);
7878 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7880 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7881 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7882 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7884 if (!integer_zerop (TREE_OPERAND (decl, 1)))
7885 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7886 fold_convert (size_type_node,
7887 TREE_OPERAND (decl, 1)));
7889 else
7891 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7892 if (!omp_is_reference (decl))
7893 t = build_fold_addr_expr (t);
7895 t = fold_convert (pointer_sized_int_node, t);
7896 seq = NULL;
7897 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7898 gimple_seq_add_seq (start, seq);
7899 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7900 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7901 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7902 t = unshare_expr (byte_position (field));
7903 t = fold_convert (pointer_sized_int_node, t);
7904 ctx->task_reduction_map->put (c, cnt);
7905 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7906 ? t : NULL_TREE);
7907 seq = NULL;
7908 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7909 gimple_seq_add_seq (start, seq);
7910 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7911 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7912 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7914 tree bfield = DECL_CHAIN (field);
7915 tree cond;
7916 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7917 /* In parallel or worksharing all threads unconditionally
7918 initialize all their task reduction private variables. */
7919 cond = boolean_true_node;
7920 else if (TREE_TYPE (ptr) == ptr_type_node)
7922 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7923 unshare_expr (byte_position (bfield)));
7924 seq = NULL;
7925 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7926 gimple_seq_add_seq (end, seq);
7927 tree pbool = build_pointer_type (TREE_TYPE (bfield));
7928 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7929 build_int_cst (pbool, 0));
7931 else
7932 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7933 build_simple_mem_ref (ptr), bfield, NULL_TREE);
7934 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7935 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7936 tree condv = create_tmp_var (boolean_type_node);
7937 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7938 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7939 lab3, lab4);
7940 gimple_seq_add_stmt (end, g);
7941 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7942 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7944 /* If this reduction doesn't need destruction and parallel
7945 has been cancelled, there is nothing to do for this
7946 reduction, so jump around the merge operation. */
7947 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7948 g = gimple_build_cond (NE_EXPR, cancellable,
7949 build_zero_cst (TREE_TYPE (cancellable)),
7950 lab4, lab5);
7951 gimple_seq_add_stmt (end, g);
7952 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7955 tree new_var;
7956 if (TREE_TYPE (ptr) == ptr_type_node)
7958 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7959 unshare_expr (byte_position (field)));
7960 seq = NULL;
7961 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7962 gimple_seq_add_seq (end, seq);
7963 tree pbool = build_pointer_type (TREE_TYPE (field));
7964 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7965 build_int_cst (pbool, 0));
7967 else
7968 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7969 build_simple_mem_ref (ptr), field, NULL_TREE);
7971 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7972 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7973 ref = build_simple_mem_ref (ref);
7974 /* reduction(-:var) sums up the partial results, so it acts
7975 identically to reduction(+:var). */
7976 if (rcode == MINUS_EXPR)
7977 rcode = PLUS_EXPR;
7978 if (TREE_CODE (decl) == MEM_REF)
7980 tree type = TREE_TYPE (new_var);
7981 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7982 tree i = create_tmp_var (TREE_TYPE (v));
7983 tree ptype = build_pointer_type (TREE_TYPE (type));
7984 if (DECL_P (v))
7986 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7987 tree vv = create_tmp_var (TREE_TYPE (v));
7988 gimplify_assign (vv, v, start);
7989 v = vv;
7991 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7992 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7993 new_var = build_fold_addr_expr (new_var);
7994 new_var = fold_convert (ptype, new_var);
7995 ref = fold_convert (ptype, ref);
7996 tree m = create_tmp_var (ptype);
7997 gimplify_assign (m, new_var, end);
7998 new_var = m;
7999 m = create_tmp_var (ptype);
8000 gimplify_assign (m, ref, end);
8001 ref = m;
8002 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8003 tree body = create_artificial_label (UNKNOWN_LOCATION);
8004 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8005 gimple_seq_add_stmt (end, gimple_build_label (body));
8006 tree priv = build_simple_mem_ref (new_var);
8007 tree out = build_simple_mem_ref (ref);
8008 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8010 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8011 tree decl_placeholder
8012 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8013 tree lab6 = NULL_TREE;
8014 if (cancellable)
8016 /* If this reduction needs destruction and parallel
8017 has been cancelled, jump around the merge operation
8018 to the destruction. */
8019 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8020 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8021 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8022 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8023 lab6, lab5);
8024 gimple_seq_add_stmt (end, g);
8025 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8027 SET_DECL_VALUE_EXPR (placeholder, out);
8028 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8029 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8030 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8031 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8032 gimple_seq_add_seq (end,
8033 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8034 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8035 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8037 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8038 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8040 if (cancellable)
8041 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8042 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8043 if (x)
8045 gimple_seq tseq = NULL;
8046 gimplify_stmt (&x, &tseq);
8047 gimple_seq_add_seq (end, tseq);
8050 else
8052 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8053 out = unshare_expr (out);
8054 gimplify_assign (out, x, end);
8056 gimple *g
8057 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8058 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8059 gimple_seq_add_stmt (end, g);
8060 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8061 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8062 gimple_seq_add_stmt (end, g);
8063 g = gimple_build_assign (i, PLUS_EXPR, i,
8064 build_int_cst (TREE_TYPE (i), 1));
8065 gimple_seq_add_stmt (end, g);
8066 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8067 gimple_seq_add_stmt (end, g);
8068 gimple_seq_add_stmt (end, gimple_build_label (endl));
8070 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8072 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8073 tree oldv = NULL_TREE;
8074 tree lab6 = NULL_TREE;
8075 if (cancellable)
8077 /* If this reduction needs destruction and parallel
8078 has been cancelled, jump around the merge operation
8079 to the destruction. */
8080 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8081 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8082 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8083 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8084 lab6, lab5);
8085 gimple_seq_add_stmt (end, g);
8086 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8088 if (omp_is_reference (decl)
8089 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8090 TREE_TYPE (ref)))
8091 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8092 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8093 tree refv = create_tmp_var (TREE_TYPE (ref));
8094 gimplify_assign (refv, ref, end);
8095 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8096 SET_DECL_VALUE_EXPR (placeholder, ref);
8097 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8098 tree d = maybe_lookup_decl (decl, ctx);
8099 gcc_assert (d);
8100 if (DECL_HAS_VALUE_EXPR_P (d))
8101 oldv = DECL_VALUE_EXPR (d);
8102 if (omp_is_reference (var))
8104 tree v = fold_convert (TREE_TYPE (d),
8105 build_fold_addr_expr (new_var));
8106 SET_DECL_VALUE_EXPR (d, v);
8108 else
8109 SET_DECL_VALUE_EXPR (d, new_var);
8110 DECL_HAS_VALUE_EXPR_P (d) = 1;
8111 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8112 if (oldv)
8113 SET_DECL_VALUE_EXPR (d, oldv);
8114 else
8116 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8117 DECL_HAS_VALUE_EXPR_P (d) = 0;
8119 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8120 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8121 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8122 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8123 if (cancellable)
8124 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8125 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8126 if (x)
8128 gimple_seq tseq = NULL;
8129 gimplify_stmt (&x, &tseq);
8130 gimple_seq_add_seq (end, tseq);
8133 else
8135 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8136 ref = unshare_expr (ref);
8137 gimplify_assign (ref, x, end);
8139 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8140 ++cnt;
8141 field = DECL_CHAIN (bfield);
/* Register the array with the runtime (taskgroup), or publish its
   address through the _reductemp_ clause (parallel/worksharing).  */
8145 if (code == OMP_TASKGROUP)
8147 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8148 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8149 gimple_seq_add_stmt (start, g);
8151 else
8153 tree c;
8154 if (code == OMP_FOR)
8155 c = gimple_omp_for_clauses (ctx->stmt);
8156 else if (code == OMP_SECTIONS)
8157 c = gimple_omp_sections_clauses (ctx->stmt);
8158 else
8159 c = gimple_omp_taskreg_clauses (ctx->stmt);
8160 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8161 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8162 build_fold_addr_expr (avar));
8163 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
/* Advance to the next thread's chunk and loop back to lab1 until all
   NUM_THR_SZ chunks have been processed.  */
8166 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8167 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8168 size_one_node));
8169 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8170 gimple_seq_add_stmt (end, g);
8171 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8172 if (code == OMP_FOR || code == OMP_SECTIONS)
8174 enum built_in_function bfn
8175 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8176 t = builtin_decl_explicit (bfn);
8177 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8178 tree arg;
8179 if (cancellable)
8181 arg = create_tmp_var (c_bool_type);
8182 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8183 cancellable));
8185 else
8186 arg = build_int_cst (c_bool_type, 0);
8187 g = gimple_build_call (t, 1, arg);
8189 else
8191 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8192 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8194 gimple_seq_add_stmt (end, g);
/* Finally mark the array as dead with a volatile clobber assignment.  */
8195 t = build_constructor (atype, NULL);
8196 TREE_THIS_VOLATILE (t) = 1;
8197 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8200 /* Expand code for an OpenMP taskgroup directive. */
8202 static void
8203 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8205 gimple *stmt = gsi_stmt (*gsi_p);
8206 gcall *x;
8207 gbind *bind;
8208 gimple_seq dseq = NULL;
8209 tree block = make_node (BLOCK);
8211 bind = gimple_build_bind (NULL, NULL, block);
8212 gsi_replace (gsi_p, bind, true);
8213 gimple_bind_add_stmt (bind, stmt);
8215 push_gimplify_context ();
8217 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8219 gimple_bind_add_stmt (bind, x);
8221 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8222 gimple_omp_taskgroup_clauses (stmt),
8223 gimple_bind_body_ptr (bind), &dseq);
8225 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8226 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8227 gimple_omp_set_body (stmt, NULL);
8229 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8230 gimple_bind_add_seq (bind, dseq);
8232 pop_gimplify_context (bind);
8234 gimple_bind_append_vars (bind, ctx->block_vars);
8235 BLOCK_VARS (block) = ctx->block_vars;
8239 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8241 static void
8242 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8243 omp_context *ctx)
/* Only meaningful when the ordered construct is directly nested in a
   GIMPLE_OMP_FOR that carries an ordered(N) clause; otherwise nothing
   to fold.  */
8245 struct omp_for_data fd;
8246 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8247 return;
8249 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8250 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8251 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8252 if (!fd.ordered)
8253 return;
8255 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8256 tree c = gimple_omp_ordered_clauses (ord_stmt);
8257 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8258 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8260 /* Merge depend clauses from multiple adjacent
8261 #pragma omp ordered depend(sink:...) constructs
8262 into one #pragma omp ordered depend(sink:...), so that
8263 we can optimize them together. */
8264 gimple_stmt_iterator gsi = *gsi_p;
8265 gsi_next (&gsi);
8266 while (!gsi_end_p (gsi))
/* Scan forward over debug stmts/NOPs; splice the clause chains of any
   immediately following ordered-depend(sink) constructs onto ORD_STMT
   and delete those constructs.  */
8268 gimple *stmt = gsi_stmt (gsi);
8269 if (is_gimple_debug (stmt)
8270 || gimple_code (stmt) == GIMPLE_NOP)
8272 gsi_next (&gsi);
8273 continue;
8275 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8276 break;
8277 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8278 c = gimple_omp_ordered_clauses (ord_stmt2);
8279 if (c == NULL_TREE
8280 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8281 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8282 break;
8283 while (*list_p)
8284 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8285 *list_p = c;
8286 gsi_remove (&gsi, true);
8290 /* Canonicalize sink dependence clauses into one folded clause if
8291 possible.
8293 The basic algorithm is to create a sink vector whose first
8294 element is the GCD of all the first elements, and whose remaining
8295 elements are the minimum of the subsequent columns.
8297 We ignore dependence vectors whose first element is zero because
8298 such dependencies are known to be executed by the same thread.
8300 We take into account the direction of the loop, so a minimum
8301 becomes a maximum if the loop is iterating forwards. We also
8302 ignore sink clauses where the loop direction is unknown, or where
8303 the offsets are clearly invalid because they are not a multiple
8304 of the loop increment.
8306 For example:
8308 #pragma omp for ordered(2)
8309 for (i=0; i < N; ++i)
8310 for (j=0; j < M; ++j)
8312 #pragma omp ordered \
8313 depend(sink:i-8,j-2) \
8314 depend(sink:i,j-1) \ // Completely ignored because i+0.
8315 depend(sink:i-4,j-3) \
8316 depend(sink:i-6,j-4)
8317 #pragma omp ordered depend(source)
8320 Folded clause is:
8322 depend(sink:-gcd(8,4,6),-min(2,3,4))
8323 -or-
8324 depend(sink:-2,-2)
8327 /* FIXME: Computing GCD's where the first element is zero is
8328 non-trivial in the presence of collapsed loops. Do this later. */
8329 if (fd.collapse > 1)
8330 return;
/* FOLDED_DEPS holds, per dimension, the running folded offset in
   [0, len) and the current clause's offsets in [len, 2*len-1).  */
8332 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8334 /* wide_int is not a POD so it must be default-constructed. */
8335 for (unsigned i = 0; i != 2 * len - 1; ++i)
8336 new (static_cast<void*>(folded_deps + i)) wide_int ();
8338 tree folded_dep = NULL_TREE;
8339 /* TRUE if the first dimension's offset is negative. */
8340 bool neg_offset_p = false;
8342 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8343 unsigned int i;
8344 while ((c = *list_p) != NULL)
8346 bool remove = false;
8348 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8349 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8350 goto next_ordered_clause;
8352 tree vec;
/* Walk the TREE_LIST sink vector of clause C; I indexes the loop
   dimension each (PURPOSE = offset, VALUE = iterator) pair refers to.  */
8353 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8354 vec && TREE_CODE (vec) == TREE_LIST;
8355 vec = TREE_CHAIN (vec), ++i)
8357 gcc_assert (i < len);
8359 /* omp_extract_for_data has canonicalized the condition. */
8360 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8361 || fd.loops[i].cond_code == GT_EXPR);
8362 bool forward = fd.loops[i].cond_code == LT_EXPR;
8363 bool maybe_lexically_later = true;
8365 /* While the committee makes up its mind, bail if we have any
8366 non-constant steps. */
8367 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8368 goto lower_omp_ordered_ret;
8370 tree itype = TREE_TYPE (TREE_VALUE (vec));
8371 if (POINTER_TYPE_P (itype))
8372 itype = sizetype;
8373 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8374 TYPE_PRECISION (itype),
8375 TYPE_SIGN (itype));
8377 /* Ignore invalid offsets that are not multiples of the step. */
8378 if (!wi::multiple_of_p (wi::abs (offset),
8379 wi::abs (wi::to_wide (fd.loops[i].step)),
8380 UNSIGNED))
8382 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8383 "ignoring sink clause with offset that is not "
8384 "a multiple of the loop step");
8385 remove = true;
8386 goto next_ordered_clause;
8389 /* Calculate the first dimension. The first dimension of
8390 the folded dependency vector is the GCD of the first
8391 elements, while ignoring any first elements whose offset
8392 is 0. */
8393 if (i == 0)
8395 /* Ignore dependence vectors whose first dimension is 0. */
8396 if (offset == 0)
8398 remove = true;
8399 goto next_ordered_clause;
8401 else
8403 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8405 error_at (OMP_CLAUSE_LOCATION (c),
8406 "first offset must be in opposite direction "
8407 "of loop iterations");
8408 goto lower_omp_ordered_ret;
/* Normalize so the folded first-dimension offset is kept positive;
   NEG_OFFSET_P remembers whether to negate it back at the end.  */
8410 if (forward)
8411 offset = -offset;
8412 neg_offset_p = forward;
8413 /* Initialize the first time around. */
8414 if (folded_dep == NULL_TREE)
8416 folded_dep = c;
8417 folded_deps[0] = offset;
8419 else
8420 folded_deps[0] = wi::gcd (folded_deps[0],
8421 offset, UNSIGNED);
8424 /* Calculate minimum for the remaining dimensions. */
8425 else
8427 folded_deps[len + i - 1] = offset;
8428 if (folded_dep == c)
8429 folded_deps[i] = offset;
8430 else if (maybe_lexically_later
8431 && !wi::eq_p (folded_deps[i], offset))
/* If this clause's vector is lexically later than the folded one,
   adopt its remaining dimensions as the new minimum.  */
8433 if (forward ^ wi::gts_p (folded_deps[i], offset))
8435 unsigned int j;
8436 folded_dep = c;
8437 for (j = 1; j <= i; j++)
8438 folded_deps[j] = folded_deps[len + j - 1];
8440 else
8441 maybe_lexically_later = false;
8445 gcc_assert (i == len);
/* Every sink clause is consumed into the folded vector; drop it.  */
8447 remove = true;
8449 next_ordered_clause:
8450 if (remove)
8451 *list_p = OMP_CLAUSE_CHAIN (c);
8452 else
8453 list_p = &OMP_CLAUSE_CHAIN (c);
/* Emit the single folded depend(sink:) clause, re-applying the saved
   sign of the first dimension, and prepend it to ORD_STMT's clauses.  */
8456 if (folded_dep)
8458 if (neg_offset_p)
8459 folded_deps[0] = -folded_deps[0];
8461 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8462 if (POINTER_TYPE_P (itype))
8463 itype = sizetype;
8465 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8466 = wide_int_to_tree (itype, folded_deps[0]);
8467 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8468 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8471 lower_omp_ordered_ret:
8473 /* Ordered without clauses is #pragma omp threads, while we want
8474 a nop instead if we remove all clauses. */
8475 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8476 gsi_replace (gsi_p, gimple_build_nop (), true);
8480 /* Expand code for an OpenMP ordered directive. */
8482 static void
8483 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8485 tree block;
8486 gimple *stmt = gsi_stmt (*gsi_p), *g;
8487 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8488 gcall *x;
8489 gbind *bind;
/* SIMD variant of ordered uses internal functions instead of libgomp
   calls; THREADS is passed through to those internal functions.  */
8490 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8491 OMP_CLAUSE_SIMD);
8492 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8493 loop. */
8494 bool maybe_simt
8495 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8496 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8497 OMP_CLAUSE_THREADS);
/* ordered depend(...) is left for expansion; no bind is built here.  */
8499 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8500 OMP_CLAUSE_DEPEND))
8502 /* FIXME: This needs to be moved to the expansion to verify various
8503 conditions only testable on cfg with dominators computed, and also
8504 all the depend clauses to be merged still might need to be available
8505 for the runtime checks. */
8506 if (0)
8507 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8508 return;
8511 push_gimplify_context ();
/* Wrap the construct in a GIMPLE_BIND that will hold the start call,
   the lowered body and the end call.  */
8513 block = make_node (BLOCK);
8514 bind = gimple_build_bind (NULL, NULL, block);
8515 gsi_replace (gsi_p, bind, true);
8516 gimple_bind_add_stmt (bind, stmt);
8518 if (simd)
8520 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8521 build_int_cst (NULL_TREE, threads));
8522 cfun->has_simduid_loops = true;
8524 else
8525 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8527 gimple_bind_add_stmt (bind, x);
/* For SIMT execution, serialize the lanes: loop from this lane's index
   down to zero, executing the body only on the iteration whose counter
   matches (IFN_GOMP_SIMT_ORDERED_PRED), so lanes run in order.  */
8529 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8530 if (maybe_simt)
8532 counter = create_tmp_var (integer_type_node);
8533 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8534 gimple_call_set_lhs (g, counter);
8535 gimple_bind_add_stmt (bind, g);
8537 body = create_artificial_label (UNKNOWN_LOCATION);
8538 test = create_artificial_label (UNKNOWN_LOCATION);
8539 gimple_bind_add_stmt (bind, gimple_build_label (body));
8541 tree simt_pred = create_tmp_var (integer_type_node);
8542 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8543 gimple_call_set_lhs (g, simt_pred);
8544 gimple_bind_add_stmt (bind, g);
8546 tree t = create_artificial_label (UNKNOWN_LOCATION);
8547 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8548 gimple_bind_add_stmt (bind, g);
8550 gimple_bind_add_stmt (bind, gimple_build_label (t));
/* Lower the user body recursively and splice it into the bind.  */
8552 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8553 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8554 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8555 gimple_omp_set_body (stmt, NULL);
/* Close the SIMT loop: decrement the counter and iterate while any
   lane still has a non-negative counter (IFN_GOMP_SIMT_VOTE_ANY).  */
8557 if (maybe_simt)
8559 gimple_bind_add_stmt (bind, gimple_build_label (test));
8560 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8561 gimple_bind_add_stmt (bind, g);
8563 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8564 tree nonneg = create_tmp_var (integer_type_node);
8565 gimple_seq tseq = NULL;
8566 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8567 gimple_bind_add_seq (bind, tseq);
8569 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8570 gimple_call_set_lhs (g, nonneg);
8571 gimple_bind_add_stmt (bind, g);
8573 tree end = create_artificial_label (UNKNOWN_LOCATION);
8574 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8575 gimple_bind_add_stmt (bind, g);
8577 gimple_bind_add_stmt (bind, gimple_build_label (end));
8579 if (simd)
8580 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8581 build_int_cst (NULL_TREE, threads));
8582 else
8583 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8585 gimple_bind_add_stmt (bind, x);
8587 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8589 pop_gimplify_context (bind);
8591 gimple_bind_append_vars (bind, ctx->block_vars);
8592 BLOCK_VARS (block) = gimple_bind_vars (bind);
8596 /* Expand code for an OpenMP scan directive and the structured block
8597 before the scan directive. */
8599 static void
8600 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8602 gimple *stmt = gsi_stmt (*gsi_p);
8603 bool has_clauses
8604 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8605 tree lane = NULL_TREE;
/* BEFORE collects statements to emit ahead of the scan body.  */
8606 gimple_seq before = NULL;
8607 omp_context *octx = ctx->outer;
8608 gcc_assert (octx);
8609 if (octx->scan_exclusive && !has_clauses)
8611 gimple_stmt_iterator gsi2 = *gsi_p;
8612 gsi_next (&gsi2);
8613 gimple *stmt2 = gsi_stmt (gsi2);
8614 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
8615 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8616 the one with exclusive clause(s), comes first. */
8617 if (stmt2
8618 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
8619 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
8621 gsi_remove (gsi_p, false);
8622 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
8623 ctx = maybe_lookup_ctx (stmt2);
8624 gcc_assert (ctx);
/* Recurse on the swapped-in scan; that call finishes the lowering.  */
8625 lower_omp_scan (gsi_p, ctx);
8626 return;
/* INPUT_PHASE is true when lowering the structured block before the
   scan directive (the "input phase" of the reduction).  */
8630 bool input_phase = has_clauses ^ octx->scan_inclusive;
8631 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8632 && (gimple_omp_for_kind (octx->stmt) & GF_OMP_FOR_SIMD)
8633 && !gimple_omp_for_combined_into_p (octx->stmt));
8634 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8635 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
8636 && !gimple_omp_for_combined_p (octx->stmt));
/* For simd, ask IFN_GOMP_SIMD_LANE for the lane to index the per-lane
   "omp simd array" copies; the third argument (1/2/3) encodes which
   scan phase this lane request is for.  */
8637 if (is_simd)
8638 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
8639 OMP_CLAUSE__SIMDUID_))
8641 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
8642 lane = create_tmp_var (unsigned_type_node);
8643 tree t = build_int_cst (integer_type_node,
8644 input_phase ? 1
8645 : octx->scan_inclusive ? 2 : 3);
8646 gimple *g
8647 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
8648 gimple_call_set_lhs (g, lane);
8649 gimple_seq_add_stmt (&before, g);
8652 if (is_simd || is_for)
8654 for (tree c = gimple_omp_for_clauses (octx->stmt);
8655 c; c = OMP_CLAUSE_CHAIN (c))
8656 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8657 && OMP_CLAUSE_REDUCTION_INSCAN (c))
8659 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8660 tree var = OMP_CLAUSE_DECL (c);
8661 tree new_var = lookup_decl (var, octx);
8662 tree val = new_var;
/* VAR2 is the running accumulator, VAR3 an identity-element copy for
   UDRs, VAR4 the previous accumulator for exclusive scan, LANE0 the
   original lane index saved when rewriting a simd-array ref.  */
8663 tree var2 = NULL_TREE;
8664 tree var3 = NULL_TREE;
8665 tree var4 = NULL_TREE;
8666 tree lane0 = NULL_TREE;
8667 tree new_vard = new_var;
8668 if (omp_is_reference (var))
8670 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
8671 val = new_var;
8673 if (DECL_HAS_VALUE_EXPR_P (new_vard))
8675 val = DECL_VALUE_EXPR (new_vard);
8676 if (new_vard != new_var)
8678 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
8679 val = TREE_OPERAND (val, 0);
/* Value expr of the form simdarray[lane]: redirect the index to LANE
   for this phase and build VAR2/VAR4 as lane-indexed array refs.  */
8681 if (TREE_CODE (val) == ARRAY_REF
8682 && VAR_P (TREE_OPERAND (val, 0)))
8684 tree v = TREE_OPERAND (val, 0);
8685 if (lookup_attribute ("omp simd array",
8686 DECL_ATTRIBUTES (v)))
8688 val = unshare_expr (val);
8689 lane0 = TREE_OPERAND (val, 1);
8690 TREE_OPERAND (val, 1) = lane;
8691 var2 = lookup_decl (v, octx);
8692 if (octx->scan_exclusive)
8693 var4 = lookup_decl (var2, octx);
8694 if (input_phase
8695 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8696 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
8697 if (!input_phase)
8699 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
8700 var2, lane, NULL_TREE, NULL_TREE);
8701 TREE_THIS_NOTRAP (var2) = 1;
8702 if (octx->scan_exclusive)
8704 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
8705 var4, lane, NULL_TREE,
8706 NULL_TREE);
8707 TREE_THIS_NOTRAP (var4) = 1;
8710 else
8711 var2 = val;
8714 gcc_assert (var2);
8716 else
8718 var2 = build_outer_var_ref (var, octx);
8719 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8721 var3 = maybe_lookup_decl (new_vard, octx);
8722 if (var3 == new_vard || var3 == NULL_TREE)
8723 var3 = NULL_TREE;
8724 else if (is_simd && octx->scan_exclusive && !input_phase)
8726 var4 = maybe_lookup_decl (var3, octx);
8727 if (var4 == var3 || var4 == NULL_TREE)
8729 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
8731 var4 = var3;
8732 var3 = NULL_TREE;
8734 else
8735 var4 = NULL_TREE;
8739 if (is_simd
8740 && octx->scan_exclusive
8741 && !input_phase
8742 && var4 == NULL_TREE)
8743 var4 = create_tmp_var (TREE_TYPE (val));
/* User-defined reductions: lower the UDR init/merge sequences with the
   placeholder and variable value-exprs temporarily redirected.  */
8745 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8747 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8748 if (input_phase)
8750 if (var3)
8752 /* If we've added a separate identity element
8753 variable, copy it over into val. */
8754 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
8755 var3);
8756 gimplify_and_add (x, &before);
8758 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
8760 /* Otherwise, assign to it the identity element. */
8761 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
8762 if (is_for)
8763 tseq = copy_gimple_seq_and_replace_locals (tseq);
8764 tree ref = build_outer_var_ref (var, octx);
8765 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
8766 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
8767 if (x)
8769 if (new_vard != new_var)
8770 val = build_fold_addr_expr_loc (clause_loc, val);
8771 SET_DECL_VALUE_EXPR (new_vard, val);
8773 SET_DECL_VALUE_EXPR (placeholder, ref);
8774 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8775 lower_omp (&tseq, octx);
8776 if (x)
8777 SET_DECL_VALUE_EXPR (new_vard, x);
8778 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8779 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8780 gimple_seq_add_seq (&before, tseq);
8781 if (is_simd)
8782 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
/* Scan phase for simd UDRs: save old accumulator for exclusive scan,
   then run the UDR merge into VAR2 and publish the result to VAL.  */
8785 else if (is_simd)
8787 tree x;
8788 if (octx->scan_exclusive)
8790 tree v4 = unshare_expr (var4);
8791 tree v2 = unshare_expr (var2);
8792 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
8793 gimplify_and_add (x, &before);
8795 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
8796 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
8797 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
8798 tree vexpr = val;
8799 if (x && new_vard != new_var)
8800 vexpr = build_fold_addr_expr_loc (clause_loc, val);
8801 if (x)
8802 SET_DECL_VALUE_EXPR (new_vard, vexpr);
8803 SET_DECL_VALUE_EXPR (placeholder, var2);
8804 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8805 lower_omp (&tseq, octx);
8806 gimple_seq_add_seq (&before, tseq);
8807 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8808 if (x)
8809 SET_DECL_VALUE_EXPR (new_vard, x);
8810 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8811 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8812 if (octx->scan_inclusive)
8814 x = lang_hooks.decls.omp_clause_assign_op (c, val,
8815 var2);
8816 gimplify_and_add (x, &before);
8818 else if (lane0 == NULL_TREE)
8820 x = lang_hooks.decls.omp_clause_assign_op (c, val,
8821 var4);
8822 gimplify_and_add (x, &before);
/* Plain (non-UDR) reductions.  */
8826 else
8828 if (input_phase)
8830 /* input phase. Set val to initializer before
8831 the body. */
8832 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
8833 gimplify_assign (val, x, &before);
8835 else if (is_simd)
8837 /* scan phase. */
8838 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
/* MINUS reductions scan as additions of negated contributions.  */
8839 if (code == MINUS_EXPR)
8840 code = PLUS_EXPR;
8842 tree x = build2 (code, TREE_TYPE (var2),
8843 unshare_expr (var2), unshare_expr (val));
8844 if (octx->scan_inclusive)
8846 gimplify_assign (unshare_expr (var2), x, &before);
8847 gimplify_assign (val, var2, &before);
8849 else
8851 gimplify_assign (unshare_expr (var4),
8852 unshare_expr (var2), &before);
8853 gimplify_assign (var2, x, &before);
8854 if (lane0 == NULL_TREE)
8855 gimplify_assign (val, var4, &before);
/* Exclusive scan on simd arrays: point the variable's value expr at
   the pre-update element (VAR4[lane0]) inside the scan-phase body.  */
8859 if (octx->scan_exclusive && !input_phase && lane0)
8861 tree vexpr = unshare_expr (var4);
8862 TREE_OPERAND (vexpr, 1) = lane0;
8863 if (new_vard != new_var)
8864 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
8865 SET_DECL_VALUE_EXPR (new_vard, vexpr);
8869 else if (has_clauses)
8870 sorry_at (gimple_location (stmt),
8871 "%<#pragma omp scan%> not supported yet");
/* Outside a worksharing loop, splice BEFORE plus the body inline and
   replace the directive with a nop.  */
8872 if (!is_for)
8874 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
8875 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
8876 gsi_replace (gsi_p, gimple_build_nop (), true);
8877 return;
8879 lower_omp (gimple_omp_body_ptr (stmt), octx);
8880 if (before)
8882 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
8883 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
8888 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
8889 substitution of a couple of function calls. But in the NAMED case,
8890 requires that languages coordinate a symbol name. It is therefore
8891 best put here in common code. */
/* Lazily built map from a critical-section NAME identifier to the global
   mutex symbol (.gomp_critical_user_<name>) created for it; GC-rooted
   via GTY so the entries survive garbage collection.  */
8893 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
8895 static void
8896 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8898 tree block;
8899 tree name, lock, unlock;
8900 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
8901 gbind *bind;
8902 location_t loc = gimple_location (stmt);
8903 gimple_seq tbody;
8905 name = gimple_omp_critical_name (stmt);
/* Named critical: lock on a named global mutex shared across TUs
   (TREE_PUBLIC + DECL_COMMON), created once per name and cached in
   critical_name_mutexes.  */
8906 if (name)
8908 tree decl;
8910 if (!critical_name_mutexes)
8911 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
8913 tree *n = critical_name_mutexes->get (name);
8914 if (n == NULL)
8916 char *new_str;
8918 decl = create_tmp_var_raw (ptr_type_node);
8920 new_str = ACONCAT ((".gomp_critical_user_",
8921 IDENTIFIER_POINTER (name), NULL));
8922 DECL_NAME (decl) = get_identifier (new_str);
8923 TREE_PUBLIC (decl) = 1;
8924 TREE_STATIC (decl) = 1;
8925 DECL_COMMON (decl) = 1;
8926 DECL_ARTIFICIAL (decl) = 1;
8927 DECL_IGNORED_P (decl) = 1;
8929 varpool_node::finalize_decl (decl);
8931 critical_name_mutexes->put (name, decl);
8933 else
8934 decl = *n;
8936 /* If '#pragma omp critical' is inside offloaded region or
8937 inside function marked as offloadable, the symbol must be
8938 marked as offloadable too. */
8939 omp_context *octx;
8940 if (cgraph_node::get (current_function_decl)->offloadable)
8941 varpool_node::get_create (decl)->offloadable = 1;
8942 else
8943 for (octx = ctx->outer; octx; octx = octx->outer)
8944 if (is_gimple_omp_offloaded (octx->stmt))
8946 varpool_node::get_create (decl)->offloadable = 1;
8947 break;
8950 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
8951 lock = build_call_expr_loc (loc, lock, 1,
8952 build_fold_addr_expr_loc (loc, decl));
8954 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
8955 unlock = build_call_expr_loc (loc, unlock, 1,
8956 build_fold_addr_expr_loc (loc, decl));
/* Unnamed critical: use the single anonymous libgomp mutex.  */
8958 else
8960 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
8961 lock = build_call_expr_loc (loc, lock, 0);
8963 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
8964 unlock = build_call_expr_loc (loc, unlock, 0);
8967 push_gimplify_context ();
/* Rebuild as: bind { lock; <lowered body (with EH cleanup)>; unlock;
   OMP_RETURN }.  */
8969 block = make_node (BLOCK);
8970 bind = gimple_build_bind (NULL, NULL, block);
8971 gsi_replace (gsi_p, bind, true);
8972 gimple_bind_add_stmt (bind, stmt);
8974 tbody = gimple_bind_body (bind);
8975 gimplify_and_add (lock, &tbody);
8976 gimple_bind_set_body (bind, tbody);
8978 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8979 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8980 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8981 gimple_omp_set_body (stmt, NULL);
8983 tbody = gimple_bind_body (bind);
8984 gimplify_and_add (unlock, &tbody);
8985 gimple_bind_set_body (bind, tbody);
8987 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8989 pop_gimplify_context (bind);
8990 gimple_bind_append_vars (bind, ctx->block_vars);
8991 BLOCK_VARS (block) = gimple_bind_vars (bind);
8994 /* A subroutine of lower_omp_for. Generate code to emit the predicate
8995 for a lastprivate clause. Given a loop control predicate of (V
8996 cond N2), we gate the clause on (!(V cond N2)). The lowered form
8997 is appended to *DLIST, iterator initialization is appended to
8998 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
8999 to be emitted in a critical section. */
9001 static void
9002 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9003 gimple_seq *dlist, gimple_seq *clist,
9004 struct omp_context *ctx)
9006 tree clauses, cond, vinit;
9007 enum tree_code cond_code;
9008 gimple_seq stmts;
/* Gate on the negation of the loop predicate: lastprivate copy-out runs
   only on the thread whose iterator exited the loop.  */
9010 cond_code = fd->loop.cond_code;
9011 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9013 /* When possible, use a strict equality expression. This can let VRP
9014 type optimizations deduce the value and remove a copy. */
9015 if (tree_fits_shwi_p (fd->loop.step))
9017 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9018 if (step == 1 || step == -1)
9019 cond_code = EQ_EXPR;
9022 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9023 || gimple_omp_for_grid_phony (fd->for_stmt))
9024 cond = omp_grid_lastprivate_predicate (fd);
9025 else
9027 tree n2 = fd->loop.n2;
/* For combined collapsed loops with a non-constant bound, the real end
   value lives in a _looptemp_ clause of the enclosing taskreg/for; dig
   it out so the predicate compares against the true bound.  */
9028 if (fd->collapse > 1
9029 && TREE_CODE (n2) != INTEGER_CST
9030 && gimple_omp_for_combined_into_p (fd->for_stmt))
9032 struct omp_context *taskreg_ctx = NULL;
9033 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9035 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9036 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9037 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9039 if (gimple_omp_for_combined_into_p (gfor))
9041 gcc_assert (ctx->outer->outer
9042 && is_parallel_ctx (ctx->outer->outer));
9043 taskreg_ctx = ctx->outer->outer;
9045 else
9047 struct omp_for_data outer_fd;
9048 omp_extract_for_data (gfor, &outer_fd, NULL);
9049 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9052 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9053 taskreg_ctx = ctx->outer->outer;
9055 else if (is_taskreg_ctx (ctx->outer))
9056 taskreg_ctx = ctx->outer;
9057 if (taskreg_ctx)
9059 int i;
/* Skip fd->collapse _looptemp_ clauses (one per collapsed dimension);
   the next one, if present, holds the end value.  */
9060 tree taskreg_clauses
9061 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9062 tree innerc = omp_find_clause (taskreg_clauses,
9063 OMP_CLAUSE__LOOPTEMP_);
9064 gcc_assert (innerc);
9065 for (i = 0; i < fd->collapse; i++)
9067 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9068 OMP_CLAUSE__LOOPTEMP_);
9069 gcc_assert (innerc);
9071 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9072 OMP_CLAUSE__LOOPTEMP_);
9073 if (innerc)
9074 n2 = fold_convert (TREE_TYPE (n2),
9075 lookup_decl (OMP_CLAUSE_DECL (innerc),
9076 taskreg_ctx));
9079 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9082 clauses = gimple_omp_for_clauses (fd->for_stmt);
9083 stmts = NULL;
9084 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9085 if (!gimple_seq_empty_p (stmts))
9087 gimple_seq_add_seq (&stmts, *dlist);
9088 *dlist = stmts;
9090 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9091 vinit = fd->loop.n1;
9092 if (cond_code == EQ_EXPR
9093 && tree_fits_shwi_p (fd->loop.n2)
9094 && ! integer_zerop (fd->loop.n2))
9095 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9096 else
9097 vinit = unshare_expr (vinit);
9099 /* Initialize the iterator variable, so that threads that don't execute
9100 any iterations don't execute the lastprivate clauses by accident. */
9101 gimplify_assign (fd->loop.v, vinit, body_p);
9105 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9107 tree
9108 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9109 struct walk_stmt_info *wi)
9111 gimple *stmt = gsi_stmt (*gsi_p);
9113 *handled_ops_p = true;
9114 switch (gimple_code (stmt))
/* WALK_SUBSTMTS expands to the cases that recurse into nested bodies.  */
9116 WALK_SUBSTMTS;
9118 case GIMPLE_OMP_SCAN:
/* Record the iterator of the found scan in WI->INFO and return a
   non-NULL value to stop the walk.  */
9119 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9120 return integer_zero_node;
9121 default:
9122 break;
9124 return NULL;
9127 /* Helper function for lower_omp_for, add transformations for a worksharing
9128 loop with scan directives inside of it.
9129 For worksharing loop not combined with simd, transform:
9130 #pragma omp for reduction(inscan,+:r) private(i)
9131 for (i = 0; i < n; i = i + 1)
9134 update (r);
9136 #pragma omp scan inclusive(r)
9138 use (r);
9142 into two worksharing loops + code to merge results:
9144 num_threads = omp_get_num_threads ();
9145 thread_num = omp_get_thread_num ();
9146 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9147 <D.2099>:
9148 var2 = r;
9149 goto <D.2101>;
9150 <D.2100>:
9151 // For UDRs this is UDR init, or if ctors are needed, copy from
9152 // var3 that has been constructed to contain the neutral element.
9153 var2 = 0;
9154 <D.2101>:
9155 ivar = 0;
9156 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9157 // a shared array with num_threads elements and rprivb to a local array
9158 // number of elements equal to the number of (contiguous) iterations the
9159 // current thread will perform. controlb and controlp variables are
9160 // temporaries to handle deallocation of rprivb at the end of second
9161 // GOMP_FOR.
9162 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9163 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9164 for (i = 0; i < n; i = i + 1)
9167 // For UDRs this is UDR init or copy from var3.
9168 r = 0;
9169 // This is the input phase from user code.
9170 update (r);
9173 // For UDRs this is UDR merge.
9174 var2 = var2 + r;
9175 // Rather than handing it over to the user, save to local thread's
9176 // array.
9177 rprivb[ivar] = var2;
9178 // For exclusive scan, the above two statements are swapped.
9179 ivar = ivar + 1;
9182 // And remember the final value from this thread's into the shared
9183 // rpriva array.
9184 rpriva[(sizetype) thread_num] = var2;
9185 // If more than one thread, compute using Work-Efficient prefix sum
9186 // the inclusive parallel scan of the rpriva array.
9187 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9188 <D.2102>:
9189 GOMP_barrier ();
9190 down = 0;
9191 k = 1;
9192 num_threadsu = (unsigned int) num_threads;
9193 thread_numup1 = (unsigned int) thread_num + 1;
9194 <D.2108>:
9195 twok = k << 1;
9196 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9197 <D.2110>:
9198 down = 4294967295;
9199 k = k >> 1;
9200 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9201 <D.2112>:
9202 k = k >> 1;
9203 <D.2111>:
9204 twok = k << 1;
9205 cplx = .MUL_OVERFLOW (thread_numup1, twok);
9206 mul = REALPART_EXPR <cplx>;
9207 ovf = IMAGPART_EXPR <cplx>;
9208 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9209 <D.2116>:
9210 andv = k & down;
9211 andvm1 = andv + 4294967295;
9212 l = mul + andvm1;
9213 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9214 <D.2120>:
9215 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9216 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9217 rpriva[l] = rpriva[l - k] + rpriva[l];
9218 <D.2117>:
9219 if (down == 0) goto <D.2121>; else goto <D.2122>;
9220 <D.2121>:
9221 k = k << 1;
9222 goto <D.2123>;
9223 <D.2122>:
9224 k = k >> 1;
9225 <D.2123>:
9226 GOMP_barrier ();
9227 if (k != 0) goto <D.2108>; else goto <D.2103>;
9228 <D.2103>:
9229 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9230 <D.2124>:
9231 // For UDRs this is UDR init or copy from var3.
9232 var2 = 0;
9233 goto <D.2126>;
9234 <D.2125>:
9235 var2 = rpriva[thread_num - 1];
9236 <D.2126>:
9237 ivar = 0;
9238 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9239 reduction(inscan,+:r) private(i)
9240 for (i = 0; i < n; i = i + 1)
9243 // For UDRs, this is UDR merge (rprivb[ivar], var2); r = rprivb[ivar];
9244 r = rprivb[ivar] + var2;
9247 // This is the scan phase from user code.
9248 use (r);
9249 // Plus a bump of the iterator.
9250 ivar = ivar + 1;
9252 } */
9254 static void
9255 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9256 struct omp_for_data *fd, omp_context *ctx)
9258 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9260 gimple_seq body = gimple_omp_body (stmt);
9261 gimple_stmt_iterator input1_gsi = gsi_none ();
9262 struct walk_stmt_info wi;
9263 memset (&wi, 0, sizeof (wi));
9264 wi.val_only = true;
9265 wi.info = (void *) &input1_gsi;
9266 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9267 gcc_assert (!gsi_end_p (input1_gsi));
9269 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9270 gimple_stmt_iterator gsi = input1_gsi;
9271 gsi_next (&gsi);
9272 gimple_stmt_iterator scan1_gsi = gsi;
9273 gimple *scan_stmt1 = gsi_stmt (gsi);
9274 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9276 gimple_seq input_body = gimple_omp_body (input_stmt1);
9277 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9278 gimple_omp_set_body (input_stmt1, NULL);
9279 gimple_omp_set_body (scan_stmt1, NULL);
9280 gimple_omp_set_body (stmt, NULL);
9282 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9283 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9284 gimple_omp_set_body (stmt, body);
9285 gimple_omp_set_body (input_stmt1, input_body);
9287 gimple_stmt_iterator input2_gsi = gsi_none ();
9288 memset (&wi, 0, sizeof (wi));
9289 wi.val_only = true;
9290 wi.info = (void *) &input2_gsi;
9291 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9292 gcc_assert (!gsi_end_p (input2_gsi));
9294 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9295 gsi = input2_gsi;
9296 gsi_next (&gsi);
9297 gimple_stmt_iterator scan2_gsi = gsi;
9298 gimple *scan_stmt2 = gsi_stmt (gsi);
9299 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9300 gimple_omp_set_body (scan_stmt2, scan_body);
9302 tree num_threads = create_tmp_var (integer_type_node);
9303 tree thread_num = create_tmp_var (integer_type_node);
9304 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9305 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9306 gimple *g = gimple_build_call (nthreads_decl, 0);
9307 gimple_call_set_lhs (g, num_threads);
9308 gimple_seq_add_stmt (body_p, g);
9309 g = gimple_build_call (threadnum_decl, 0);
9310 gimple_call_set_lhs (g, thread_num);
9311 gimple_seq_add_stmt (body_p, g);
9313 tree ivar = create_tmp_var (sizetype);
9314 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9315 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9316 tree k = create_tmp_var (unsigned_type_node);
9317 tree l = create_tmp_var (unsigned_type_node);
9319 gimple_seq clist = NULL, mdlist = NULL;
9320 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9321 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9322 gimple_seq scan1_list = NULL, input2_list = NULL;
9323 gimple_seq last_list = NULL, reduc_list = NULL;
9324 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9325 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9326 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9328 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9329 tree var = OMP_CLAUSE_DECL (c);
9330 tree new_var = lookup_decl (var, ctx);
9331 tree var3 = NULL_TREE;
9332 tree new_vard = new_var;
9333 if (omp_is_reference (var))
9334 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9335 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9337 var3 = maybe_lookup_decl (new_vard, ctx);
9338 if (var3 == new_vard)
9339 var3 = NULL_TREE;
9342 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9343 tree rpriva = create_tmp_var (ptype);
9344 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9345 OMP_CLAUSE_DECL (nc) = rpriva;
9346 *cp1 = nc;
9347 cp1 = &OMP_CLAUSE_CHAIN (nc);
9349 tree rprivb = create_tmp_var (ptype);
9350 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9351 OMP_CLAUSE_DECL (nc) = rprivb;
9352 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9353 *cp1 = nc;
9354 cp1 = &OMP_CLAUSE_CHAIN (nc);
9356 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9357 if (new_vard != new_var)
9358 TREE_ADDRESSABLE (var2) = 1;
9359 gimple_add_tmp_var (var2);
9361 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9362 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9363 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9364 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9365 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9367 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9368 thread_num, integer_minus_one_node);
9369 x = fold_convert_loc (clause_loc, sizetype, x);
9370 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9371 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9372 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9373 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9375 x = fold_convert_loc (clause_loc, sizetype, l);
9376 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9377 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9378 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9379 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9381 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9382 x = fold_convert_loc (clause_loc, sizetype, x);
9383 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9384 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9385 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9386 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9388 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9389 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9390 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9391 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9393 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9395 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9396 tree val = var2;
9397 if (new_vard != new_var)
9398 val = build_fold_addr_expr_loc (clause_loc, val);
9400 x = lang_hooks.decls.omp_clause_default_ctor
9401 (c, var2, build_outer_var_ref (var, ctx));
9402 if (x)
9403 gimplify_and_add (x, &clist);
9405 x = build_outer_var_ref (var, ctx);
9406 x = lang_hooks.decls.omp_clause_assign_op (c, var2, x);
9407 gimplify_and_add (x, &thr01_list);
9409 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9410 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9411 if (var3)
9413 x = lang_hooks.decls.omp_clause_assign_op (c, var2, var3);
9414 gimplify_and_add (x, &thrn1_list);
9415 x = lang_hooks.decls.omp_clause_assign_op (c, var2, var3);
9416 gimplify_and_add (x, &thr02_list);
9418 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9420 /* Otherwise, assign to it the identity element. */
9421 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9422 tseq = copy_gimple_seq_and_replace_locals (tseq);
9423 SET_DECL_VALUE_EXPR (new_vard, val);
9424 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9425 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9426 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9427 lower_omp (&tseq, ctx);
9428 gimple_seq_add_seq (&thrn1_list, tseq);
9429 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9430 lower_omp (&tseq, ctx);
9431 gimple_seq_add_seq (&thr02_list, tseq);
9432 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9433 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9434 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9435 if (y)
9436 SET_DECL_VALUE_EXPR (new_vard, y);
9437 else
9439 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9440 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9444 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivam1_ref);
9445 gimplify_and_add (x, &thrn2_list);
9447 if (ctx->scan_exclusive)
9449 x = unshare_expr (rprivb_ref);
9450 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9451 gimplify_and_add (x, &scan1_list);
9454 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9455 tseq = copy_gimple_seq_and_replace_locals (tseq);
9456 SET_DECL_VALUE_EXPR (placeholder, var2);
9457 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9458 lower_omp (&tseq, ctx);
9459 gimple_seq_add_seq (&scan1_list, tseq);
9461 if (ctx->scan_inclusive)
9463 x = unshare_expr (rprivb_ref);
9464 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9465 gimplify_and_add (x, &scan1_list);
9468 x = unshare_expr (rpriva_ref);
9469 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9470 gimplify_and_add (x, &mdlist);
9472 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9473 tseq = copy_gimple_seq_and_replace_locals (tseq);
9474 SET_DECL_VALUE_EXPR (new_vard, val);
9475 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9476 SET_DECL_VALUE_EXPR (placeholder, rprivb_ref);
9477 lower_omp (&tseq, ctx);
9478 if (y)
9479 SET_DECL_VALUE_EXPR (new_vard, y);
9480 else
9482 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9483 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9485 gimple_seq_add_seq (&input2_list, tseq);
9487 x = unshare_expr (new_var);
9488 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivb_ref);
9489 gimplify_and_add (x, &input2_list);
9491 x = build_outer_var_ref (var, ctx);
9492 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
9493 gimplify_and_add (x, &last_list);
9495 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
9496 gimplify_and_add (x, &reduc_list);
9497 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9498 tseq = copy_gimple_seq_and_replace_locals (tseq);
9499 val = rprival_ref;
9500 if (new_vard != new_var)
9501 val = build_fold_addr_expr_loc (clause_loc, val);
9502 SET_DECL_VALUE_EXPR (new_vard, val);
9503 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9504 SET_DECL_VALUE_EXPR (placeholder, var2);
9505 lower_omp (&tseq, ctx);
9506 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9507 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9508 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9509 if (y)
9510 SET_DECL_VALUE_EXPR (new_vard, y);
9511 else
9513 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9514 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9516 gimple_seq_add_seq (&reduc_list, tseq);
9517 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
9518 gimplify_and_add (x, &reduc_list);
9520 x = lang_hooks.decls.omp_clause_dtor (c, var2);
9521 if (x)
9522 gimplify_and_add (x, dlist);
9524 else
9526 x = build_outer_var_ref (var, ctx);
9527 gimplify_assign (var2, x, &thr01_list);
9529 x = omp_reduction_init (c, TREE_TYPE (new_var));
9530 gimplify_assign (var2, unshare_expr (x), &thrn1_list);
9531 gimplify_assign (var2, x, &thr02_list);
9533 gimplify_assign (var2, rprivam1_ref, &thrn2_list);
9535 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9536 if (code == MINUS_EXPR)
9537 code = PLUS_EXPR;
9539 if (ctx->scan_exclusive)
9540 gimplify_assign (unshare_expr (rprivb_ref), var2, &scan1_list);
9541 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
9542 gimplify_assign (var2, x, &scan1_list);
9543 if (ctx->scan_inclusive)
9544 gimplify_assign (unshare_expr (rprivb_ref), var2, &scan1_list);
9546 gimplify_assign (unshare_expr (rpriva_ref), var2, &mdlist);
9548 x = build2 (code, TREE_TYPE (new_var), rprivb_ref, var2);
9549 gimplify_assign (new_var, x, &input2_list);
9551 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
9552 &last_list);
9554 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
9555 unshare_expr (rprival_ref));
9556 gimplify_assign (rprival_ref, x, &reduc_list);
9560 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
9561 gimple_seq_add_stmt (&scan1_list, g);
9562 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
9563 gimple_seq_add_stmt (gimple_omp_body_ptr (scan_stmt2), g);
9565 tree controlb = create_tmp_var (boolean_type_node);
9566 tree controlp = create_tmp_var (ptr_type_node);
9567 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9568 OMP_CLAUSE_DECL (nc) = controlb;
9569 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9570 *cp1 = nc;
9571 cp1 = &OMP_CLAUSE_CHAIN (nc);
9572 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9573 OMP_CLAUSE_DECL (nc) = controlp;
9574 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9575 *cp1 = nc;
9576 cp1 = &OMP_CLAUSE_CHAIN (nc);
9577 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9578 OMP_CLAUSE_DECL (nc) = controlb;
9579 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9580 *cp2 = nc;
9581 cp2 = &OMP_CLAUSE_CHAIN (nc);
9582 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9583 OMP_CLAUSE_DECL (nc) = controlp;
9584 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9585 *cp2 = nc;
9586 cp2 = &OMP_CLAUSE_CHAIN (nc);
9588 *cp1 = gimple_omp_for_clauses (stmt);
9589 gimple_omp_for_set_clauses (stmt, new_clauses1);
9590 *cp2 = gimple_omp_for_clauses (new_stmt);
9591 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
9593 gimple_omp_set_body (scan_stmt1, scan1_list);
9594 gimple_omp_set_body (input_stmt2, input2_list);
9596 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
9597 GSI_SAME_STMT);
9598 gsi_remove (&input1_gsi, true);
9599 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
9600 GSI_SAME_STMT);
9601 gsi_remove (&scan1_gsi, true);
9602 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
9603 GSI_SAME_STMT);
9604 gsi_remove (&input2_gsi, true);
9605 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
9606 GSI_SAME_STMT);
9607 gsi_remove (&scan2_gsi, true);
9609 gimple_seq_add_seq (body_p, clist);
9611 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9612 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9613 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9614 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
9615 gimple_seq_add_stmt (body_p, g);
9616 g = gimple_build_label (lab1);
9617 gimple_seq_add_stmt (body_p, g);
9618 gimple_seq_add_seq (body_p, thr01_list);
9619 g = gimple_build_goto (lab3);
9620 gimple_seq_add_stmt (body_p, g);
9621 g = gimple_build_label (lab2);
9622 gimple_seq_add_stmt (body_p, g);
9623 gimple_seq_add_seq (body_p, thrn1_list);
9624 g = gimple_build_label (lab3);
9625 gimple_seq_add_stmt (body_p, g);
9627 g = gimple_build_assign (ivar, size_zero_node);
9628 gimple_seq_add_stmt (body_p, g);
9630 gimple_seq_add_stmt (body_p, stmt);
9631 gimple_seq_add_seq (body_p, body);
9632 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
9633 fd->loop.v));
9635 g = gimple_build_omp_return (true);
9636 gimple_seq_add_stmt (body_p, g);
9637 gimple_seq_add_seq (body_p, mdlist);
9639 lab1 = create_artificial_label (UNKNOWN_LOCATION);
9640 lab2 = create_artificial_label (UNKNOWN_LOCATION);
9641 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
9642 gimple_seq_add_stmt (body_p, g);
9643 g = gimple_build_label (lab1);
9644 gimple_seq_add_stmt (body_p, g);
9646 g = omp_build_barrier (NULL);
9647 gimple_seq_add_stmt (body_p, g);
9649 tree down = create_tmp_var (unsigned_type_node);
9650 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
9651 gimple_seq_add_stmt (body_p, g);
9653 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
9654 gimple_seq_add_stmt (body_p, g);
9656 tree num_threadsu = create_tmp_var (unsigned_type_node);
9657 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
9658 gimple_seq_add_stmt (body_p, g);
9660 tree thread_numu = create_tmp_var (unsigned_type_node);
9661 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
9662 gimple_seq_add_stmt (body_p, g);
9664 tree thread_nump1 = create_tmp_var (unsigned_type_node);
9665 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
9666 build_int_cst (unsigned_type_node, 1));
9667 gimple_seq_add_stmt (body_p, g);
9669 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9670 g = gimple_build_label (lab3);
9671 gimple_seq_add_stmt (body_p, g);
9673 tree twok = create_tmp_var (unsigned_type_node);
9674 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
9675 gimple_seq_add_stmt (body_p, g);
9677 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9678 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9679 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9680 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
9681 gimple_seq_add_stmt (body_p, g);
9682 g = gimple_build_label (lab4);
9683 gimple_seq_add_stmt (body_p, g);
9684 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
9685 gimple_seq_add_stmt (body_p, g);
9686 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
9687 gimple_seq_add_stmt (body_p, g);
9689 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
9690 gimple_seq_add_stmt (body_p, g);
9691 g = gimple_build_label (lab6);
9692 gimple_seq_add_stmt (body_p, g);
9694 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
9695 gimple_seq_add_stmt (body_p, g);
9697 g = gimple_build_label (lab5);
9698 gimple_seq_add_stmt (body_p, g);
9700 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
9701 gimple_seq_add_stmt (body_p, g);
9703 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
9704 DECL_GIMPLE_REG_P (cplx) = 1;
9705 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
9706 gimple_call_set_lhs (g, cplx);
9707 gimple_seq_add_stmt (body_p, g);
9708 tree mul = create_tmp_var (unsigned_type_node);
9709 g = gimple_build_assign (mul, REALPART_EXPR,
9710 build1 (REALPART_EXPR, unsigned_type_node, cplx));
9711 gimple_seq_add_stmt (body_p, g);
9712 tree ovf = create_tmp_var (unsigned_type_node);
9713 g = gimple_build_assign (ovf, IMAGPART_EXPR,
9714 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
9715 gimple_seq_add_stmt (body_p, g);
9717 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
9718 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
9719 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
9720 lab7, lab8);
9721 gimple_seq_add_stmt (body_p, g);
9722 g = gimple_build_label (lab7);
9723 gimple_seq_add_stmt (body_p, g);
9725 tree andv = create_tmp_var (unsigned_type_node);
9726 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
9727 gimple_seq_add_stmt (body_p, g);
9728 tree andvm1 = create_tmp_var (unsigned_type_node);
9729 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
9730 build_minus_one_cst (unsigned_type_node));
9731 gimple_seq_add_stmt (body_p, g);
9733 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
9734 gimple_seq_add_stmt (body_p, g);
9736 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
9737 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
9738 gimple_seq_add_stmt (body_p, g);
9739 g = gimple_build_label (lab9);
9740 gimple_seq_add_stmt (body_p, g);
9741 gimple_seq_add_seq (body_p, reduc_list);
9742 g = gimple_build_label (lab8);
9743 gimple_seq_add_stmt (body_p, g);
9745 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
9746 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
9747 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
9748 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
9749 lab10, lab11);
9750 gimple_seq_add_stmt (body_p, g);
9751 g = gimple_build_label (lab10);
9752 gimple_seq_add_stmt (body_p, g);
9753 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
9754 gimple_seq_add_stmt (body_p, g);
9755 g = gimple_build_goto (lab12);
9756 gimple_seq_add_stmt (body_p, g);
9757 g = gimple_build_label (lab11);
9758 gimple_seq_add_stmt (body_p, g);
9759 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
9760 gimple_seq_add_stmt (body_p, g);
9761 g = gimple_build_label (lab12);
9762 gimple_seq_add_stmt (body_p, g);
9764 g = omp_build_barrier (NULL);
9765 gimple_seq_add_stmt (body_p, g);
9767 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
9768 lab3, lab2);
9769 gimple_seq_add_stmt (body_p, g);
9771 g = gimple_build_label (lab2);
9772 gimple_seq_add_stmt (body_p, g);
9774 lab1 = create_artificial_label (UNKNOWN_LOCATION);
9775 lab2 = create_artificial_label (UNKNOWN_LOCATION);
9776 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9777 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
9778 gimple_seq_add_stmt (body_p, g);
9779 g = gimple_build_label (lab1);
9780 gimple_seq_add_stmt (body_p, g);
9781 gimple_seq_add_seq (body_p, thr02_list);
9782 g = gimple_build_goto (lab3);
9783 gimple_seq_add_stmt (body_p, g);
9784 g = gimple_build_label (lab2);
9785 gimple_seq_add_stmt (body_p, g);
9786 gimple_seq_add_seq (body_p, thrn2_list);
9787 g = gimple_build_label (lab3);
9788 gimple_seq_add_stmt (body_p, g);
9790 g = gimple_build_assign (ivar, size_zero_node);
9791 gimple_seq_add_stmt (body_p, g);
9792 gimple_seq_add_stmt (body_p, new_stmt);
9793 gimple_seq_add_seq (body_p, new_body);
9795 gimple_seq new_dlist = NULL;
9796 lab1 = create_artificial_label (UNKNOWN_LOCATION);
9797 lab2 = create_artificial_label (UNKNOWN_LOCATION);
9798 tree num_threadsm1 = create_tmp_var (integer_type_node);
9799 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
9800 integer_minus_one_node);
9801 gimple_seq_add_stmt (&new_dlist, g);
9802 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
9803 gimple_seq_add_stmt (&new_dlist, g);
9804 g = gimple_build_label (lab1);
9805 gimple_seq_add_stmt (&new_dlist, g);
9806 gimple_seq_add_seq (&new_dlist, last_list);
9807 g = gimple_build_label (lab2);
9808 gimple_seq_add_stmt (&new_dlist, g);
9809 gimple_seq_add_seq (&new_dlist, *dlist);
9810 *dlist = new_dlist;
9813 /* Lower code for an OMP loop directive. */
/* Replaces the GIMPLE_OMP_FOR at *GSI_P with a GIMPLE_BIND holding the
   lowered form: clause setup code, the loop statement and body, the
   OMP continue/return markers and the exit (lastprivate/reduction)
   sequences.  CTX is the omp_context for this construct.  */
9815 static void
9816 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9818 tree *rhs_p, block;
9819 struct omp_for_data fd, *fdp = NULL;
9820 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
9821 gbind *new_stmt;
9822 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
9823 gimple_seq cnt_list = NULL, clist = NULL;
9824 gimple_seq oacc_head = NULL, oacc_tail = NULL;
9825 size_t i;
9827 push_gimplify_context ();
9829 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
9831 block = make_node (BLOCK);
9832 new_stmt = gimple_build_bind (NULL, NULL, block);
9833 /* Replace at gsi right away, so that 'stmt' is no member
9834 of a sequence anymore as we're going to add to a different
9835 one below. */
9836 gsi_replace (gsi_p, new_stmt, true);
9838 /* Move declaration of temporaries in the loop body before we make
9839 it go away. */
9840 omp_for_body = gimple_omp_body (stmt);
9841 if (!gimple_seq_empty_p (omp_for_body)
9842 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
9844 gbind *inner_bind
9845 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
9846 tree vars = gimple_bind_vars (inner_bind);
9847 gimple_bind_append_vars (new_stmt, vars);
9848 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
9849 keep them on the inner_bind and it's block. */
9850 gimple_bind_set_vars (inner_bind, NULL_TREE);
9851 if (gimple_bind_block (inner_bind))
9852 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
/* For a loop combined into an enclosing construct, pre-create the
   _looptemp_ clauses so the outer construct and an adjacent _simt_
   sibling can find the same temporaries.  */
9855 if (gimple_omp_for_combined_into_p (stmt))
9857 omp_extract_for_data (stmt, &fd, NULL);
9858 fdp = &fd;
9860 /* We need two temporaries with fd.loop.v type (istart/iend)
9861 and then (fd.collapse - 1) temporaries with the same
9862 type for count2 ... countN-1 vars if not constant. */
9863 size_t count = 2;
9864 tree type = fd.iter_type;
9865 if (fd.collapse > 1
9866 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
9867 count += fd.collapse - 1;
9868 bool taskreg_for
9869 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
9870 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
9871 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
9872 tree simtc = NULL;
9873 tree clauses = *pc;
9874 if (taskreg_for)
9875 outerc
9876 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
9877 OMP_CLAUSE__LOOPTEMP_);
9878 if (ctx->simt_stmt)
9879 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
9880 OMP_CLAUSE__LOOPTEMP_);
9881 for (i = 0; i < count; i++)
9883 tree temp;
9884 if (taskreg_for)
9886 gcc_assert (outerc);
9887 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
9888 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
9889 OMP_CLAUSE__LOOPTEMP_);
9891 else
9893 /* If there are 2 adjacent SIMD stmts, one with _simt_
9894 clause, another without, make sure they have the same
9895 decls in _looptemp_ clauses, because the outer stmt
9896 they are combined into will look up just one inner_stmt. */
9897 if (ctx->simt_stmt)
9898 temp = OMP_CLAUSE_DECL (simtc);
9899 else
9900 temp = create_tmp_var (type);
9901 insert_decl_map (&ctx->outer->cb, temp, temp);
9903 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
9904 OMP_CLAUSE_DECL (*pc) = temp;
9905 pc = &OMP_CLAUSE_CHAIN (*pc);
9906 if (ctx->simt_stmt)
9907 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
9908 OMP_CLAUSE__LOOPTEMP_);
9910 *pc = clauses;
9913 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
9914 dlist = NULL;
9915 body = NULL;
/* Task reductions need a _reductemp_ clause plus init/fini sequences
   (tred_ilist/tred_dlist) emitted around the loop.  */
9916 tree rclauses
9917 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
9918 OMP_CLAUSE_REDUCTION);
9919 tree rtmp = NULL_TREE;
9920 if (rclauses)
9922 tree type = build_pointer_type (pointer_sized_int_node);
9923 tree temp = create_tmp_var (type);
9924 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
9925 OMP_CLAUSE_DECL (c) = temp;
9926 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
9927 gimple_omp_for_set_clauses (stmt, c);
9928 lower_omp_task_reductions (ctx, OMP_FOR,
9929 gimple_omp_for_clauses (stmt),
9930 &tred_ilist, &tred_dlist);
9931 rclauses = c;
9932 rtmp = make_ssa_name (type);
9933 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
9936 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
9937 ctx);
9939 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
9940 fdp);
9941 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
9942 gimple_omp_for_pre_body (stmt));
9944 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9946 /* Lower the header expressions. At this point, we can assume that
9947 the header is of the form:
9949 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
9951 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
9952 using the .omp_data_s mapping, if needed. */
9953 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
9955 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
9956 if (!is_gimple_min_invariant (*rhs_p))
9957 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
9958 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
9959 recompute_tree_invariant_for_addr_expr (*rhs_p);
9961 rhs_p = gimple_omp_for_final_ptr (stmt, i);
9962 if (!is_gimple_min_invariant (*rhs_p))
9963 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
9964 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
9965 recompute_tree_invariant_for_addr_expr (*rhs_p);
9967 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
9968 if (!is_gimple_min_invariant (*rhs_p))
9969 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
9971 if (rclauses)
9972 gimple_seq_add_seq (&tred_ilist, cnt_list);
9973 else
9974 gimple_seq_add_seq (&body, cnt_list);
9976 /* Once lowered, extract the bounds and clauses. */
9977 omp_extract_for_data (stmt, &fd, NULL);
9979 if (is_gimple_omp_oacc (ctx->stmt)
9980 && !ctx_in_oacc_kernels_region (ctx))
9981 lower_oacc_head_tail (gimple_location (stmt),
9982 gimple_omp_for_clauses (stmt),
9983 &oacc_head, &oacc_tail, ctx);
9985 /* Add OpenACC partitioning and reduction markers just before the loop. */
9986 if (oacc_head)
9987 gimple_seq_add_seq (&body, oacc_head);
9989 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
9991 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
9992 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9993 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9994 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
9996 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
9997 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
9998 OMP_CLAUSE_LINEAR_STEP (c)
9999 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10000 ctx);
/* NOTE(review): a "phony" loop (gridified body) is emitted without the
   OMP_CONTINUE/OMP_RETURN markers below -- presumably the grid lowering
   supplies its own loop structure; confirm against omp-grid.c.  */
10003 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10004 && gimple_omp_for_grid_phony (stmt));
/* Inscan reductions get a specialized two-pass lowering.  */
10005 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10006 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10008 gcc_assert (!phony_loop);
10009 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10011 else
10013 if (!phony_loop)
10014 gimple_seq_add_stmt (&body, stmt);
10015 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10018 if (!phony_loop)
10019 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10020 fd.loop.v));
10022 /* After the loop, add exit clauses. */
10023 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
/* Reduction merge stmts collected in CLIST are wrapped in
   GOMP_atomic_start/end calls so they execute atomically.  */
10025 if (clist)
10027 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10028 gcall *g = gimple_build_call (fndecl, 0);
10029 gimple_seq_add_stmt (&body, g);
10030 gimple_seq_add_seq (&body, clist);
10031 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10032 g = gimple_build_call (fndecl, 0);
10033 gimple_seq_add_stmt (&body, g);
10036 if (ctx->cancellable)
10037 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10039 gimple_seq_add_seq (&body, dlist);
10041 if (rclauses)
10043 gimple_seq_add_seq (&tred_ilist, body);
10044 body = tred_ilist;
10047 body = maybe_catch_exception (body);
10049 if (!phony_loop)
10051 /* Region exit marker goes at the end of the loop body. */
10052 gimple *g = gimple_build_omp_return (fd.have_nowait);
10053 gimple_seq_add_stmt (&body, g);
10055 gimple_seq_add_seq (&body, tred_dlist);
10057 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10059 if (rclauses)
10060 OMP_CLAUSE_DECL (rclauses) = rtmp;
10063 /* Add OpenACC joining and reduction markers just after the loop. */
10064 if (oacc_tail)
10065 gimple_seq_add_seq (&body, oacc_tail);
10067 pop_gimplify_context (new_stmt);
/* Attach collected temporaries to the new bind; the loop's body and
   pre-body have been spliced into BODY above, so detach them.  */
10069 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10070 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10071 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10072 if (BLOCK_VARS (block))
10073 TREE_USED (block) = 1;
10075 gimple_bind_set_body (new_stmt, body);
10076 gimple_omp_set_body (stmt, NULL);
10077 gimple_omp_for_set_pre_body (stmt, NULL);
10080 /* Callback for walk_stmts. Check if the current statement only contains
10081 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10083 static tree
10084 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10085 bool *handled_ops_p,
10086 struct walk_stmt_info *wi)
10088 int *info = (int *) wi->info;
10089 gimple *stmt = gsi_stmt (*gsi_p);
10091 *handled_ops_p = true;
10092 switch (gimple_code (stmt))
10094 WALK_SUBSTMTS;
10096 case GIMPLE_DEBUG:
10097 break;
10098 case GIMPLE_OMP_FOR:
10099 case GIMPLE_OMP_SECTIONS:
10100 *info = *info == 0 ? 1 : -1;
10101 break;
10102 default:
10103 *info = -1;
10104 break;
10106 return NULL;
/* State threaded through the tree-inline.c callbacks while building a
   task copy function.  */
10109 struct omp_taskcopy_context
10111 /* This field must be at the beginning, as we do "inheritance": Some
10112 callback functions for tree-inline.c (e.g., omp_copy_decl)
10113 receive a copy_body_data pointer that is up-casted to an
10114 omp_context pointer. */
10115 copy_body_data cb;
/* Context of the task region being copied; the copy callbacks consult
   it (e.g. ctx->sfield_map in task_copyfn_copy_decl).  */
10116 omp_context *ctx;
10119 static tree
10120 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10122 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10124 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10125 return create_tmp_var (TREE_TYPE (var));
10127 return var;
10130 static tree
10131 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10133 tree name, new_fields = NULL, type, f;
10135 type = lang_hooks.types.make_type (RECORD_TYPE);
10136 name = DECL_NAME (TYPE_NAME (orig_type));
10137 name = build_decl (gimple_location (tcctx->ctx->stmt),
10138 TYPE_DECL, name, type);
10139 TYPE_NAME (type) = name;
10141 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10143 tree new_f = copy_node (f);
10144 DECL_CONTEXT (new_f) = type;
10145 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10146 TREE_CHAIN (new_f) = new_fields;
10147 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10148 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10149 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10150 &tcctx->cb, NULL);
10151 new_fields = new_f;
10152 tcctx->cb.decl_map->put (f, new_f);
10154 TYPE_FIELDS (type) = nreverse (new_fields);
10155 layout_type (type);
10156 return type;
10159 /* Create task copyfn. */
10161 static void
10162 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10164 struct function *child_cfun;
10165 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10166 tree record_type, srecord_type, bind, list;
10167 bool record_needs_remap = false, srecord_needs_remap = false;
10168 splay_tree_node n;
10169 struct omp_taskcopy_context tcctx;
10170 location_t loc = gimple_location (task_stmt);
10171 size_t looptempno = 0;
10173 child_fn = gimple_omp_task_copy_fn (task_stmt);
10174 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10175 gcc_assert (child_cfun->cfg == NULL);
10176 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10178 /* Reset DECL_CONTEXT on function arguments. */
10179 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10180 DECL_CONTEXT (t) = child_fn;
10182 /* Populate the function. */
10183 push_gimplify_context ();
10184 push_cfun (child_cfun);
10186 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10187 TREE_SIDE_EFFECTS (bind) = 1;
10188 list = NULL;
10189 DECL_SAVED_TREE (child_fn) = bind;
10190 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10192 /* Remap src and dst argument types if needed. */
10193 record_type = ctx->record_type;
10194 srecord_type = ctx->srecord_type;
10195 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10196 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10198 record_needs_remap = true;
10199 break;
10201 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10202 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10204 srecord_needs_remap = true;
10205 break;
10208 if (record_needs_remap || srecord_needs_remap)
10210 memset (&tcctx, '\0', sizeof (tcctx));
10211 tcctx.cb.src_fn = ctx->cb.src_fn;
10212 tcctx.cb.dst_fn = child_fn;
10213 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10214 gcc_checking_assert (tcctx.cb.src_node);
10215 tcctx.cb.dst_node = tcctx.cb.src_node;
10216 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10217 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10218 tcctx.cb.eh_lp_nr = 0;
10219 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10220 tcctx.cb.decl_map = new hash_map<tree, tree>;
10221 tcctx.ctx = ctx;
10223 if (record_needs_remap)
10224 record_type = task_copyfn_remap_type (&tcctx, record_type);
10225 if (srecord_needs_remap)
10226 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10228 else
10229 tcctx.cb.decl_map = NULL;
10231 arg = DECL_ARGUMENTS (child_fn);
10232 TREE_TYPE (arg) = build_pointer_type (record_type);
10233 sarg = DECL_CHAIN (arg);
10234 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10236 /* First pass: initialize temporaries used in record_type and srecord_type
10237 sizes and field offsets. */
10238 if (tcctx.cb.decl_map)
10239 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10240 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10242 tree *p;
10244 decl = OMP_CLAUSE_DECL (c);
10245 p = tcctx.cb.decl_map->get (decl);
10246 if (p == NULL)
10247 continue;
10248 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10249 sf = (tree) n->value;
10250 sf = *tcctx.cb.decl_map->get (sf);
10251 src = build_simple_mem_ref_loc (loc, sarg);
10252 src = omp_build_component_ref (src, sf);
10253 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10254 append_to_statement_list (t, &list);
10257 /* Second pass: copy shared var pointers and copy construct non-VLA
10258 firstprivate vars. */
10259 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10260 switch (OMP_CLAUSE_CODE (c))
10262 splay_tree_key key;
10263 case OMP_CLAUSE_SHARED:
10264 decl = OMP_CLAUSE_DECL (c);
10265 key = (splay_tree_key) decl;
10266 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10267 key = (splay_tree_key) &DECL_UID (decl);
10268 n = splay_tree_lookup (ctx->field_map, key);
10269 if (n == NULL)
10270 break;
10271 f = (tree) n->value;
10272 if (tcctx.cb.decl_map)
10273 f = *tcctx.cb.decl_map->get (f);
10274 n = splay_tree_lookup (ctx->sfield_map, key);
10275 sf = (tree) n->value;
10276 if (tcctx.cb.decl_map)
10277 sf = *tcctx.cb.decl_map->get (sf);
10278 src = build_simple_mem_ref_loc (loc, sarg);
10279 src = omp_build_component_ref (src, sf);
10280 dst = build_simple_mem_ref_loc (loc, arg);
10281 dst = omp_build_component_ref (dst, f);
10282 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10283 append_to_statement_list (t, &list);
10284 break;
10285 case OMP_CLAUSE_REDUCTION:
10286 case OMP_CLAUSE_IN_REDUCTION:
10287 decl = OMP_CLAUSE_DECL (c);
10288 if (TREE_CODE (decl) == MEM_REF)
10290 decl = TREE_OPERAND (decl, 0);
10291 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10292 decl = TREE_OPERAND (decl, 0);
10293 if (TREE_CODE (decl) == INDIRECT_REF
10294 || TREE_CODE (decl) == ADDR_EXPR)
10295 decl = TREE_OPERAND (decl, 0);
10297 key = (splay_tree_key) decl;
10298 n = splay_tree_lookup (ctx->field_map, key);
10299 if (n == NULL)
10300 break;
10301 f = (tree) n->value;
10302 if (tcctx.cb.decl_map)
10303 f = *tcctx.cb.decl_map->get (f);
10304 n = splay_tree_lookup (ctx->sfield_map, key);
10305 sf = (tree) n->value;
10306 if (tcctx.cb.decl_map)
10307 sf = *tcctx.cb.decl_map->get (sf);
10308 src = build_simple_mem_ref_loc (loc, sarg);
10309 src = omp_build_component_ref (src, sf);
10310 if (decl != OMP_CLAUSE_DECL (c)
10311 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10312 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10313 src = build_simple_mem_ref_loc (loc, src);
10314 dst = build_simple_mem_ref_loc (loc, arg);
10315 dst = omp_build_component_ref (dst, f);
10316 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10317 append_to_statement_list (t, &list);
10318 break;
10319 case OMP_CLAUSE__LOOPTEMP_:
10320 /* Fields for first two _looptemp_ clauses are initialized by
10321 GOMP_taskloop*, the rest are handled like firstprivate. */
10322 if (looptempno < 2)
10324 looptempno++;
10325 break;
10327 /* FALLTHRU */
10328 case OMP_CLAUSE__REDUCTEMP_:
10329 case OMP_CLAUSE_FIRSTPRIVATE:
10330 decl = OMP_CLAUSE_DECL (c);
10331 if (is_variable_sized (decl))
10332 break;
10333 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10334 if (n == NULL)
10335 break;
10336 f = (tree) n->value;
10337 if (tcctx.cb.decl_map)
10338 f = *tcctx.cb.decl_map->get (f);
10339 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10340 if (n != NULL)
10342 sf = (tree) n->value;
10343 if (tcctx.cb.decl_map)
10344 sf = *tcctx.cb.decl_map->get (sf);
10345 src = build_simple_mem_ref_loc (loc, sarg);
10346 src = omp_build_component_ref (src, sf);
10347 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10348 src = build_simple_mem_ref_loc (loc, src);
10350 else
10351 src = decl;
10352 dst = build_simple_mem_ref_loc (loc, arg);
10353 dst = omp_build_component_ref (dst, f);
10354 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10355 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10356 else
10357 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10358 append_to_statement_list (t, &list);
10359 break;
10360 case OMP_CLAUSE_PRIVATE:
10361 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10362 break;
10363 decl = OMP_CLAUSE_DECL (c);
10364 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10365 f = (tree) n->value;
10366 if (tcctx.cb.decl_map)
10367 f = *tcctx.cb.decl_map->get (f);
10368 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10369 if (n != NULL)
10371 sf = (tree) n->value;
10372 if (tcctx.cb.decl_map)
10373 sf = *tcctx.cb.decl_map->get (sf);
10374 src = build_simple_mem_ref_loc (loc, sarg);
10375 src = omp_build_component_ref (src, sf);
10376 if (use_pointer_for_field (decl, NULL))
10377 src = build_simple_mem_ref_loc (loc, src);
10379 else
10380 src = decl;
10381 dst = build_simple_mem_ref_loc (loc, arg);
10382 dst = omp_build_component_ref (dst, f);
10383 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10384 append_to_statement_list (t, &list);
10385 break;
10386 default:
10387 break;
10390 /* Last pass: handle VLA firstprivates. */
10391 if (tcctx.cb.decl_map)
10392 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10393 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10395 tree ind, ptr, df;
10397 decl = OMP_CLAUSE_DECL (c);
10398 if (!is_variable_sized (decl))
10399 continue;
10400 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10401 if (n == NULL)
10402 continue;
10403 f = (tree) n->value;
10404 f = *tcctx.cb.decl_map->get (f);
10405 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
10406 ind = DECL_VALUE_EXPR (decl);
10407 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
10408 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
10409 n = splay_tree_lookup (ctx->sfield_map,
10410 (splay_tree_key) TREE_OPERAND (ind, 0));
10411 sf = (tree) n->value;
10412 sf = *tcctx.cb.decl_map->get (sf);
10413 src = build_simple_mem_ref_loc (loc, sarg);
10414 src = omp_build_component_ref (src, sf);
10415 src = build_simple_mem_ref_loc (loc, src);
10416 dst = build_simple_mem_ref_loc (loc, arg);
10417 dst = omp_build_component_ref (dst, f);
10418 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10419 append_to_statement_list (t, &list);
10420 n = splay_tree_lookup (ctx->field_map,
10421 (splay_tree_key) TREE_OPERAND (ind, 0));
10422 df = (tree) n->value;
10423 df = *tcctx.cb.decl_map->get (df);
10424 ptr = build_simple_mem_ref_loc (loc, arg);
10425 ptr = omp_build_component_ref (ptr, df);
10426 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
10427 build_fold_addr_expr_loc (loc, dst));
10428 append_to_statement_list (t, &list);
10431 t = build1 (RETURN_EXPR, void_type_node, NULL);
10432 append_to_statement_list (t, &list);
10434 if (tcctx.cb.decl_map)
10435 delete tcctx.cb.decl_map;
10436 pop_gimplify_context (NULL);
10437 BIND_EXPR_BODY (bind) = list;
10438 pop_cfun ();
/* Lower OMP_CLAUSE_DEPEND clauses found on *PCLAUSES into the runtime's
   depend-array representation.  Statements that build and fill the array
   are appended to *ISEQ (run before the construct); the clobber that ends
   the array's lifetime is appended to *OSEQ (run after it).  On return a
   new OMP_CLAUSE_DEPEND of kind OMP_CLAUSE_DEPEND_LAST holding the
   address of the array is prepended to *PCLAUSES.

   Array layout (all elements have pointer type):
     - simple form (only in/out/inout kinds present):
	 [0] = total number of addresses, [1] = number of out/inout,
	 then the depend addresses;
     - extended form (any mutexinoutset or depobj present):
	 [0] = 0 (marker distinguishing it from the simple form),
	 [1] = total, [2] = out/inout count, [3] = mutexinoutset count,
	 [4] = in count, then the addresses grouped in that kind order.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[] counts clauses per kind: [0] out/inout, [1] mutexinoutset,
     [2] in, [3] depobj.  IDX is the number of header slots before the
     first address (2 for the simple form, 5 for the extended one) and
     is later reused as the running store index.  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  /* Any mutexinoutset or depobj clause forces the extended header.  */
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Extended form: element 0 is a zero marker, total goes in [1].  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Store the per-kind counts: only cnt[0] for the simple form,
     cnt[0..2] for the extended form.  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Fill in the addresses, grouped by kind in cnt[] index order.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    /* IDX now walks the address slots, starting right after
	       the header.  */
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Replace the original depend clauses with a single DEPEND_LAST clause
     carrying the array's address; callers chain it in front of *PCLAUSES.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* Clobber the array after the construct so its stack slot can be
     reused once the dependences have been registered.  */
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = 1;
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      /* A taskwait with depend clauses has no body of its own.  */
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      /* Detect a parallel whose body is exactly one worksharing
	 construct and mark it combined, enabling the combined
	 parallel+workshare expansion later.  */
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      /* Depend clauses need a surrounding bind to hold the depend
	 array's setup/clobber statements.  */
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      /* Bodyless taskwait-with-depend: wrap the statement in the
	 depend bind (if any) and we are done.  */
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      /* Task reductions also need an enclosing bind; reuse the depend
	 bind when one already exists.  */
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  /* Gridified (phony) parallels keep their body inline instead of
     being outlined into a child function.  */
  bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
    && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
  if (phony_construct && ctx->record_type)
    {
      gcc_checking_assert (!ctx->receiver_decl);
      ctx->receiver_decl = create_tmp_var
	(build_reference_type (ctx->record_type), ".omp_rec");
    }
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      /* .omp_data_o is the sender-side block of shared/firstprivate
	 data passed to the outlined child function.  */
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* Clobber the sender block after the construct so its stack
	 slot can be reused.  */
      tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  if (!phony_construct)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  /* Phony constructs keep the lowered body inline; real ones keep the
     OMP statement itself (its body was stored above).  */
  if (!phony_construct)
    gimple_bind_add_stmt (bind, stmt);
  else
    gimple_bind_add_seq (bind, new_body);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      /* Nesting order: depend setup, task-reduction setup, the
	 construct, task-reduction teardown, depend teardown.  */
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
10743 /* Lower the GIMPLE_OMP_TARGET in the current statement
10744 in GSI_P. CTX holds context information for the directive. */
10746 static void
10747 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10749 tree clauses;
10750 tree child_fn, t, c;
10751 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
10752 gbind *tgt_bind, *bind, *dep_bind = NULL;
10753 gimple_seq tgt_body, olist, ilist, fplist, new_body;
10754 location_t loc = gimple_location (stmt);
10755 bool offloaded, data_region;
10756 unsigned int map_cnt = 0;
10758 offloaded = is_gimple_omp_offloaded (stmt);
10759 switch (gimple_omp_target_kind (stmt))
10761 case GF_OMP_TARGET_KIND_REGION:
10762 case GF_OMP_TARGET_KIND_UPDATE:
10763 case GF_OMP_TARGET_KIND_ENTER_DATA:
10764 case GF_OMP_TARGET_KIND_EXIT_DATA:
10765 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
10766 case GF_OMP_TARGET_KIND_OACC_KERNELS:
10767 case GF_OMP_TARGET_KIND_OACC_UPDATE:
10768 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
10769 case GF_OMP_TARGET_KIND_OACC_DECLARE:
10770 data_region = false;
10771 break;
10772 case GF_OMP_TARGET_KIND_DATA:
10773 case GF_OMP_TARGET_KIND_OACC_DATA:
10774 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
10775 data_region = true;
10776 break;
10777 default:
10778 gcc_unreachable ();
10781 clauses = gimple_omp_target_clauses (stmt);
10783 gimple_seq dep_ilist = NULL;
10784 gimple_seq dep_olist = NULL;
10785 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
10787 push_gimplify_context ();
10788 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10789 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
10790 &dep_ilist, &dep_olist);
10793 tgt_bind = NULL;
10794 tgt_body = NULL;
10795 if (offloaded)
10797 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
10798 tgt_body = gimple_bind_body (tgt_bind);
10800 else if (data_region)
10801 tgt_body = gimple_omp_body (stmt);
10802 child_fn = ctx->cb.dst_fn;
10804 push_gimplify_context ();
10805 fplist = NULL;
10807 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10808 switch (OMP_CLAUSE_CODE (c))
10810 tree var, x;
10812 default:
10813 break;
10814 case OMP_CLAUSE_MAP:
10815 #if CHECKING_P
10816 /* First check what we're prepared to handle in the following. */
10817 switch (OMP_CLAUSE_MAP_KIND (c))
10819 case GOMP_MAP_ALLOC:
10820 case GOMP_MAP_TO:
10821 case GOMP_MAP_FROM:
10822 case GOMP_MAP_TOFROM:
10823 case GOMP_MAP_POINTER:
10824 case GOMP_MAP_TO_PSET:
10825 case GOMP_MAP_DELETE:
10826 case GOMP_MAP_RELEASE:
10827 case GOMP_MAP_ALWAYS_TO:
10828 case GOMP_MAP_ALWAYS_FROM:
10829 case GOMP_MAP_ALWAYS_TOFROM:
10830 case GOMP_MAP_FIRSTPRIVATE_POINTER:
10831 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
10832 case GOMP_MAP_STRUCT:
10833 case GOMP_MAP_ALWAYS_POINTER:
10834 break;
10835 case GOMP_MAP_FORCE_ALLOC:
10836 case GOMP_MAP_FORCE_TO:
10837 case GOMP_MAP_FORCE_FROM:
10838 case GOMP_MAP_FORCE_TOFROM:
10839 case GOMP_MAP_FORCE_PRESENT:
10840 case GOMP_MAP_FORCE_DEVICEPTR:
10841 case GOMP_MAP_DEVICE_RESIDENT:
10842 case GOMP_MAP_LINK:
10843 gcc_assert (is_gimple_omp_oacc (stmt));
10844 break;
10845 default:
10846 gcc_unreachable ();
10848 #endif
10849 /* FALLTHRU */
10850 case OMP_CLAUSE_TO:
10851 case OMP_CLAUSE_FROM:
10852 oacc_firstprivate:
10853 var = OMP_CLAUSE_DECL (c);
10854 if (!DECL_P (var))
10856 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
10857 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10858 && (OMP_CLAUSE_MAP_KIND (c)
10859 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
10860 map_cnt++;
10861 continue;
10864 if (DECL_SIZE (var)
10865 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
10867 tree var2 = DECL_VALUE_EXPR (var);
10868 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
10869 var2 = TREE_OPERAND (var2, 0);
10870 gcc_assert (DECL_P (var2));
10871 var = var2;
10874 if (offloaded
10875 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10876 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10877 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10879 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10881 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
10882 && varpool_node::get_create (var)->offloadable)
10883 continue;
10885 tree type = build_pointer_type (TREE_TYPE (var));
10886 tree new_var = lookup_decl (var, ctx);
10887 x = create_tmp_var_raw (type, get_name (new_var));
10888 gimple_add_tmp_var (x);
10889 x = build_simple_mem_ref (x);
10890 SET_DECL_VALUE_EXPR (new_var, x);
10891 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10893 continue;
10896 if (!maybe_lookup_field (var, ctx))
10897 continue;
10899 /* Don't remap oacc parallel reduction variables, because the
10900 intermediate result must be local to each gang. */
10901 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10902 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
10904 x = build_receiver_ref (var, true, ctx);
10905 tree new_var = lookup_decl (var, ctx);
10907 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10908 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
10909 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10910 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10911 x = build_simple_mem_ref (x);
10912 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10914 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
10915 if (omp_is_reference (new_var)
10916 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
10918 /* Create a local object to hold the instance
10919 value. */
10920 tree type = TREE_TYPE (TREE_TYPE (new_var));
10921 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
10922 tree inst = create_tmp_var (type, id);
10923 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
10924 x = build_fold_addr_expr (inst);
10926 gimplify_assign (new_var, x, &fplist);
10928 else if (DECL_P (new_var))
10930 SET_DECL_VALUE_EXPR (new_var, x);
10931 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10933 else
10934 gcc_unreachable ();
10936 map_cnt++;
10937 break;
10939 case OMP_CLAUSE_FIRSTPRIVATE:
10940 if (is_oacc_parallel (ctx))
10941 goto oacc_firstprivate;
10942 map_cnt++;
10943 var = OMP_CLAUSE_DECL (c);
10944 if (!omp_is_reference (var)
10945 && !is_gimple_reg_type (TREE_TYPE (var)))
10947 tree new_var = lookup_decl (var, ctx);
10948 if (is_variable_sized (var))
10950 tree pvar = DECL_VALUE_EXPR (var);
10951 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10952 pvar = TREE_OPERAND (pvar, 0);
10953 gcc_assert (DECL_P (pvar));
10954 tree new_pvar = lookup_decl (pvar, ctx);
10955 x = build_fold_indirect_ref (new_pvar);
10956 TREE_THIS_NOTRAP (x) = 1;
10958 else
10959 x = build_receiver_ref (var, true, ctx);
10960 SET_DECL_VALUE_EXPR (new_var, x);
10961 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10963 break;
10965 case OMP_CLAUSE_PRIVATE:
10966 if (is_gimple_omp_oacc (ctx->stmt))
10967 break;
10968 var = OMP_CLAUSE_DECL (c);
10969 if (is_variable_sized (var))
10971 tree new_var = lookup_decl (var, ctx);
10972 tree pvar = DECL_VALUE_EXPR (var);
10973 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10974 pvar = TREE_OPERAND (pvar, 0);
10975 gcc_assert (DECL_P (pvar));
10976 tree new_pvar = lookup_decl (pvar, ctx);
10977 x = build_fold_indirect_ref (new_pvar);
10978 TREE_THIS_NOTRAP (x) = 1;
10979 SET_DECL_VALUE_EXPR (new_var, x);
10980 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10982 break;
10984 case OMP_CLAUSE_USE_DEVICE_PTR:
10985 case OMP_CLAUSE_IS_DEVICE_PTR:
10986 var = OMP_CLAUSE_DECL (c);
10987 map_cnt++;
10988 if (is_variable_sized (var))
10990 tree new_var = lookup_decl (var, ctx);
10991 tree pvar = DECL_VALUE_EXPR (var);
10992 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10993 pvar = TREE_OPERAND (pvar, 0);
10994 gcc_assert (DECL_P (pvar));
10995 tree new_pvar = lookup_decl (pvar, ctx);
10996 x = build_fold_indirect_ref (new_pvar);
10997 TREE_THIS_NOTRAP (x) = 1;
10998 SET_DECL_VALUE_EXPR (new_var, x);
10999 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11001 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11003 tree new_var = lookup_decl (var, ctx);
11004 tree type = build_pointer_type (TREE_TYPE (var));
11005 x = create_tmp_var_raw (type, get_name (new_var));
11006 gimple_add_tmp_var (x);
11007 x = build_simple_mem_ref (x);
11008 SET_DECL_VALUE_EXPR (new_var, x);
11009 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11011 else
11013 tree new_var = lookup_decl (var, ctx);
11014 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11015 gimple_add_tmp_var (x);
11016 SET_DECL_VALUE_EXPR (new_var, x);
11017 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11019 break;
11022 if (offloaded)
11024 target_nesting_level++;
11025 lower_omp (&tgt_body, ctx);
11026 target_nesting_level--;
11028 else if (data_region)
11029 lower_omp (&tgt_body, ctx);
11031 if (offloaded)
11033 /* Declare all the variables created by mapping and the variables
11034 declared in the scope of the target body. */
11035 record_vars_into (ctx->block_vars, child_fn);
11036 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11037 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11040 olist = NULL;
11041 ilist = NULL;
11042 if (ctx->record_type)
11044 ctx->sender_decl
11045 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11046 DECL_NAMELESS (ctx->sender_decl) = 1;
11047 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11048 t = make_tree_vec (3);
11049 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11050 TREE_VEC_ELT (t, 1)
11051 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11052 ".omp_data_sizes");
11053 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11054 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11055 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11056 tree tkind_type = short_unsigned_type_node;
11057 int talign_shift = 8;
11058 TREE_VEC_ELT (t, 2)
11059 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11060 ".omp_data_kinds");
11061 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11062 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11063 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11064 gimple_omp_target_set_data_arg (stmt, t);
11066 vec<constructor_elt, va_gc> *vsize;
11067 vec<constructor_elt, va_gc> *vkind;
11068 vec_alloc (vsize, map_cnt);
11069 vec_alloc (vkind, map_cnt);
11070 unsigned int map_idx = 0;
11072 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11073 switch (OMP_CLAUSE_CODE (c))
11075 tree ovar, nc, s, purpose, var, x, type;
11076 unsigned int talign;
11078 default:
11079 break;
11081 case OMP_CLAUSE_MAP:
11082 case OMP_CLAUSE_TO:
11083 case OMP_CLAUSE_FROM:
11084 oacc_firstprivate_map:
11085 nc = c;
11086 ovar = OMP_CLAUSE_DECL (c);
11087 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11088 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11089 || (OMP_CLAUSE_MAP_KIND (c)
11090 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11091 break;
11092 if (!DECL_P (ovar))
11094 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11095 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11097 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11098 == get_base_address (ovar));
11099 nc = OMP_CLAUSE_CHAIN (c);
11100 ovar = OMP_CLAUSE_DECL (nc);
11102 else
11104 tree x = build_sender_ref (ovar, ctx);
11105 tree v
11106 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11107 gimplify_assign (x, v, &ilist);
11108 nc = NULL_TREE;
11111 else
11113 if (DECL_SIZE (ovar)
11114 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11116 tree ovar2 = DECL_VALUE_EXPR (ovar);
11117 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11118 ovar2 = TREE_OPERAND (ovar2, 0);
11119 gcc_assert (DECL_P (ovar2));
11120 ovar = ovar2;
11122 if (!maybe_lookup_field (ovar, ctx))
11123 continue;
11126 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11127 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11128 talign = DECL_ALIGN_UNIT (ovar);
11129 if (nc)
11131 var = lookup_decl_in_outer_ctx (ovar, ctx);
11132 x = build_sender_ref (ovar, ctx);
11134 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11135 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11136 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11137 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11139 gcc_assert (offloaded);
11140 tree avar
11141 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11142 mark_addressable (avar);
11143 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11144 talign = DECL_ALIGN_UNIT (avar);
11145 avar = build_fold_addr_expr (avar);
11146 gimplify_assign (x, avar, &ilist);
11148 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11150 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11151 if (!omp_is_reference (var))
11153 if (is_gimple_reg (var)
11154 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11155 TREE_NO_WARNING (var) = 1;
11156 var = build_fold_addr_expr (var);
11158 else
11159 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11160 gimplify_assign (x, var, &ilist);
11162 else if (is_gimple_reg (var))
11164 gcc_assert (offloaded);
11165 tree avar = create_tmp_var (TREE_TYPE (var));
11166 mark_addressable (avar);
11167 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11168 if (GOMP_MAP_COPY_TO_P (map_kind)
11169 || map_kind == GOMP_MAP_POINTER
11170 || map_kind == GOMP_MAP_TO_PSET
11171 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11173 /* If we need to initialize a temporary
11174 with VAR because it is not addressable, and
11175 the variable hasn't been initialized yet, then
11176 we'll get a warning for the store to avar.
11177 Don't warn in that case, the mapping might
11178 be implicit. */
11179 TREE_NO_WARNING (var) = 1;
11180 gimplify_assign (avar, var, &ilist);
11182 avar = build_fold_addr_expr (avar);
11183 gimplify_assign (x, avar, &ilist);
11184 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11185 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11186 && !TYPE_READONLY (TREE_TYPE (var)))
11188 x = unshare_expr (x);
11189 x = build_simple_mem_ref (x);
11190 gimplify_assign (var, x, &olist);
11193 else
11195 var = build_fold_addr_expr (var);
11196 gimplify_assign (x, var, &ilist);
11199 s = NULL_TREE;
11200 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11202 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11203 s = TREE_TYPE (ovar);
11204 if (TREE_CODE (s) == REFERENCE_TYPE)
11205 s = TREE_TYPE (s);
11206 s = TYPE_SIZE_UNIT (s);
11208 else
11209 s = OMP_CLAUSE_SIZE (c);
11210 if (s == NULL_TREE)
11211 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11212 s = fold_convert (size_type_node, s);
11213 purpose = size_int (map_idx++);
11214 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11215 if (TREE_CODE (s) != INTEGER_CST)
11216 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11218 unsigned HOST_WIDE_INT tkind, tkind_zero;
11219 switch (OMP_CLAUSE_CODE (c))
11221 case OMP_CLAUSE_MAP:
11222 tkind = OMP_CLAUSE_MAP_KIND (c);
11223 tkind_zero = tkind;
11224 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11225 switch (tkind)
11227 case GOMP_MAP_ALLOC:
11228 case GOMP_MAP_TO:
11229 case GOMP_MAP_FROM:
11230 case GOMP_MAP_TOFROM:
11231 case GOMP_MAP_ALWAYS_TO:
11232 case GOMP_MAP_ALWAYS_FROM:
11233 case GOMP_MAP_ALWAYS_TOFROM:
11234 case GOMP_MAP_RELEASE:
11235 case GOMP_MAP_FORCE_TO:
11236 case GOMP_MAP_FORCE_FROM:
11237 case GOMP_MAP_FORCE_TOFROM:
11238 case GOMP_MAP_FORCE_PRESENT:
11239 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11240 break;
11241 case GOMP_MAP_DELETE:
11242 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11243 default:
11244 break;
11246 if (tkind_zero != tkind)
11248 if (integer_zerop (s))
11249 tkind = tkind_zero;
11250 else if (integer_nonzerop (s))
11251 tkind_zero = tkind;
11253 break;
11254 case OMP_CLAUSE_FIRSTPRIVATE:
11255 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11256 tkind = GOMP_MAP_TO;
11257 tkind_zero = tkind;
11258 break;
11259 case OMP_CLAUSE_TO:
11260 tkind = GOMP_MAP_TO;
11261 tkind_zero = tkind;
11262 break;
11263 case OMP_CLAUSE_FROM:
11264 tkind = GOMP_MAP_FROM;
11265 tkind_zero = tkind;
11266 break;
11267 default:
11268 gcc_unreachable ();
11270 gcc_checking_assert (tkind
11271 < (HOST_WIDE_INT_C (1U) << talign_shift));
11272 gcc_checking_assert (tkind_zero
11273 < (HOST_WIDE_INT_C (1U) << talign_shift));
11274 talign = ceil_log2 (talign);
11275 tkind |= talign << talign_shift;
11276 tkind_zero |= talign << talign_shift;
11277 gcc_checking_assert (tkind
11278 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11279 gcc_checking_assert (tkind_zero
11280 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11281 if (tkind == tkind_zero)
11282 x = build_int_cstu (tkind_type, tkind);
11283 else
11285 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11286 x = build3 (COND_EXPR, tkind_type,
11287 fold_build2 (EQ_EXPR, boolean_type_node,
11288 unshare_expr (s), size_zero_node),
11289 build_int_cstu (tkind_type, tkind_zero),
11290 build_int_cstu (tkind_type, tkind));
11292 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11293 if (nc && nc != c)
11294 c = nc;
11295 break;
11297 case OMP_CLAUSE_FIRSTPRIVATE:
11298 if (is_oacc_parallel (ctx))
11299 goto oacc_firstprivate_map;
11300 ovar = OMP_CLAUSE_DECL (c);
11301 if (omp_is_reference (ovar))
11302 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11303 else
11304 talign = DECL_ALIGN_UNIT (ovar);
11305 var = lookup_decl_in_outer_ctx (ovar, ctx);
11306 x = build_sender_ref (ovar, ctx);
11307 tkind = GOMP_MAP_FIRSTPRIVATE;
11308 type = TREE_TYPE (ovar);
11309 if (omp_is_reference (ovar))
11310 type = TREE_TYPE (type);
11311 if ((INTEGRAL_TYPE_P (type)
11312 && TYPE_PRECISION (type) <= POINTER_SIZE)
11313 || TREE_CODE (type) == POINTER_TYPE)
11315 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11316 tree t = var;
11317 if (omp_is_reference (var))
11318 t = build_simple_mem_ref (var);
11319 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11320 TREE_NO_WARNING (var) = 1;
11321 if (TREE_CODE (type) != POINTER_TYPE)
11322 t = fold_convert (pointer_sized_int_node, t);
11323 t = fold_convert (TREE_TYPE (x), t);
11324 gimplify_assign (x, t, &ilist);
11326 else if (omp_is_reference (var))
11327 gimplify_assign (x, var, &ilist);
11328 else if (is_gimple_reg (var))
11330 tree avar = create_tmp_var (TREE_TYPE (var));
11331 mark_addressable (avar);
11332 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11333 TREE_NO_WARNING (var) = 1;
11334 gimplify_assign (avar, var, &ilist);
11335 avar = build_fold_addr_expr (avar);
11336 gimplify_assign (x, avar, &ilist);
11338 else
11340 var = build_fold_addr_expr (var);
11341 gimplify_assign (x, var, &ilist);
11343 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11344 s = size_int (0);
11345 else if (omp_is_reference (ovar))
11346 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11347 else
11348 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11349 s = fold_convert (size_type_node, s);
11350 purpose = size_int (map_idx++);
11351 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11352 if (TREE_CODE (s) != INTEGER_CST)
11353 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11355 gcc_checking_assert (tkind
11356 < (HOST_WIDE_INT_C (1U) << talign_shift));
11357 talign = ceil_log2 (talign);
11358 tkind |= talign << talign_shift;
11359 gcc_checking_assert (tkind
11360 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11361 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11362 build_int_cstu (tkind_type, tkind));
11363 break;
11365 case OMP_CLAUSE_USE_DEVICE_PTR:
11366 case OMP_CLAUSE_IS_DEVICE_PTR:
11367 ovar = OMP_CLAUSE_DECL (c);
11368 var = lookup_decl_in_outer_ctx (ovar, ctx);
11369 x = build_sender_ref (ovar, ctx);
11370 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
11371 tkind = GOMP_MAP_USE_DEVICE_PTR;
11372 else
11373 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11374 type = TREE_TYPE (ovar);
11375 if (TREE_CODE (type) == ARRAY_TYPE)
11376 var = build_fold_addr_expr (var);
11377 else
11379 if (omp_is_reference (ovar))
11381 type = TREE_TYPE (type);
11382 if (TREE_CODE (type) != ARRAY_TYPE)
11383 var = build_simple_mem_ref (var);
11384 var = fold_convert (TREE_TYPE (x), var);
11387 gimplify_assign (x, var, &ilist);
11388 s = size_int (0);
11389 purpose = size_int (map_idx++);
11390 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11391 gcc_checking_assert (tkind
11392 < (HOST_WIDE_INT_C (1U) << talign_shift));
11393 gcc_checking_assert (tkind
11394 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11395 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11396 build_int_cstu (tkind_type, tkind));
11397 break;
11400 gcc_assert (map_idx == map_cnt);
11402 DECL_INITIAL (TREE_VEC_ELT (t, 1))
11403 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
11404 DECL_INITIAL (TREE_VEC_ELT (t, 2))
11405 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
11406 for (int i = 1; i <= 2; i++)
11407 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
11409 gimple_seq initlist = NULL;
11410 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
11411 TREE_VEC_ELT (t, i)),
11412 &initlist, true, NULL_TREE);
11413 gimple_seq_add_seq (&ilist, initlist);
11415 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
11416 NULL);
11417 TREE_THIS_VOLATILE (clobber) = 1;
11418 gimple_seq_add_stmt (&olist,
11419 gimple_build_assign (TREE_VEC_ELT (t, i),
11420 clobber));
11423 tree clobber = build_constructor (ctx->record_type, NULL);
11424 TREE_THIS_VOLATILE (clobber) = 1;
11425 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11426 clobber));
11429 /* Once all the expansions are done, sequence all the different
11430 fragments inside gimple_omp_body. */
11432 new_body = NULL;
11434 if (offloaded
11435 && ctx->record_type)
11437 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11438 /* fixup_child_record_type might have changed receiver_decl's type. */
11439 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11440 gimple_seq_add_stmt (&new_body,
11441 gimple_build_assign (ctx->receiver_decl, t));
11443 gimple_seq_add_seq (&new_body, fplist);
11445 if (offloaded || data_region)
11447 tree prev = NULL_TREE;
11448 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11449 switch (OMP_CLAUSE_CODE (c))
11451 tree var, x;
11452 default:
11453 break;
11454 case OMP_CLAUSE_FIRSTPRIVATE:
11455 if (is_gimple_omp_oacc (ctx->stmt))
11456 break;
11457 var = OMP_CLAUSE_DECL (c);
11458 if (omp_is_reference (var)
11459 || is_gimple_reg_type (TREE_TYPE (var)))
11461 tree new_var = lookup_decl (var, ctx);
11462 tree type;
11463 type = TREE_TYPE (var);
11464 if (omp_is_reference (var))
11465 type = TREE_TYPE (type);
11466 if ((INTEGRAL_TYPE_P (type)
11467 && TYPE_PRECISION (type) <= POINTER_SIZE)
11468 || TREE_CODE (type) == POINTER_TYPE)
11470 x = build_receiver_ref (var, false, ctx);
11471 if (TREE_CODE (type) != POINTER_TYPE)
11472 x = fold_convert (pointer_sized_int_node, x);
11473 x = fold_convert (type, x);
11474 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11475 fb_rvalue);
11476 if (omp_is_reference (var))
11478 tree v = create_tmp_var_raw (type, get_name (var));
11479 gimple_add_tmp_var (v);
11480 TREE_ADDRESSABLE (v) = 1;
11481 gimple_seq_add_stmt (&new_body,
11482 gimple_build_assign (v, x));
11483 x = build_fold_addr_expr (v);
11485 gimple_seq_add_stmt (&new_body,
11486 gimple_build_assign (new_var, x));
11488 else
11490 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
11491 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11492 fb_rvalue);
11493 gimple_seq_add_stmt (&new_body,
11494 gimple_build_assign (new_var, x));
11497 else if (is_variable_sized (var))
11499 tree pvar = DECL_VALUE_EXPR (var);
11500 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11501 pvar = TREE_OPERAND (pvar, 0);
11502 gcc_assert (DECL_P (pvar));
11503 tree new_var = lookup_decl (pvar, ctx);
11504 x = build_receiver_ref (var, false, ctx);
11505 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11506 gimple_seq_add_stmt (&new_body,
11507 gimple_build_assign (new_var, x));
11509 break;
11510 case OMP_CLAUSE_PRIVATE:
11511 if (is_gimple_omp_oacc (ctx->stmt))
11512 break;
11513 var = OMP_CLAUSE_DECL (c);
11514 if (omp_is_reference (var))
11516 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11517 tree new_var = lookup_decl (var, ctx);
11518 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
11519 if (TREE_CONSTANT (x))
11521 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
11522 get_name (var));
11523 gimple_add_tmp_var (x);
11524 TREE_ADDRESSABLE (x) = 1;
11525 x = build_fold_addr_expr_loc (clause_loc, x);
11527 else
11528 break;
11530 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11531 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11532 gimple_seq_add_stmt (&new_body,
11533 gimple_build_assign (new_var, x));
11535 break;
11536 case OMP_CLAUSE_USE_DEVICE_PTR:
11537 case OMP_CLAUSE_IS_DEVICE_PTR:
11538 var = OMP_CLAUSE_DECL (c);
11539 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
11540 x = build_sender_ref (var, ctx);
11541 else
11542 x = build_receiver_ref (var, false, ctx);
11543 if (is_variable_sized (var))
11545 tree pvar = DECL_VALUE_EXPR (var);
11546 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11547 pvar = TREE_OPERAND (pvar, 0);
11548 gcc_assert (DECL_P (pvar));
11549 tree new_var = lookup_decl (pvar, ctx);
11550 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11551 gimple_seq_add_stmt (&new_body,
11552 gimple_build_assign (new_var, x));
11554 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11556 tree new_var = lookup_decl (var, ctx);
11557 new_var = DECL_VALUE_EXPR (new_var);
11558 gcc_assert (TREE_CODE (new_var) == MEM_REF);
11559 new_var = TREE_OPERAND (new_var, 0);
11560 gcc_assert (DECL_P (new_var));
11561 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11562 gimple_seq_add_stmt (&new_body,
11563 gimple_build_assign (new_var, x));
11565 else
11567 tree type = TREE_TYPE (var);
11568 tree new_var = lookup_decl (var, ctx);
11569 if (omp_is_reference (var))
11571 type = TREE_TYPE (type);
11572 if (TREE_CODE (type) != ARRAY_TYPE)
11574 tree v = create_tmp_var_raw (type, get_name (var));
11575 gimple_add_tmp_var (v);
11576 TREE_ADDRESSABLE (v) = 1;
11577 x = fold_convert (type, x);
11578 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11579 fb_rvalue);
11580 gimple_seq_add_stmt (&new_body,
11581 gimple_build_assign (v, x));
11582 x = build_fold_addr_expr (v);
11585 new_var = DECL_VALUE_EXPR (new_var);
11586 x = fold_convert (TREE_TYPE (new_var), x);
11587 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11588 gimple_seq_add_stmt (&new_body,
11589 gimple_build_assign (new_var, x));
11591 break;
11593 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
11594 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
11595 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
11596 or references to VLAs. */
11597 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11598 switch (OMP_CLAUSE_CODE (c))
11600 tree var;
11601 default:
11602 break;
11603 case OMP_CLAUSE_MAP:
11604 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11605 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11607 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11608 poly_int64 offset = 0;
11609 gcc_assert (prev);
11610 var = OMP_CLAUSE_DECL (c);
11611 if (DECL_P (var)
11612 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
11613 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
11614 ctx))
11615 && varpool_node::get_create (var)->offloadable)
11616 break;
11617 if (TREE_CODE (var) == INDIRECT_REF
11618 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
11619 var = TREE_OPERAND (var, 0);
11620 if (TREE_CODE (var) == COMPONENT_REF)
11622 var = get_addr_base_and_unit_offset (var, &offset);
11623 gcc_assert (var != NULL_TREE && DECL_P (var));
11625 else if (DECL_SIZE (var)
11626 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11628 tree var2 = DECL_VALUE_EXPR (var);
11629 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11630 var2 = TREE_OPERAND (var2, 0);
11631 gcc_assert (DECL_P (var2));
11632 var = var2;
11634 tree new_var = lookup_decl (var, ctx), x;
11635 tree type = TREE_TYPE (new_var);
11636 bool is_ref;
11637 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
11638 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11639 == COMPONENT_REF))
11641 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
11642 is_ref = true;
11643 new_var = build2 (MEM_REF, type,
11644 build_fold_addr_expr (new_var),
11645 build_int_cst (build_pointer_type (type),
11646 offset));
11648 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
11650 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
11651 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
11652 new_var = build2 (MEM_REF, type,
11653 build_fold_addr_expr (new_var),
11654 build_int_cst (build_pointer_type (type),
11655 offset));
11657 else
11658 is_ref = omp_is_reference (var);
11659 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11660 is_ref = false;
11661 bool ref_to_array = false;
11662 if (is_ref)
11664 type = TREE_TYPE (type);
11665 if (TREE_CODE (type) == ARRAY_TYPE)
11667 type = build_pointer_type (type);
11668 ref_to_array = true;
11671 else if (TREE_CODE (type) == ARRAY_TYPE)
11673 tree decl2 = DECL_VALUE_EXPR (new_var);
11674 gcc_assert (TREE_CODE (decl2) == MEM_REF);
11675 decl2 = TREE_OPERAND (decl2, 0);
11676 gcc_assert (DECL_P (decl2));
11677 new_var = decl2;
11678 type = TREE_TYPE (new_var);
11680 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
11681 x = fold_convert_loc (clause_loc, type, x);
11682 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
11684 tree bias = OMP_CLAUSE_SIZE (c);
11685 if (DECL_P (bias))
11686 bias = lookup_decl (bias, ctx);
11687 bias = fold_convert_loc (clause_loc, sizetype, bias);
11688 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
11689 bias);
11690 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
11691 TREE_TYPE (x), x, bias);
11693 if (ref_to_array)
11694 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11695 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11696 if (is_ref && !ref_to_array)
11698 tree t = create_tmp_var_raw (type, get_name (var));
11699 gimple_add_tmp_var (t);
11700 TREE_ADDRESSABLE (t) = 1;
11701 gimple_seq_add_stmt (&new_body,
11702 gimple_build_assign (t, x));
11703 x = build_fold_addr_expr_loc (clause_loc, t);
11705 gimple_seq_add_stmt (&new_body,
11706 gimple_build_assign (new_var, x));
11707 prev = NULL_TREE;
11709 else if (OMP_CLAUSE_CHAIN (c)
11710 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
11711 == OMP_CLAUSE_MAP
11712 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11713 == GOMP_MAP_FIRSTPRIVATE_POINTER
11714 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11715 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11716 prev = c;
11717 break;
11718 case OMP_CLAUSE_PRIVATE:
11719 var = OMP_CLAUSE_DECL (c);
11720 if (is_variable_sized (var))
11722 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11723 tree new_var = lookup_decl (var, ctx);
11724 tree pvar = DECL_VALUE_EXPR (var);
11725 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11726 pvar = TREE_OPERAND (pvar, 0);
11727 gcc_assert (DECL_P (pvar));
11728 tree new_pvar = lookup_decl (pvar, ctx);
11729 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11730 tree al = size_int (DECL_ALIGN (var));
11731 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
11732 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
11733 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
11734 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11735 gimple_seq_add_stmt (&new_body,
11736 gimple_build_assign (new_pvar, x));
11738 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
11740 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11741 tree new_var = lookup_decl (var, ctx);
11742 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
11743 if (TREE_CONSTANT (x))
11744 break;
11745 else
11747 tree atmp
11748 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11749 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
11750 tree al = size_int (TYPE_ALIGN (rtype));
11751 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
11754 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11755 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11756 gimple_seq_add_stmt (&new_body,
11757 gimple_build_assign (new_var, x));
11759 break;
11762 gimple_seq fork_seq = NULL;
11763 gimple_seq join_seq = NULL;
11765 if (is_oacc_parallel (ctx))
11767 /* If there are reductions on the offloaded region itself, treat
11768 them as a dummy GANG loop. */
11769 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
11771 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
11772 false, NULL, NULL, &fork_seq, &join_seq, ctx);
11775 gimple_seq_add_seq (&new_body, fork_seq);
11776 gimple_seq_add_seq (&new_body, tgt_body);
11777 gimple_seq_add_seq (&new_body, join_seq);
11779 if (offloaded)
11780 new_body = maybe_catch_exception (new_body);
11782 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11783 gimple_omp_set_body (stmt, new_body);
11786 bind = gimple_build_bind (NULL, NULL,
11787 tgt_bind ? gimple_bind_block (tgt_bind)
11788 : NULL_TREE);
11789 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11790 gimple_bind_add_seq (bind, ilist);
11791 gimple_bind_add_stmt (bind, stmt);
11792 gimple_bind_add_seq (bind, olist);
11794 pop_gimplify_context (NULL);
11796 if (dep_bind)
11798 gimple_bind_add_seq (dep_bind, dep_ilist);
11799 gimple_bind_add_stmt (dep_bind, bind);
11800 gimple_bind_add_seq (dep_bind, dep_olist);
11801 pop_gimplify_context (dep_bind);
11805 /* Expand code for an OpenMP teams directive. */
11807 static void
11808 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11810 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
11811 push_gimplify_context ();
11813 tree block = make_node (BLOCK);
11814 gbind *bind = gimple_build_bind (NULL, NULL, block);
11815 gsi_replace (gsi_p, bind, true);
11816 gimple_seq bind_body = NULL;
11817 gimple_seq dlist = NULL;
11818 gimple_seq olist = NULL;
11820 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
11821 OMP_CLAUSE_NUM_TEAMS);
11822 if (num_teams == NULL_TREE)
11823 num_teams = build_int_cst (unsigned_type_node, 0);
11824 else
11826 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
11827 num_teams = fold_convert (unsigned_type_node, num_teams);
11828 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
11830 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
11831 OMP_CLAUSE_THREAD_LIMIT);
11832 if (thread_limit == NULL_TREE)
11833 thread_limit = build_int_cst (unsigned_type_node, 0);
11834 else
11836 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
11837 thread_limit = fold_convert (unsigned_type_node, thread_limit);
11838 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
11839 fb_rvalue);
11842 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
11843 &bind_body, &dlist, ctx, NULL);
11844 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
11845 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
11846 NULL, ctx);
11847 if (!gimple_omp_teams_grid_phony (teams_stmt))
11849 gimple_seq_add_stmt (&bind_body, teams_stmt);
11850 location_t loc = gimple_location (teams_stmt);
11851 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
11852 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
11853 gimple_set_location (call, loc);
11854 gimple_seq_add_stmt (&bind_body, call);
11857 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
11858 gimple_omp_set_body (teams_stmt, NULL);
11859 gimple_seq_add_seq (&bind_body, olist);
11860 gimple_seq_add_seq (&bind_body, dlist);
11861 if (!gimple_omp_teams_grid_phony (teams_stmt))
11862 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
11863 gimple_bind_set_body (bind, bind_body);
11865 pop_gimplify_context (bind);
11867 gimple_bind_append_vars (bind, ctx->block_vars);
11868 BLOCK_VARS (block) = ctx->block_vars;
11869 if (BLOCK_VARS (block))
11870 TREE_USED (block) = 1;
11873 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
11875 static void
11876 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11878 gimple *stmt = gsi_stmt (*gsi_p);
11879 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11880 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
11881 gimple_build_omp_return (false));
11885 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
11886 regimplified. If DATA is non-NULL, lower_omp_1 is outside
11887 of OMP context, but with task_shared_vars set. */
11889 static tree
11890 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
11891 void *data)
11893 tree t = *tp;
11895 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
11896 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
11897 return t;
11899 if (task_shared_vars
11900 && DECL_P (t)
11901 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
11902 return t;
11904 /* If a global variable has been privatized, TREE_CONSTANT on
11905 ADDR_EXPR might be wrong. */
11906 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
11907 recompute_tree_invariant_for_addr_expr (t);
11909 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
11910 return NULL_TREE;
11913 /* Data to be communicated between lower_omp_regimplify_operands and
11914 lower_omp_regimplify_operands_p. */
11916 struct lower_omp_regimplify_operands_data
11918 omp_context *ctx;
11919 vec<tree> *decls;
11922 /* Helper function for lower_omp_regimplify_operands. Find
11923 omp_member_access_dummy_var vars and adjust temporarily their
11924 DECL_VALUE_EXPRs if needed. */
11926 static tree
11927 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
11928 void *data)
11930 tree t = omp_member_access_dummy_var (*tp);
11931 if (t)
11933 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
11934 lower_omp_regimplify_operands_data *ldata
11935 = (lower_omp_regimplify_operands_data *) wi->info;
11936 tree o = maybe_lookup_decl (t, ldata->ctx);
11937 if (o != t)
11939 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
11940 ldata->decls->safe_push (*tp);
11941 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
11942 SET_DECL_VALUE_EXPR (*tp, v);
11945 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
11946 return NULL_TREE;
11949 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
11950 of omp_member_access_dummy_var vars during regimplification. */
11952 static void
11953 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
11954 gimple_stmt_iterator *gsi_p)
11956 auto_vec<tree, 10> decls;
11957 if (ctx)
11959 struct walk_stmt_info wi;
11960 memset (&wi, '\0', sizeof (wi));
11961 struct lower_omp_regimplify_operands_data data;
11962 data.ctx = ctx;
11963 data.decls = &decls;
11964 wi.info = &data;
11965 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
11967 gimple_regimplify_operands (stmt, gsi_p);
11968 while (!decls.is_empty ())
11970 tree t = decls.pop ();
11971 tree v = decls.pop ();
11972 SET_DECL_VALUE_EXPR (t, v);
11976 static void
11977 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11979 gimple *stmt = gsi_stmt (*gsi_p);
11980 struct walk_stmt_info wi;
11981 gcall *call_stmt;
11983 if (gimple_has_location (stmt))
11984 input_location = gimple_location (stmt);
11986 if (task_shared_vars)
11987 memset (&wi, '\0', sizeof (wi));
11989 /* If we have issued syntax errors, avoid doing any heavy lifting.
11990 Just replace the OMP directives with a NOP to avoid
11991 confusing RTL expansion. */
11992 if (seen_error () && is_gimple_omp (stmt))
11994 gsi_replace (gsi_p, gimple_build_nop (), true);
11995 return;
11998 switch (gimple_code (stmt))
12000 case GIMPLE_COND:
12002 gcond *cond_stmt = as_a <gcond *> (stmt);
12003 if ((ctx || task_shared_vars)
12004 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12005 lower_omp_regimplify_p,
12006 ctx ? NULL : &wi, NULL)
12007 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12008 lower_omp_regimplify_p,
12009 ctx ? NULL : &wi, NULL)))
12010 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12012 break;
12013 case GIMPLE_CATCH:
12014 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12015 break;
12016 case GIMPLE_EH_FILTER:
12017 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12018 break;
12019 case GIMPLE_TRY:
12020 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12021 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12022 break;
12023 case GIMPLE_TRANSACTION:
12024 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12025 ctx);
12026 break;
12027 case GIMPLE_BIND:
12028 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12029 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12030 break;
12031 case GIMPLE_OMP_PARALLEL:
12032 case GIMPLE_OMP_TASK:
12033 ctx = maybe_lookup_ctx (stmt);
12034 gcc_assert (ctx);
12035 if (ctx->cancellable)
12036 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12037 lower_omp_taskreg (gsi_p, ctx);
12038 break;
12039 case GIMPLE_OMP_FOR:
12040 ctx = maybe_lookup_ctx (stmt);
12041 gcc_assert (ctx);
12042 if (ctx->cancellable)
12043 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12044 lower_omp_for (gsi_p, ctx);
12045 break;
12046 case GIMPLE_OMP_SECTIONS:
12047 ctx = maybe_lookup_ctx (stmt);
12048 gcc_assert (ctx);
12049 if (ctx->cancellable)
12050 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12051 lower_omp_sections (gsi_p, ctx);
12052 break;
12053 case GIMPLE_OMP_SINGLE:
12054 ctx = maybe_lookup_ctx (stmt);
12055 gcc_assert (ctx);
12056 lower_omp_single (gsi_p, ctx);
12057 break;
12058 case GIMPLE_OMP_MASTER:
12059 ctx = maybe_lookup_ctx (stmt);
12060 gcc_assert (ctx);
12061 lower_omp_master (gsi_p, ctx);
12062 break;
12063 case GIMPLE_OMP_TASKGROUP:
12064 ctx = maybe_lookup_ctx (stmt);
12065 gcc_assert (ctx);
12066 lower_omp_taskgroup (gsi_p, ctx);
12067 break;
12068 case GIMPLE_OMP_ORDERED:
12069 ctx = maybe_lookup_ctx (stmt);
12070 gcc_assert (ctx);
12071 lower_omp_ordered (gsi_p, ctx);
12072 break;
12073 case GIMPLE_OMP_SCAN:
12074 ctx = maybe_lookup_ctx (stmt);
12075 gcc_assert (ctx);
12076 lower_omp_scan (gsi_p, ctx);
12077 break;
12078 case GIMPLE_OMP_CRITICAL:
12079 ctx = maybe_lookup_ctx (stmt);
12080 gcc_assert (ctx);
12081 lower_omp_critical (gsi_p, ctx);
12082 break;
12083 case GIMPLE_OMP_ATOMIC_LOAD:
12084 if ((ctx || task_shared_vars)
12085 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12086 as_a <gomp_atomic_load *> (stmt)),
12087 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12088 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12089 break;
12090 case GIMPLE_OMP_TARGET:
12091 ctx = maybe_lookup_ctx (stmt);
12092 gcc_assert (ctx);
12093 lower_omp_target (gsi_p, ctx);
12094 break;
12095 case GIMPLE_OMP_TEAMS:
12096 ctx = maybe_lookup_ctx (stmt);
12097 gcc_assert (ctx);
12098 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12099 lower_omp_taskreg (gsi_p, ctx);
12100 else
12101 lower_omp_teams (gsi_p, ctx);
12102 break;
12103 case GIMPLE_OMP_GRID_BODY:
12104 ctx = maybe_lookup_ctx (stmt);
12105 gcc_assert (ctx);
12106 lower_omp_grid_body (gsi_p, ctx);
12107 break;
12108 case GIMPLE_CALL:
12109 tree fndecl;
12110 call_stmt = as_a <gcall *> (stmt);
12111 fndecl = gimple_call_fndecl (call_stmt);
12112 if (fndecl
12113 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12114 switch (DECL_FUNCTION_CODE (fndecl))
12116 case BUILT_IN_GOMP_BARRIER:
12117 if (ctx == NULL)
12118 break;
12119 /* FALLTHRU */
12120 case BUILT_IN_GOMP_CANCEL:
12121 case BUILT_IN_GOMP_CANCELLATION_POINT:
12122 omp_context *cctx;
12123 cctx = ctx;
12124 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12125 cctx = cctx->outer;
12126 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12127 if (!cctx->cancellable)
12129 if (DECL_FUNCTION_CODE (fndecl)
12130 == BUILT_IN_GOMP_CANCELLATION_POINT)
12132 stmt = gimple_build_nop ();
12133 gsi_replace (gsi_p, stmt, false);
12135 break;
12137 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12139 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12140 gimple_call_set_fndecl (call_stmt, fndecl);
12141 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12143 tree lhs;
12144 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12145 gimple_call_set_lhs (call_stmt, lhs);
12146 tree fallthru_label;
12147 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12148 gimple *g;
12149 g = gimple_build_label (fallthru_label);
12150 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12151 g = gimple_build_cond (NE_EXPR, lhs,
12152 fold_convert (TREE_TYPE (lhs),
12153 boolean_false_node),
12154 cctx->cancel_label, fallthru_label);
12155 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12156 break;
12157 default:
12158 break;
12160 goto regimplify;
12162 case GIMPLE_ASSIGN:
12163 for (omp_context *up = ctx; up; up = up->outer)
12165 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12166 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12167 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
12168 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
12169 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
12170 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12171 && (gimple_omp_target_kind (up->stmt)
12172 == GF_OMP_TARGET_KIND_DATA)))
12173 continue;
12174 else if (!up->lastprivate_conditional_map)
12175 break;
12176 tree lhs = get_base_address (gimple_assign_lhs (stmt));
12177 if (TREE_CODE (lhs) == MEM_REF
12178 && DECL_P (TREE_OPERAND (lhs, 0))
12179 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12180 0))) == REFERENCE_TYPE)
12181 lhs = TREE_OPERAND (lhs, 0);
12182 if (DECL_P (lhs))
12183 if (tree *v = up->lastprivate_conditional_map->get (lhs))
12185 tree clauses;
12186 if (up->combined_into_simd_safelen0)
12187 up = up->outer;
12188 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12189 clauses = gimple_omp_for_clauses (up->stmt);
12190 else
12191 clauses = gimple_omp_sections_clauses (up->stmt);
12192 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
12193 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12194 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12195 OMP_CLAUSE__CONDTEMP_);
12196 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
12197 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12198 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12201 /* FALLTHRU */
12203 default:
12204 regimplify:
12205 if ((ctx || task_shared_vars)
12206 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12207 ctx ? NULL : &wi))
12209 /* Just remove clobbers, this should happen only if we have
12210 "privatized" local addressable variables in SIMD regions,
12211 the clobber isn't needed in that case and gimplifying address
12212 of the ARRAY_REF into a pointer and creating MEM_REF based
12213 clobber would create worse code than we get with the clobber
12214 dropped. */
12215 if (gimple_clobber_p (stmt))
12217 gsi_replace (gsi_p, gimple_build_nop (), true);
12218 break;
12220 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12222 break;
12226 static void
12227 lower_omp (gimple_seq *body, omp_context *ctx)
12229 location_t saved_location = input_location;
12230 gimple_stmt_iterator gsi;
12231 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12232 lower_omp_1 (&gsi, ctx);
12233 /* During gimplification, we haven't folded statments inside offloading
12234 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12235 if (target_nesting_level || taskreg_nesting_level)
12236 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12237 fold_stmt (&gsi);
12238 input_location = saved_location;
12241 /* Main entry point. */
12243 static unsigned int
12244 execute_lower_omp (void)
12246 gimple_seq body;
12247 int i;
12248 omp_context *ctx;
12250 /* This pass always runs, to provide PROP_gimple_lomp.
12251 But often, there is nothing to do. */
12252 if (flag_openacc == 0 && flag_openmp == 0
12253 && flag_openmp_simd == 0)
12254 return 0;
12256 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
12257 delete_omp_context);
12259 body = gimple_body (current_function_decl);
12261 if (hsa_gen_requested_p ())
12262 omp_grid_gridify_all_targets (&body);
12264 scan_omp (&body, NULL);
12265 gcc_assert (taskreg_nesting_level == 0);
12266 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
12267 finish_taskreg_scan (ctx);
12268 taskreg_contexts.release ();
12270 if (all_contexts->root)
12272 if (task_shared_vars)
12273 push_gimplify_context ();
12274 lower_omp (&body, NULL);
12275 if (task_shared_vars)
12276 pop_gimplify_context (NULL);
12279 if (all_contexts)
12281 splay_tree_delete (all_contexts);
12282 all_contexts = NULL;
12284 BITMAP_FREE (task_shared_vars);
12286 /* If current function is a method, remove artificial dummy VAR_DECL created
12287 for non-static data member privatization, they aren't needed for
12288 debuginfo nor anything else, have been already replaced everywhere in the
12289 IL and cause problems with LTO. */
12290 if (DECL_ARGUMENTS (current_function_decl)
12291 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
12292 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
12293 == POINTER_TYPE))
12294 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
12295 return 0;
12298 namespace {
12300 const pass_data pass_data_lower_omp =
12302 GIMPLE_PASS, /* type */
12303 "omplower", /* name */
12304 OPTGROUP_OMP, /* optinfo_flags */
12305 TV_NONE, /* tv_id */
12306 PROP_gimple_any, /* properties_required */
12307 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
12308 0, /* properties_destroyed */
12309 0, /* todo_flags_start */
12310 0, /* todo_flags_finish */
12313 class pass_lower_omp : public gimple_opt_pass
12315 public:
12316 pass_lower_omp (gcc::context *ctxt)
12317 : gimple_opt_pass (pass_data_lower_omp, ctxt)
12320 /* opt_pass methods: */
12321 virtual unsigned int execute (function *) { return execute_lower_omp (); }
12323 }; // class pass_lower_omp
12325 } // anon namespace
12327 gimple_opt_pass *
12328 make_pass_lower_omp (gcc::context *ctxt)
12330 return new pass_lower_omp (ctxt);
12333 /* The following is a utility to diagnose structured block violations.
12334 It is not part of the "omplower" pass, as that's invoked too late. It
12335 should be invoked by the respective front ends after gimplification. */
12337 static splay_tree all_labels;
12339 /* Check for mismatched contexts and generate an error if needed. Return
12340 true if an error is detected. */
12342 static bool
12343 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
12344 gimple *branch_ctx, gimple *label_ctx)
12346 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
12347 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
12349 if (label_ctx == branch_ctx)
12350 return false;
12352 const char* kind = NULL;
12354 if (flag_openacc)
12356 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
12357 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
12359 gcc_checking_assert (kind == NULL);
12360 kind = "OpenACC";
12363 if (kind == NULL)
12365 gcc_checking_assert (flag_openmp || flag_openmp_simd);
12366 kind = "OpenMP";
12369 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
12370 so we could traverse it and issue a correct "exit" or "enter" error
12371 message upon a structured block violation.
12373 We built the context by building a list with tree_cons'ing, but there is
12374 no easy counterpart in gimple tuples. It seems like far too much work
12375 for issuing exit/enter error messages. If someone really misses the
12376 distinct error message... patches welcome. */
12378 #if 0
12379 /* Try to avoid confusing the user by producing and error message
12380 with correct "exit" or "enter" verbiage. We prefer "exit"
12381 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
12382 if (branch_ctx == NULL)
12383 exit_p = false;
12384 else
12386 while (label_ctx)
12388 if (TREE_VALUE (label_ctx) == branch_ctx)
12390 exit_p = false;
12391 break;
12393 label_ctx = TREE_CHAIN (label_ctx);
12397 if (exit_p)
12398 error ("invalid exit from %s structured block", kind);
12399 else
12400 error ("invalid entry to %s structured block", kind);
12401 #endif
12403 /* If it's obvious we have an invalid entry, be specific about the error. */
12404 if (branch_ctx == NULL)
12405 error ("invalid entry to %s structured block", kind);
12406 else
12408 /* Otherwise, be vague and lazy, but efficient. */
12409 error ("invalid branch to/from %s structured block", kind);
12412 gsi_replace (gsi_p, gimple_build_nop (), false);
12413 return true;
12416 /* Pass 1: Create a minimal tree of structured blocks, and record
12417 where each label is found. */
12419 static tree
12420 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
12421 struct walk_stmt_info *wi)
12423 gimple *context = (gimple *) wi->info;
12424 gimple *inner_context;
12425 gimple *stmt = gsi_stmt (*gsi_p);
12427 *handled_ops_p = true;
12429 switch (gimple_code (stmt))
12431 WALK_SUBSTMTS;
12433 case GIMPLE_OMP_PARALLEL:
12434 case GIMPLE_OMP_TASK:
12435 case GIMPLE_OMP_SECTIONS:
12436 case GIMPLE_OMP_SINGLE:
12437 case GIMPLE_OMP_SECTION:
12438 case GIMPLE_OMP_MASTER:
12439 case GIMPLE_OMP_ORDERED:
12440 case GIMPLE_OMP_SCAN:
12441 case GIMPLE_OMP_CRITICAL:
12442 case GIMPLE_OMP_TARGET:
12443 case GIMPLE_OMP_TEAMS:
12444 case GIMPLE_OMP_TASKGROUP:
12445 /* The minimal context here is just the current OMP construct. */
12446 inner_context = stmt;
12447 wi->info = inner_context;
12448 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
12449 wi->info = context;
12450 break;
12452 case GIMPLE_OMP_FOR:
12453 inner_context = stmt;
12454 wi->info = inner_context;
12455 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12456 walk them. */
12457 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12458 diagnose_sb_1, NULL, wi);
12459 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
12460 wi->info = context;
12461 break;
12463 case GIMPLE_LABEL:
12464 splay_tree_insert (all_labels,
12465 (splay_tree_key) gimple_label_label (
12466 as_a <glabel *> (stmt)),
12467 (splay_tree_value) context);
12468 break;
12470 default:
12471 break;
12474 return NULL_TREE;
12477 /* Pass 2: Check each branch and see if its context differs from that of
12478 the destination label's context. */
12480 static tree
12481 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
12482 struct walk_stmt_info *wi)
12484 gimple *context = (gimple *) wi->info;
12485 splay_tree_node n;
12486 gimple *stmt = gsi_stmt (*gsi_p);
12488 *handled_ops_p = true;
12490 switch (gimple_code (stmt))
12492 WALK_SUBSTMTS;
12494 case GIMPLE_OMP_PARALLEL:
12495 case GIMPLE_OMP_TASK:
12496 case GIMPLE_OMP_SECTIONS:
12497 case GIMPLE_OMP_SINGLE:
12498 case GIMPLE_OMP_SECTION:
12499 case GIMPLE_OMP_MASTER:
12500 case GIMPLE_OMP_ORDERED:
12501 case GIMPLE_OMP_SCAN:
12502 case GIMPLE_OMP_CRITICAL:
12503 case GIMPLE_OMP_TARGET:
12504 case GIMPLE_OMP_TEAMS:
12505 case GIMPLE_OMP_TASKGROUP:
12506 wi->info = stmt;
12507 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
12508 wi->info = context;
12509 break;
12511 case GIMPLE_OMP_FOR:
12512 wi->info = stmt;
12513 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12514 walk them. */
12515 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
12516 diagnose_sb_2, NULL, wi);
12517 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
12518 wi->info = context;
12519 break;
12521 case GIMPLE_COND:
12523 gcond *cond_stmt = as_a <gcond *> (stmt);
12524 tree lab = gimple_cond_true_label (cond_stmt);
12525 if (lab)
12527 n = splay_tree_lookup (all_labels,
12528 (splay_tree_key) lab);
12529 diagnose_sb_0 (gsi_p, context,
12530 n ? (gimple *) n->value : NULL);
12532 lab = gimple_cond_false_label (cond_stmt);
12533 if (lab)
12535 n = splay_tree_lookup (all_labels,
12536 (splay_tree_key) lab);
12537 diagnose_sb_0 (gsi_p, context,
12538 n ? (gimple *) n->value : NULL);
12541 break;
12543 case GIMPLE_GOTO:
12545 tree lab = gimple_goto_dest (stmt);
12546 if (TREE_CODE (lab) != LABEL_DECL)
12547 break;
12549 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
12550 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
12552 break;
12554 case GIMPLE_SWITCH:
12556 gswitch *switch_stmt = as_a <gswitch *> (stmt);
12557 unsigned int i;
12558 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
12560 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
12561 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
12562 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
12563 break;
12566 break;
12568 case GIMPLE_RETURN:
12569 diagnose_sb_0 (gsi_p, context, NULL);
12570 break;
12572 default:
12573 break;
12576 return NULL_TREE;
12579 static unsigned int
12580 diagnose_omp_structured_block_errors (void)
12582 struct walk_stmt_info wi;
12583 gimple_seq body = gimple_body (current_function_decl);
12585 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
12587 memset (&wi, 0, sizeof (wi));
12588 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
12590 memset (&wi, 0, sizeof (wi));
12591 wi.want_locations = true;
12592 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
12594 gimple_set_body (current_function_decl, body);
12596 splay_tree_delete (all_labels);
12597 all_labels = NULL;
12599 return 0;
12602 namespace {
12604 const pass_data pass_data_diagnose_omp_blocks =
12606 GIMPLE_PASS, /* type */
12607 "*diagnose_omp_blocks", /* name */
12608 OPTGROUP_OMP, /* optinfo_flags */
12609 TV_NONE, /* tv_id */
12610 PROP_gimple_any, /* properties_required */
12611 0, /* properties_provided */
12612 0, /* properties_destroyed */
12613 0, /* todo_flags_start */
12614 0, /* todo_flags_finish */
12617 class pass_diagnose_omp_blocks : public gimple_opt_pass
12619 public:
12620 pass_diagnose_omp_blocks (gcc::context *ctxt)
12621 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
12624 /* opt_pass methods: */
12625 virtual bool gate (function *)
12627 return flag_openacc || flag_openmp || flag_openmp_simd;
12629 virtual unsigned int execute (function *)
12631 return diagnose_omp_structured_block_errors ();
12634 }; // class pass_diagnose_omp_blocks
12636 } // anon namespace
12638 gimple_opt_pass *
12639 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
12641 return new pass_diagnose_omp_blocks (ctxt);
12645 #include "gt-omp-low.h"