/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

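/* For illustration (a sketch of the overall effect, not literal output
   of this pass): given

       #pragma omp parallel shared(x)
       x++;

   the scan phase collects X into an .omp_data_s record type, the
   lowering phase rewrites the body in terms of the received pointer to
   that record, and pass_expand_omp later splits the body into a child
   function that the libgomp runtime invokes (via GOMP_parallel).  The
   exact code generated varies with the clauses and flags involved.  */
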
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen0;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

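/* WALK_SUBSTMTS expands inside the switch of walk_gimple_seq callbacks
   (see omp_find_combined_for below); for container statements such as
   GIMPLE_BIND it clears *HANDLED_OPS_P so that the walker descends into
   their sub-statements rather than treating them as already handled.  */
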
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable to not have
   been entered; otherwise we assert that the variable must have been
   entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

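/* An illustrative case (a sketch, not from the sources): for

       int x;
       #pragma omp task shared(x)
       x = 1;

   this returns true for X and marks it addressable, because the task
   may be deferred and still reference X after GOMP_task returns, so
   copy-in/copy-out would not be safe.  */
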
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is so only because a task needs to take
     its address.  But we don't need to take the address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

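/* E.g. the result is roughly (*.omp_data_i).var, or *(*.omp_data_i).var
   when BY_REF, where .omp_data_i is CTX->RECEIVER_DECL (see
   create_omp_child_function below).  */
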
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

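/* The result here is roughly .omp_data_o.var, where CTX->SENDER_DECL is
   the local record the sending side fills in before invoking the child
   function (typically named .omp_data_o; the exact decl is whatever
   lowering created for this context).  */
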
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK
   is a bitmask: bit 0 installs the field in CTX->RECORD_TYPE/FIELD_MAP,
   bit 1 in CTX->SRECORD_TYPE/SFIELD_MAP, bit 2 wraps an array type in a
   double pointer, and bit 3 keys the maps by &DECL_UID (VAR) rather
   than by VAR itself.  BY_REF makes the field a pointer to VAR's
   type.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && (gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}

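/* E.g. the child function for the first parallel region in foo is
   typically named foo._omp_fn.0; clone_function_name_numbered appends
   the running number.  */
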
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}

/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}

/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}

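/* An illustrative count (a sketch): for a combined

       #pragma omp parallel for collapse(2) lastprivate(x)

   with a non-constant inner loop bound, COUNT above becomes 4: istart,
   iend, one temporary for count2 and one for the total iteration count
   used by the lastprivate handling.  */
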
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

1945 /* Scan an OpenMP task directive. */
1947 static void
1948 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1950 omp_context *ctx;
1951 tree name, t;
1952 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1954 /* Ignore task directives with empty bodies, unless they have a depend
1955 clause. */
1956 if (optimize > 0
1957 && gimple_omp_body (stmt)
1958 && empty_body_p (gimple_omp_body (stmt))
1959 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1961 gsi_replace (gsi, gimple_build_nop (), false);
1962 return;
1965 if (gimple_omp_task_taskloop_p (stmt))
1966 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1968 ctx = new_omp_context (stmt, outer_ctx);
1970 if (gimple_omp_task_taskwait_p (stmt))
1972 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1973 return;
1976 taskreg_contexts.safe_push (ctx);
1977 if (taskreg_nesting_level > 1)
1978 ctx->is_nested = true;
1979 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1980 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1981 name = create_tmp_var_name (".omp_data_s");
1982 name = build_decl (gimple_location (stmt),
1983 TYPE_DECL, name, ctx->record_type);
1984 DECL_ARTIFICIAL (name) = 1;
1985 DECL_NAMELESS (name) = 1;
1986 TYPE_NAME (ctx->record_type) = name;
1987 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1988 create_omp_child_function (ctx, false);
1989 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1991 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1993 if (ctx->srecord_type)
1995 name = create_tmp_var_name (".omp_data_a");
1996 name = build_decl (gimple_location (stmt),
1997 TYPE_DECL, name, ctx->srecord_type);
1998 DECL_ARTIFICIAL (name) = 1;
1999 DECL_NAMELESS (name) = 1;
2000 TYPE_NAME (ctx->srecord_type) = name;
2001 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2002 create_omp_child_function (ctx, true);
2005 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2007 if (TYPE_FIELDS (ctx->record_type) == NULL)
2009 ctx->record_type = ctx->receiver_decl = NULL;
2010 t = build_int_cst (long_integer_type_node, 0);
2011 gimple_omp_task_set_arg_size (stmt, t);
2012 t = build_int_cst (long_integer_type_node, 1);
2013 gimple_omp_task_set_arg_align (stmt, t);
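/* Note the degenerate case above: when the scan leaves no fields in
   record_type, the task is given arg_size == 0 and arg_align == 1,
   e.g. (illustrative) for

       #pragma omp task depend (out: a)
       ;

   whose empty body survives only because of the depend clause; there
   is then nothing to marshal into an argument block.  */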
2017 /* Helper function for finish_taskreg_scan, called through walk_tree.
2018 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
2019 variable, replace it in the expression. */
2021 static tree
2022 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2024 if (VAR_P (*tp))
2026 omp_context *ctx = (omp_context *) data;
2027 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2028 if (t != *tp)
2030 if (DECL_HAS_VALUE_EXPR_P (t))
2031 t = unshare_expr (DECL_VALUE_EXPR (t));
2032 *tp = t;
2034 *walk_subtrees = 0;
2036 else if (IS_TYPE_OR_DECL_P (*tp))
2037 *walk_subtrees = 0;
2038 return NULL_TREE;
2041 /* If any decls have been made addressable during scan_omp,
2042 adjust their fields if needed, and lay out the record types
2043 of parallel/task constructs. */
2045 static void
2046 finish_taskreg_scan (omp_context *ctx)
2048 if (ctx->record_type == NULL_TREE)
2049 return;
2051 /* If any task_shared_vars were needed, verify for all
2052 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2053 statements that use_pointer_for_field hasn't changed
2054 because of that. If it did, update the field types now. */
2055 if (task_shared_vars)
2057 tree c;
2059 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2060 c; c = OMP_CLAUSE_CHAIN (c))
2061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2062 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2064 tree decl = OMP_CLAUSE_DECL (c);
2066 /* Global variables don't need to be copied,
2067 the receiver side will use them directly. */
2068 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2069 continue;
2070 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2071 || !use_pointer_for_field (decl, ctx))
2072 continue;
2073 tree field = lookup_field (decl, ctx);
2074 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2075 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2076 continue;
2077 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2078 TREE_THIS_VOLATILE (field) = 0;
2079 DECL_USER_ALIGN (field) = 0;
2080 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2081 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2082 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2083 if (ctx->srecord_type)
2085 tree sfield = lookup_sfield (decl, ctx);
2086 TREE_TYPE (sfield) = TREE_TYPE (field);
2087 TREE_THIS_VOLATILE (sfield) = 0;
2088 DECL_USER_ALIGN (sfield) = 0;
2089 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2090 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2091 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2096 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2098 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2099 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2100 if (c)
2102 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2103 expects to find it at the start of data. */
2104 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2105 tree *p = &TYPE_FIELDS (ctx->record_type);
2106 while (*p)
2107 if (*p == f)
2109 *p = DECL_CHAIN (*p);
2110 break;
2112 else
2113 p = &DECL_CHAIN (*p);
2114 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2115 TYPE_FIELDS (ctx->record_type) = f;
2117 layout_type (ctx->record_type);
2118 fixup_child_record_type (ctx);
2120 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2122 layout_type (ctx->record_type);
2123 fixup_child_record_type (ctx);
2125 else
2127 location_t loc = gimple_location (ctx->stmt);
2128 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2129 /* Move VLA fields to the end. */
2130 p = &TYPE_FIELDS (ctx->record_type);
2131 while (*p)
2132 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2133 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2135 *q = *p;
2136 *p = TREE_CHAIN (*p);
2137 TREE_CHAIN (*q) = NULL_TREE;
2138 q = &TREE_CHAIN (*q);
2140 else
2141 p = &DECL_CHAIN (*p);
2142 *p = vla_fields;
2143 if (gimple_omp_task_taskloop_p (ctx->stmt))
2145 /* Move the fields corresponding to the first and second
2146 _looptemp_ clauses first. They are filled in by GOMP_taskloop
2147 and thus need to be in specific positions. */
2148 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2149 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2150 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2151 OMP_CLAUSE__LOOPTEMP_);
2152 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2153 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2154 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2155 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2156 p = &TYPE_FIELDS (ctx->record_type);
2157 while (*p)
2158 if (*p == f1 || *p == f2 || *p == f3)
2159 *p = DECL_CHAIN (*p);
2160 else
2161 p = &DECL_CHAIN (*p);
2162 DECL_CHAIN (f1) = f2;
2163 if (c3)
2165 DECL_CHAIN (f2) = f3;
2166 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2168 else
2169 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2170 TYPE_FIELDS (ctx->record_type) = f1;
2171 if (ctx->srecord_type)
2173 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2174 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2175 if (c3)
2176 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2177 p = &TYPE_FIELDS (ctx->srecord_type);
2178 while (*p)
2179 if (*p == f1 || *p == f2 || *p == f3)
2180 *p = DECL_CHAIN (*p);
2181 else
2182 p = &DECL_CHAIN (*p);
2183 DECL_CHAIN (f1) = f2;
2184 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2185 if (c3)
2187 DECL_CHAIN (f2) = f3;
2188 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2190 else
2191 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2192 TYPE_FIELDS (ctx->srecord_type) = f1;
2195 layout_type (ctx->record_type);
2196 fixup_child_record_type (ctx);
2197 if (ctx->srecord_type)
2198 layout_type (ctx->srecord_type);
2199 tree t = fold_convert_loc (loc, long_integer_type_node,
2200 TYPE_SIZE_UNIT (ctx->record_type));
2201 if (TREE_CODE (t) != INTEGER_CST)
2203 t = unshare_expr (t);
2204 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2206 gimple_omp_task_set_arg_size (ctx->stmt, t);
2207 t = build_int_cst (long_integer_type_node,
2208 TYPE_ALIGN_UNIT (ctx->record_type));
2209 gimple_omp_task_set_arg_align (ctx->stmt, t);
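/* A rough sketch of the resulting record layout for a taskloop
   (assuming no VLA fields): GOMP_taskloop stores the start and end
   iterator values into the first two slots of the argument block, so
   after the reordering above the record begins

       struct .omp_data_s { <f1 = first _looptemp_>;
			    <f2 = second _looptemp_>;
			    <f3 = _reductemp_, if present>;
			    ... remaining fields, VLA-typed ones last ... };

   and arg_size/arg_align are then derived from the laid-out type,
   remapped through finish_taskreg_remap if the size is not constant.  */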
2213 /* Find the enclosing offload context. */
2215 static omp_context *
2216 enclosing_target_ctx (omp_context *ctx)
2218 for (; ctx; ctx = ctx->outer)
2219 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2220 break;
2222 return ctx;
2225 /* Return true if ctx is part of an oacc kernels region. */
2227 static bool
2228 ctx_in_oacc_kernels_region (omp_context *ctx)
2230 for (; ctx != NULL; ctx = ctx->outer)
2232 gimple *stmt = ctx->stmt;
2233 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2234 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2235 return true;
2238 return false;
2241 /* Check the parallelism clauses inside a kernels region.
2242 Until kernels handling moves to use the same loop indirection
2243 scheme as parallel, we need to do this checking early. */
2245 static unsigned
2246 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2248 bool checking = true;
2249 unsigned outer_mask = 0;
2250 unsigned this_mask = 0;
2251 bool has_seq = false, has_auto = false;
2253 if (ctx->outer)
2254 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2255 if (!stmt)
2257 checking = false;
2258 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2259 return outer_mask;
2260 stmt = as_a <gomp_for *> (ctx->stmt);
2263 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2265 switch (OMP_CLAUSE_CODE (c))
2267 case OMP_CLAUSE_GANG:
2268 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2269 break;
2270 case OMP_CLAUSE_WORKER:
2271 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2272 break;
2273 case OMP_CLAUSE_VECTOR:
2274 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2275 break;
2276 case OMP_CLAUSE_SEQ:
2277 has_seq = true;
2278 break;
2279 case OMP_CLAUSE_AUTO:
2280 has_auto = true;
2281 break;
2282 default:
2283 break;
2287 if (checking)
2289 if (has_seq && (this_mask || has_auto))
2290 error_at (gimple_location (stmt), "%<seq%> overrides other"
2291 " OpenACC loop specifiers");
2292 else if (has_auto && this_mask)
2293 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2294 " OpenACC loop specifiers");
2296 if (this_mask & outer_mask)
2297 error_at (gimple_location (stmt), "inner loop uses same"
2298 " OpenACC parallelism as containing loop");
2301 return outer_mask | this_mask;
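/* An illustrative reject (source sketch):

       #pragma acc kernels
       {
	 #pragma acc loop gang
	 for (i = 0; i < n; i++)
	   {
	     #pragma acc loop gang
	     for (j = 0; j < m; j++)
	       ...
	   }
       }

   The recursion ORs each enclosing loop's gang/worker/vector bits into
   OUTER_MASK, so the inner loop's repeated GOMP_DIM_MASK (GOMP_DIM_GANG)
   bit overlaps it and produces the "inner loop uses same OpenACC
   parallelism as containing loop" error above.  */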
2304 /* Scan a GIMPLE_OMP_FOR. */
2306 static omp_context *
2307 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2309 omp_context *ctx;
2310 size_t i;
2311 tree clauses = gimple_omp_for_clauses (stmt);
2313 ctx = new_omp_context (stmt, outer_ctx);
2315 if (is_gimple_omp_oacc (stmt))
2317 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2319 if (!tgt || is_oacc_parallel (tgt))
2320 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2322 char const *check = NULL;
2324 switch (OMP_CLAUSE_CODE (c))
2326 case OMP_CLAUSE_GANG:
2327 check = "gang";
2328 break;
2330 case OMP_CLAUSE_WORKER:
2331 check = "worker";
2332 break;
2334 case OMP_CLAUSE_VECTOR:
2335 check = "vector";
2336 break;
2338 default:
2339 break;
2342 if (check && OMP_CLAUSE_OPERAND (c, 0))
2343 error_at (gimple_location (stmt),
2344 "argument not permitted on %qs clause in"
2345 " OpenACC %<parallel%>", check);
2348 if (tgt && is_oacc_kernels (tgt))
2350 /* Strip out reductions, as they are not handled yet. */
2351 tree *prev_ptr = &clauses;
2353 while (tree probe = *prev_ptr)
2355 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2357 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2358 *prev_ptr = *next_ptr;
2359 else
2360 prev_ptr = next_ptr;
2363 gimple_omp_for_set_clauses (stmt, clauses);
2364 check_oacc_kernel_gwv (stmt, ctx);
2368 scan_sharing_clauses (clauses, ctx);
2370 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2371 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2373 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2374 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2375 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2376 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2378 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2379 return ctx;
2382 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2384 static void
2385 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2386 omp_context *outer_ctx)
2388 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2389 gsi_replace (gsi, bind, false);
2390 gimple_seq seq = NULL;
2391 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2392 tree cond = create_tmp_var_raw (integer_type_node);
2393 DECL_CONTEXT (cond) = current_function_decl;
2394 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2395 gimple_bind_set_vars (bind, cond);
2396 gimple_call_set_lhs (g, cond);
2397 gimple_seq_add_stmt (&seq, g);
2398 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2399 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2400 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2401 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2402 gimple_seq_add_stmt (&seq, g);
2403 g = gimple_build_label (lab1);
2404 gimple_seq_add_stmt (&seq, g);
2405 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2406 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2407 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2408 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2409 gimple_omp_for_set_clauses (new_stmt, clause);
2410 gimple_seq_add_stmt (&seq, new_stmt);
2411 g = gimple_build_goto (lab3);
2412 gimple_seq_add_stmt (&seq, g);
2413 g = gimple_build_label (lab2);
2414 gimple_seq_add_stmt (&seq, g);
2415 gimple_seq_add_stmt (&seq, stmt);
2416 g = gimple_build_label (lab3);
2417 gimple_seq_add_stmt (&seq, g);
2418 gimple_bind_set_body (bind, seq);
2419 update_stmt (bind);
2420 scan_omp_for (new_stmt, outer_ctx);
2421 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
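/* The bind built above has roughly this shape (pseudo-GIMPLE):

       cond = IFN_GOMP_USE_SIMT ();
       if (cond != 0) goto lab1; else goto lab2;
       lab1:
	 #pragma omp simd _simt_ ...	<- copy, SIMT variant
	 goto lab3;
       lab2:
	 #pragma omp simd ...		<- original, SIMD variant
       lab3:

   Both variants are then scanned, with the SIMD one recording the SIMT
   copy in its context's simt_stmt so later passes can pick one.  */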
2424 /* Scan an OpenMP sections directive. */
2426 static void
2427 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2429 omp_context *ctx;
2431 ctx = new_omp_context (stmt, outer_ctx);
2432 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2433 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2436 /* Scan an OpenMP single directive. */
2438 static void
2439 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2441 omp_context *ctx;
2442 tree name;
2444 ctx = new_omp_context (stmt, outer_ctx);
2445 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2446 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2447 name = create_tmp_var_name (".omp_copy_s");
2448 name = build_decl (gimple_location (stmt),
2449 TYPE_DECL, name, ctx->record_type);
2450 TYPE_NAME (ctx->record_type) = name;
2452 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2453 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2455 if (TYPE_FIELDS (ctx->record_type) == NULL)
2456 ctx->record_type = NULL;
2457 else
2458 layout_type (ctx->record_type);
2461 /* Scan a GIMPLE_OMP_TARGET. */
2463 static void
2464 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2466 omp_context *ctx;
2467 tree name;
2468 bool offloaded = is_gimple_omp_offloaded (stmt);
2469 tree clauses = gimple_omp_target_clauses (stmt);
2471 ctx = new_omp_context (stmt, outer_ctx);
2472 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2473 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2474 name = create_tmp_var_name (".omp_data_t");
2475 name = build_decl (gimple_location (stmt),
2476 TYPE_DECL, name, ctx->record_type);
2477 DECL_ARTIFICIAL (name) = 1;
2478 DECL_NAMELESS (name) = 1;
2479 TYPE_NAME (ctx->record_type) = name;
2480 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2482 if (offloaded)
2484 create_omp_child_function (ctx, false);
2485 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2488 scan_sharing_clauses (clauses, ctx);
2489 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2491 if (TYPE_FIELDS (ctx->record_type) == NULL)
2492 ctx->record_type = ctx->receiver_decl = NULL;
2493 else
2495 TYPE_FIELDS (ctx->record_type)
2496 = nreverse (TYPE_FIELDS (ctx->record_type));
2497 if (flag_checking)
2499 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2500 for (tree field = TYPE_FIELDS (ctx->record_type);
2501 field;
2502 field = DECL_CHAIN (field))
2503 gcc_assert (DECL_ALIGN (field) == align);
2505 layout_type (ctx->record_type);
2506 if (offloaded)
2507 fixup_child_record_type (ctx);
2511 /* Scan an OpenMP teams directive. */
2513 static void
2514 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2516 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2518 if (!gimple_omp_teams_host (stmt))
2520 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2521 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2522 return;
2524 taskreg_contexts.safe_push (ctx);
2525 gcc_assert (taskreg_nesting_level == 1);
2526 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2527 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2528 tree name = create_tmp_var_name (".omp_data_s");
2529 name = build_decl (gimple_location (stmt),
2530 TYPE_DECL, name, ctx->record_type);
2531 DECL_ARTIFICIAL (name) = 1;
2532 DECL_NAMELESS (name) = 1;
2533 TYPE_NAME (ctx->record_type) = name;
2534 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2535 create_omp_child_function (ctx, false);
2536 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2538 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2539 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2541 if (TYPE_FIELDS (ctx->record_type) == NULL)
2542 ctx->record_type = ctx->receiver_decl = NULL;
2545 /* Check nesting restrictions. */
2546 static bool
2547 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2549 tree c;
2551 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2552 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2553 the original copy of its contents. */
2554 return true;
2556 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2557 inside an OpenACC CTX. */
2558 if (!(is_gimple_omp (stmt)
2559 && is_gimple_omp_oacc (stmt))
2560 /* Except for atomic codes that we share with OpenMP. */
2561 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2562 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2564 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2566 error_at (gimple_location (stmt),
2567 "non-OpenACC construct inside of OpenACC routine");
2568 return false;
2570 else
2571 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2572 if (is_gimple_omp (octx->stmt)
2573 && is_gimple_omp_oacc (octx->stmt))
2575 error_at (gimple_location (stmt),
2576 "non-OpenACC construct inside of OpenACC region");
2577 return false;
2581 if (ctx != NULL)
2583 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2584 && ctx->outer
2585 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2586 ctx = ctx->outer;
2587 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2588 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2590 c = NULL_TREE;
2591 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2593 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2594 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2596 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2597 && (ctx->outer == NULL
2598 || !gimple_omp_for_combined_into_p (ctx->stmt)
2599 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2600 || (gimple_omp_for_kind (ctx->outer->stmt)
2601 != GF_OMP_FOR_KIND_FOR)
2602 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2604 error_at (gimple_location (stmt),
2605 "%<ordered simd threads%> must be closely "
2606 "nested inside of %<for simd%> region");
2607 return false;
2609 return true;
2612 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2613 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2614 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2615 return true;
2616 error_at (gimple_location (stmt),
2617 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2618 " or %<#pragma omp atomic%> may not be nested inside"
2619 " %<simd%> region");
2620 return false;
2622 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2624 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2625 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2626 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2627 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2629 error_at (gimple_location (stmt),
2630 "only %<distribute%> or %<parallel%> regions are "
2631 "allowed to be strictly nested inside %<teams%> "
2632 "region");
2633 return false;
2637 switch (gimple_code (stmt))
2639 case GIMPLE_OMP_FOR:
2640 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2641 return true;
2642 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2644 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2646 error_at (gimple_location (stmt),
2647 "%<distribute%> region must be strictly nested "
2648 "inside %<teams%> construct");
2649 return false;
2651 return true;
2653 /* We split taskloop into a task construct with the taskloop nested in it. */
2654 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2655 return true;
2656 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2658 bool ok = false;
2660 if (ctx)
2661 switch (gimple_code (ctx->stmt))
2663 case GIMPLE_OMP_FOR:
2664 ok = (gimple_omp_for_kind (ctx->stmt)
2665 == GF_OMP_FOR_KIND_OACC_LOOP);
2666 break;
2668 case GIMPLE_OMP_TARGET:
2669 switch (gimple_omp_target_kind (ctx->stmt))
2671 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2672 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2673 ok = true;
2674 break;
2676 default:
2677 break;
2680 default:
2681 break;
2683 else if (oacc_get_fn_attrib (current_function_decl))
2684 ok = true;
2685 if (!ok)
2687 error_at (gimple_location (stmt),
2688 "OpenACC loop directive must be associated with"
2689 " an OpenACC compute region");
2690 return false;
2693 /* FALLTHRU */
2694 case GIMPLE_CALL:
2695 if (is_gimple_call (stmt)
2696 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2697 == BUILT_IN_GOMP_CANCEL
2698 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2699 == BUILT_IN_GOMP_CANCELLATION_POINT))
2701 const char *bad = NULL;
2702 const char *kind = NULL;
2703 const char *construct
2704 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2705 == BUILT_IN_GOMP_CANCEL)
2706 ? "#pragma omp cancel"
2707 : "#pragma omp cancellation point";
2708 if (ctx == NULL)
2710 error_at (gimple_location (stmt), "orphaned %qs construct",
2711 construct);
2712 return false;
2714 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2715 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2716 : 0)
2718 case 1:
2719 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2720 bad = "#pragma omp parallel";
2721 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2722 == BUILT_IN_GOMP_CANCEL
2723 && !integer_zerop (gimple_call_arg (stmt, 1)))
2724 ctx->cancellable = true;
2725 kind = "parallel";
2726 break;
2727 case 2:
2728 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2729 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2730 bad = "#pragma omp for";
2731 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2732 == BUILT_IN_GOMP_CANCEL
2733 && !integer_zerop (gimple_call_arg (stmt, 1)))
2735 ctx->cancellable = true;
2736 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2737 OMP_CLAUSE_NOWAIT))
2738 warning_at (gimple_location (stmt), 0,
2739 "%<#pragma omp cancel for%> inside "
2740 "%<nowait%> for construct");
2741 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2742 OMP_CLAUSE_ORDERED))
2743 warning_at (gimple_location (stmt), 0,
2744 "%<#pragma omp cancel for%> inside "
2745 "%<ordered%> for construct");
2747 kind = "for";
2748 break;
2749 case 4:
2750 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2751 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2752 bad = "#pragma omp sections";
2753 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2754 == BUILT_IN_GOMP_CANCEL
2755 && !integer_zerop (gimple_call_arg (stmt, 1)))
2757 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2759 ctx->cancellable = true;
2760 if (omp_find_clause (gimple_omp_sections_clauses
2761 (ctx->stmt),
2762 OMP_CLAUSE_NOWAIT))
2763 warning_at (gimple_location (stmt), 0,
2764 "%<#pragma omp cancel sections%> inside "
2765 "%<nowait%> sections construct");
2767 else
2769 gcc_assert (ctx->outer
2770 && gimple_code (ctx->outer->stmt)
2771 == GIMPLE_OMP_SECTIONS);
2772 ctx->outer->cancellable = true;
2773 if (omp_find_clause (gimple_omp_sections_clauses
2774 (ctx->outer->stmt),
2775 OMP_CLAUSE_NOWAIT))
2776 warning_at (gimple_location (stmt), 0,
2777 "%<#pragma omp cancel sections%> inside "
2778 "%<nowait%> sections construct");
2781 kind = "sections";
2782 break;
2783 case 8:
2784 if (!is_task_ctx (ctx)
2785 && (!is_taskloop_ctx (ctx)
2786 || ctx->outer == NULL
2787 || !is_task_ctx (ctx->outer)))
2788 bad = "#pragma omp task";
2789 else
2791 for (omp_context *octx = ctx->outer;
2792 octx; octx = octx->outer)
2794 switch (gimple_code (octx->stmt))
2796 case GIMPLE_OMP_TASKGROUP:
2797 break;
2798 case GIMPLE_OMP_TARGET:
2799 if (gimple_omp_target_kind (octx->stmt)
2800 != GF_OMP_TARGET_KIND_REGION)
2801 continue;
2802 /* FALLTHRU */
2803 case GIMPLE_OMP_PARALLEL:
2804 case GIMPLE_OMP_TEAMS:
2805 error_at (gimple_location (stmt),
2806 "%<%s taskgroup%> construct not closely "
2807 "nested inside of %<taskgroup%> region",
2808 construct);
2809 return false;
2810 case GIMPLE_OMP_TASK:
2811 if (gimple_omp_task_taskloop_p (octx->stmt)
2812 && octx->outer
2813 && is_taskloop_ctx (octx->outer))
2815 tree clauses
2816 = gimple_omp_for_clauses (octx->outer->stmt);
2817 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2818 break;
2820 continue;
2821 default:
2822 continue;
2824 break;
2826 ctx->cancellable = true;
2828 kind = "taskgroup";
2829 break;
2830 default:
2831 error_at (gimple_location (stmt), "invalid arguments");
2832 return false;
2834 if (bad)
2836 error_at (gimple_location (stmt),
2837 "%<%s %s%> construct not closely nested inside of %qs",
2838 construct, kind, bad);
2839 return false;
2842 /* FALLTHRU */
2843 case GIMPLE_OMP_SECTIONS:
2844 case GIMPLE_OMP_SINGLE:
2845 for (; ctx != NULL; ctx = ctx->outer)
2846 switch (gimple_code (ctx->stmt))
2848 case GIMPLE_OMP_FOR:
2849 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2850 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2851 break;
2852 /* FALLTHRU */
2853 case GIMPLE_OMP_SECTIONS:
2854 case GIMPLE_OMP_SINGLE:
2855 case GIMPLE_OMP_ORDERED:
2856 case GIMPLE_OMP_MASTER:
2857 case GIMPLE_OMP_TASK:
2858 case GIMPLE_OMP_CRITICAL:
2859 if (is_gimple_call (stmt))
2861 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2862 != BUILT_IN_GOMP_BARRIER)
2863 return true;
2864 error_at (gimple_location (stmt),
2865 "barrier region may not be closely nested inside "
2866 "of work-sharing, %<critical%>, %<ordered%>, "
2867 "%<master%>, explicit %<task%> or %<taskloop%> "
2868 "region");
2869 return false;
2871 error_at (gimple_location (stmt),
2872 "work-sharing region may not be closely nested inside "
2873 "of work-sharing, %<critical%>, %<ordered%>, "
2874 "%<master%>, explicit %<task%> or %<taskloop%> region");
2875 return false;
2876 case GIMPLE_OMP_PARALLEL:
2877 case GIMPLE_OMP_TEAMS:
2878 return true;
2879 case GIMPLE_OMP_TARGET:
2880 if (gimple_omp_target_kind (ctx->stmt)
2881 == GF_OMP_TARGET_KIND_REGION)
2882 return true;
2883 break;
2884 default:
2885 break;
2887 break;
2888 case GIMPLE_OMP_MASTER:
2889 for (; ctx != NULL; ctx = ctx->outer)
2890 switch (gimple_code (ctx->stmt))
2892 case GIMPLE_OMP_FOR:
2893 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2894 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2895 break;
2896 /* FALLTHRU */
2897 case GIMPLE_OMP_SECTIONS:
2898 case GIMPLE_OMP_SINGLE:
2899 case GIMPLE_OMP_TASK:
2900 error_at (gimple_location (stmt),
2901 "%<master%> region may not be closely nested inside "
2902 "of work-sharing, explicit %<task%> or %<taskloop%> "
2903 "region");
2904 return false;
2905 case GIMPLE_OMP_PARALLEL:
2906 case GIMPLE_OMP_TEAMS:
2907 return true;
2908 case GIMPLE_OMP_TARGET:
2909 if (gimple_omp_target_kind (ctx->stmt)
2910 == GF_OMP_TARGET_KIND_REGION)
2911 return true;
2912 break;
2913 default:
2914 break;
2916 break;
2917 case GIMPLE_OMP_TASK:
2918 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2919 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2920 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2921 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2923 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2924 error_at (OMP_CLAUSE_LOCATION (c),
2925 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2926 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2927 return false;
2929 break;
2930 case GIMPLE_OMP_ORDERED:
2931 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2932 c; c = OMP_CLAUSE_CHAIN (c))
2934 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2936 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2937 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2938 continue;
2940 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2941 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2942 || kind == OMP_CLAUSE_DEPEND_SINK)
2944 tree oclause;
2945 /* Look for containing ordered(N) loop. */
2946 if (ctx == NULL
2947 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2948 || (oclause
2949 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2950 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2952 error_at (OMP_CLAUSE_LOCATION (c),
2953 "%<ordered%> construct with %<depend%> clause "
2954 "must be closely nested inside an %<ordered%> "
2955 "loop");
2956 return false;
2958 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2960 error_at (OMP_CLAUSE_LOCATION (c),
2961 "%<ordered%> construct with %<depend%> clause "
2962 "must be closely nested inside a loop with "
2963 "%<ordered%> clause with a parameter");
2964 return false;
2967 else
2969 error_at (OMP_CLAUSE_LOCATION (c),
2970 "invalid depend kind in omp %<ordered%> %<depend%>");
2971 return false;
2974 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2975 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2977 /* An ordered simd construct must be closely nested inside of a simd
2978 region, and a simd region must not encounter constructs other than
2979 ordered simd; therefore ordered simd may be either orphaned,
2980 or ctx->stmt must be simd. The latter case was already handled
2981 earlier. */
2982 if (ctx != NULL)
2984 error_at (gimple_location (stmt),
2985 "%<ordered%> %<simd%> must be closely nested inside "
2986 "%<simd%> region");
2987 return false;
2990 for (; ctx != NULL; ctx = ctx->outer)
2991 switch (gimple_code (ctx->stmt))
2993 case GIMPLE_OMP_CRITICAL:
2994 case GIMPLE_OMP_TASK:
2995 case GIMPLE_OMP_ORDERED:
2996 ordered_in_taskloop:
2997 error_at (gimple_location (stmt),
2998 "%<ordered%> region may not be closely nested inside "
2999 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3000 "%<taskloop%> region");
3001 return false;
3002 case GIMPLE_OMP_FOR:
3003 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3004 goto ordered_in_taskloop;
3005 tree o;
3006 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3007 OMP_CLAUSE_ORDERED);
3008 if (o == NULL)
3010 error_at (gimple_location (stmt),
3011 "%<ordered%> region must be closely nested inside "
3012 "a loop region with an %<ordered%> clause");
3013 return false;
3015 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3016 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3018 error_at (gimple_location (stmt),
3019 "%<ordered%> region without %<depend%> clause may "
3020 "not be closely nested inside a loop region with "
3021 "an %<ordered%> clause with a parameter");
3022 return false;
3024 return true;
3025 case GIMPLE_OMP_TARGET:
3026 if (gimple_omp_target_kind (ctx->stmt)
3027 != GF_OMP_TARGET_KIND_REGION)
3028 break;
3029 /* FALLTHRU */
3030 case GIMPLE_OMP_PARALLEL:
3031 case GIMPLE_OMP_TEAMS:
3032 error_at (gimple_location (stmt),
3033 "%<ordered%> region must be closely nested inside "
3034 "a loop region with an %<ordered%> clause");
3035 return false;
3036 default:
3037 break;
3039 break;
3040 case GIMPLE_OMP_CRITICAL:
3042 tree this_stmt_name
3043 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3044 for (; ctx != NULL; ctx = ctx->outer)
3045 if (gomp_critical *other_crit
3046 = dyn_cast <gomp_critical *> (ctx->stmt))
3047 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3049 error_at (gimple_location (stmt),
3050 "%<critical%> region may not be nested inside "
3051 "a %<critical%> region with the same name");
3052 return false;
3055 break;
3056 case GIMPLE_OMP_TEAMS:
3057 if (ctx == NULL)
3058 break;
3059 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3060 || (gimple_omp_target_kind (ctx->stmt)
3061 != GF_OMP_TARGET_KIND_REGION))
3063 /* A teams construct can appear either strictly nested inside of a
3064 target construct with no intervening stmts, or can be encountered
3065 only by the initial task (so it must not appear inside any OpenMP
3066 construct). */
3067 error_at (gimple_location (stmt),
3068 "%<teams%> construct must be closely nested inside of "
3069 "%<target%> construct or not nested in any OpenMP "
3070 "construct");
3071 return false;
3073 break;
3074 case GIMPLE_OMP_TARGET:
3075 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3076 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3077 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3078 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3080 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3081 error_at (OMP_CLAUSE_LOCATION (c),
3082 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3083 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3084 return false;
3086 if (is_gimple_omp_offloaded (stmt)
3087 && oacc_get_fn_attrib (cfun->decl) != NULL)
3089 error_at (gimple_location (stmt),
3090 "OpenACC region inside of OpenACC routine, nested "
3091 "parallelism not supported yet");
3092 return false;
3094 for (; ctx != NULL; ctx = ctx->outer)
3096 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3098 if (is_gimple_omp (stmt)
3099 && is_gimple_omp_oacc (stmt)
3100 && is_gimple_omp (ctx->stmt))
3102 error_at (gimple_location (stmt),
3103 "OpenACC construct inside of non-OpenACC region");
3104 return false;
3106 continue;
3109 const char *stmt_name, *ctx_stmt_name;
3110 switch (gimple_omp_target_kind (stmt))
3112 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3113 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3114 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3115 case GF_OMP_TARGET_KIND_ENTER_DATA:
3116 stmt_name = "target enter data"; break;
3117 case GF_OMP_TARGET_KIND_EXIT_DATA:
3118 stmt_name = "target exit data"; break;
3119 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3120 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3121 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3122 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3123 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3124 stmt_name = "enter/exit data"; break;
3125 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3126 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3127 break;
3128 default: gcc_unreachable ();
3130 switch (gimple_omp_target_kind (ctx->stmt))
3132 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3133 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3134 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3135 ctx_stmt_name = "parallel"; break;
3136 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3137 ctx_stmt_name = "kernels"; break;
3138 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3139 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3140 ctx_stmt_name = "host_data"; break;
3141 default: gcc_unreachable ();
3144 /* OpenACC/OpenMP mismatch? */
3145 if (is_gimple_omp_oacc (stmt)
3146 != is_gimple_omp_oacc (ctx->stmt))
3148 error_at (gimple_location (stmt),
3149 "%s %qs construct inside of %s %qs region",
3150 (is_gimple_omp_oacc (stmt)
3151 ? "OpenACC" : "OpenMP"), stmt_name,
3152 (is_gimple_omp_oacc (ctx->stmt)
3153 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3154 return false;
3156 if (is_gimple_omp_offloaded (ctx->stmt))
3158 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3159 if (is_gimple_omp_oacc (ctx->stmt))
3161 error_at (gimple_location (stmt),
3162 "%qs construct inside of %qs region",
3163 stmt_name, ctx_stmt_name);
3164 return false;
3166 else
3168 warning_at (gimple_location (stmt), 0,
3169 "%qs construct inside of %qs region",
3170 stmt_name, ctx_stmt_name);
3174 break;
3175 default:
3176 break;
3178 return true;
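/* Two illustrative rejects (source sketches):

       #pragma omp critical
       #pragma omp for
       for (i = 0; i < n; i++)
	 ...

   yields the "work-sharing region may not be closely nested inside of
   ... %<critical%> ..." error, and

       #pragma omp parallel
       #pragma omp teams
       ...

   yields the "%<teams%> construct must be closely nested inside of
   %<target%> construct or not nested in any OpenMP construct" error.
   Whenever this function returns false, the caller (scan_omp_1_stmt)
   replaces the offending statement with a nop.  */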
3182 /* Helper function for scan_omp.
3184 Callback for walk_tree or operators in walk_gimple_stmt used to
3185 scan for OMP directives in TP. */
3187 static tree
3188 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3190 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3191 omp_context *ctx = (omp_context *) wi->info;
3192 tree t = *tp;
3194 switch (TREE_CODE (t))
3196 case VAR_DECL:
3197 case PARM_DECL:
3198 case LABEL_DECL:
3199 case RESULT_DECL:
3200 if (ctx)
3202 tree repl = remap_decl (t, &ctx->cb);
3203 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3204 *tp = repl;
3206 break;
3208 default:
3209 if (ctx && TYPE_P (t))
3210 *tp = remap_type (t, &ctx->cb);
3211 else if (!DECL_P (t))
3213 *walk_subtrees = 1;
3214 if (ctx)
3216 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3217 if (tem != TREE_TYPE (t))
3219 if (TREE_CODE (t) == INTEGER_CST)
3220 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3221 else
3222 TREE_TYPE (t) = tem;
3226 break;
3229 return NULL_TREE;
3232 /* Return true if FNDECL is a setjmp or a longjmp. */
3234 static bool
3235 setjmp_or_longjmp_p (const_tree fndecl)
3237 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3238 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3239 return true;
3241 tree declname = DECL_NAME (fndecl);
3242 if (!declname)
3243 return false;
3244 const char *name = IDENTIFIER_POINTER (declname);
3245 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3249 /* Helper function for scan_omp.
3251 Callback for walk_gimple_stmt used to scan for OMP directives in
3252 the current statement in GSI. */
3254 static tree
3255 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3256 struct walk_stmt_info *wi)
3258 gimple *stmt = gsi_stmt (*gsi);
3259 omp_context *ctx = (omp_context *) wi->info;
3261 if (gimple_has_location (stmt))
3262 input_location = gimple_location (stmt);
3264 /* Check the nesting restrictions. */
3265 bool remove = false;
3266 if (is_gimple_omp (stmt))
3267 remove = !check_omp_nesting_restrictions (stmt, ctx);
3268 else if (is_gimple_call (stmt))
3270 tree fndecl = gimple_call_fndecl (stmt);
3271 if (fndecl)
3273 if (setjmp_or_longjmp_p (fndecl)
3274 && ctx
3275 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3276 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3278 remove = true;
3279 error_at (gimple_location (stmt),
3280 "setjmp/longjmp inside simd construct");
3282 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3283 switch (DECL_FUNCTION_CODE (fndecl))
3285 case BUILT_IN_GOMP_BARRIER:
3286 case BUILT_IN_GOMP_CANCEL:
3287 case BUILT_IN_GOMP_CANCELLATION_POINT:
3288 case BUILT_IN_GOMP_TASKYIELD:
3289 case BUILT_IN_GOMP_TASKWAIT:
3290 case BUILT_IN_GOMP_TASKGROUP_START:
3291 case BUILT_IN_GOMP_TASKGROUP_END:
3292 remove = !check_omp_nesting_restrictions (stmt, ctx);
3293 break;
3294 default:
3295 break;
3299 if (remove)
3301 stmt = gimple_build_nop ();
3302 gsi_replace (gsi, stmt, false);
3305 *handled_ops_p = true;
3307 switch (gimple_code (stmt))
3309 case GIMPLE_OMP_PARALLEL:
3310 taskreg_nesting_level++;
3311 scan_omp_parallel (gsi, ctx);
3312 taskreg_nesting_level--;
3313 break;
3315 case GIMPLE_OMP_TASK:
3316 taskreg_nesting_level++;
3317 scan_omp_task (gsi, ctx);
3318 taskreg_nesting_level--;
3319 break;
3321 case GIMPLE_OMP_FOR:
3322 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3323 == GF_OMP_FOR_KIND_SIMD)
3324 && omp_maybe_offloaded_ctx (ctx)
3325 && omp_max_simt_vf ())
3326 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3327 else
3328 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3329 break;
3331 case GIMPLE_OMP_SECTIONS:
3332 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3333 break;
3335 case GIMPLE_OMP_SINGLE:
3336 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3337 break;
3339 case GIMPLE_OMP_SCAN:
3340 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3342 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3343 ctx->scan_inclusive = true;
3344 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3345 ctx->scan_exclusive = true;
3347 /* FALLTHRU */
3348 case GIMPLE_OMP_SECTION:
3349 case GIMPLE_OMP_MASTER:
3350 case GIMPLE_OMP_ORDERED:
3351 case GIMPLE_OMP_CRITICAL:
3352 case GIMPLE_OMP_GRID_BODY:
3353 ctx = new_omp_context (stmt, ctx);
3354 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3355 break;
3357 case GIMPLE_OMP_TASKGROUP:
3358 ctx = new_omp_context (stmt, ctx);
3359 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3360 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3361 break;
3363 case GIMPLE_OMP_TARGET:
3364 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3365 break;
3367 case GIMPLE_OMP_TEAMS:
3368 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3370 taskreg_nesting_level++;
3371 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3372 taskreg_nesting_level--;
3374 else
3375 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3376 break;
3378 case GIMPLE_BIND:
3380 tree var;
3382 *handled_ops_p = false;
3383 if (ctx)
3384 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3385 var ;
3386 var = DECL_CHAIN (var))
3387 insert_decl_map (&ctx->cb, var, var);
3389 break;
3390 default:
3391 *handled_ops_p = false;
3392 break;
3395 return NULL_TREE;
3399 /* Scan all the statements starting at the current statement. CTX
3400 contains context information about the OMP directives and
3401 clauses found during the scan. */
3403 static void
3404 scan_omp (gimple_seq *body_p, omp_context *ctx)
3406 location_t saved_location;
3407 struct walk_stmt_info wi;
3409 memset (&wi, 0, sizeof (wi));
3410 wi.info = ctx;
3411 wi.want_locations = true;
3413 saved_location = input_location;
3414 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3415 input_location = saved_location;
3418 /* Re-gimplification and code generation routines. */
3420 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3421 of BIND if in a method. */
3423 static void
3424 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3426 if (DECL_ARGUMENTS (current_function_decl)
3427 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3428 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3429 == POINTER_TYPE))
3431 tree vars = gimple_bind_vars (bind);
3432 for (tree *pvar = &vars; *pvar; )
3433 if (omp_member_access_dummy_var (*pvar))
3434 *pvar = DECL_CHAIN (*pvar);
3435 else
3436 pvar = &DECL_CHAIN (*pvar);
3437 gimple_bind_set_vars (bind, vars);
3441 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3442 block and its subblocks. */
3444 static void
3445 remove_member_access_dummy_vars (tree block)
3447 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3448 if (omp_member_access_dummy_var (*pvar))
3449 *pvar = DECL_CHAIN (*pvar);
3450 else
3451 pvar = &DECL_CHAIN (*pvar);
3453 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3454 remove_member_access_dummy_vars (block);
3457 /* If a context was created for STMT when it was scanned, return it. */
3459 static omp_context *
3460 maybe_lookup_ctx (gimple *stmt)
3462 splay_tree_node n;
3463 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3464 return n ? (omp_context *) n->value : NULL;
3468 /* Find the mapping for DECL in CTX or the immediately enclosing
3469 context that has a mapping for DECL.
3471 If CTX is a nested parallel directive, we may have to use the decl
3472 mappings created in CTX's parent context. Suppose that we have the
3473 following parallel nesting (variable UIDs shown for clarity):
3475 iD.1562 = 0;
3476 #omp parallel shared(iD.1562) -> outer parallel
3477 iD.1562 = iD.1562 + 1;
3479 #omp parallel shared (iD.1562) -> inner parallel
3480 iD.1562 = iD.1562 - 1;
3482 Each parallel structure will create a distinct .omp_data_s structure
3483 for copying iD.1562 in/out of the directive:
3485 outer parallel .omp_data_s.1.i -> iD.1562
3486 inner parallel .omp_data_s.2.i -> iD.1562
3488 A shared variable mapping will produce a copy-out operation before
3489 the parallel directive and a copy-in operation after it. So, in
3490 this case we would have:
3492 iD.1562 = 0;
3493 .omp_data_o.1.i = iD.1562;
3494 #omp parallel shared(iD.1562) -> outer parallel
3495 .omp_data_i.1 = &.omp_data_o.1
3496 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3498 .omp_data_o.2.i = iD.1562; -> **
3499 #omp parallel shared(iD.1562) -> inner parallel
3500 .omp_data_i.2 = &.omp_data_o.2
3501 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3504 ** This is a problem. The symbol iD.1562 cannot be referenced
3505 inside the body of the outer parallel region. But since we are
3506 emitting this copy operation while expanding the inner parallel
3507 directive, we need to access the CTX structure of the outer
3508 parallel directive to get the correct mapping:
3510 .omp_data_o.2.i = .omp_data_i.1->i
3512 Since there may be other workshare or parallel directives enclosing
3513 the parallel directive, it may be necessary to walk up the context
3514 parent chain. This is not a problem in general because nested
3515 parallelism happens only rarely. */
3517 static tree
3518 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3520 tree t;
3521 omp_context *up;
3523 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3524 t = maybe_lookup_decl (decl, up);
3526 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3528 return t ? t : decl;
3532 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3533 in outer contexts. */
3535 static tree
3536 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3538 tree t = NULL;
3539 omp_context *up;
3541 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3542 t = maybe_lookup_decl (decl, up);
3544 return t ? t : decl;
3548 /* Construct the initialization value for reduction operation OP. */
3550 tree
3551 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3553 switch (op)
3555 case PLUS_EXPR:
3556 case MINUS_EXPR:
3557 case BIT_IOR_EXPR:
3558 case BIT_XOR_EXPR:
3559 case TRUTH_OR_EXPR:
3560 case TRUTH_ORIF_EXPR:
3561 case TRUTH_XOR_EXPR:
3562 case NE_EXPR:
3563 return build_zero_cst (type);
3565 case MULT_EXPR:
3566 case TRUTH_AND_EXPR:
3567 case TRUTH_ANDIF_EXPR:
3568 case EQ_EXPR:
3569 return fold_convert_loc (loc, type, integer_one_node);
3571 case BIT_AND_EXPR:
3572 return fold_convert_loc (loc, type, integer_minus_one_node);
3574 case MAX_EXPR:
3575 if (SCALAR_FLOAT_TYPE_P (type))
3577 REAL_VALUE_TYPE max, min;
3578 if (HONOR_INFINITIES (type))
3580 real_inf (&max);
3581 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3583 else
3584 real_maxval (&min, 1, TYPE_MODE (type));
3585 return build_real (type, min);
3587 else if (POINTER_TYPE_P (type))
3589 wide_int min
3590 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3591 return wide_int_to_tree (type, min);
3593 else
3595 gcc_assert (INTEGRAL_TYPE_P (type));
3596 return TYPE_MIN_VALUE (type);
3599 case MIN_EXPR:
3600 if (SCALAR_FLOAT_TYPE_P (type))
3602 REAL_VALUE_TYPE max;
3603 if (HONOR_INFINITIES (type))
3604 real_inf (&max);
3605 else
3606 real_maxval (&max, 0, TYPE_MODE (type));
3607 return build_real (type, max);
3609 else if (POINTER_TYPE_P (type))
3611 wide_int max
3612 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3613 return wide_int_to_tree (type, max);
3615 else
3617 gcc_assert (INTEGRAL_TYPE_P (type));
3618 return TYPE_MAX_VALUE (type);
3621 default:
3622 gcc_unreachable ();
3626 /* Construct the initialization value for reduction CLAUSE. */
3628 tree
3629 omp_reduction_init (tree clause, tree type)
3631 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3632 OMP_CLAUSE_REDUCTION_CODE (clause), type);
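/* For instance, the identity values produced above are:

       reduction (+:x)   ->  x starts at 0
       reduction (*:x)   ->  x starts at 1
       reduction (&:x)   ->  x starts at ~0 (all bits set)
       reduction (max:f) ->  f starts at -Inf (or the most negative
			     finite value when the type does not honor
			     infinities)

   i.e. each thread's private copy starts at the identity element of
   the reduction operator, so combining the partial results with the
   original value is correct.  */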
3635 /* Return the alignment to be assumed for the variable in CLAUSE, which
3636 should be OMP_CLAUSE_ALIGNED. */
3638 static tree
3639 omp_clause_aligned_alignment (tree clause)
3641 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3642 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3644 /* Otherwise return the implementation-defined alignment. */
3645 unsigned int al = 1;
3646 opt_scalar_mode mode_iter;
3647 auto_vector_sizes sizes;
3648 targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
3649 poly_uint64 vs = 0;
3650 for (unsigned int i = 0; i < sizes.length (); ++i)
3651 vs = ordered_max (vs, sizes[i]);
3652 static enum mode_class classes[]
3653 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3654 for (int i = 0; i < 4; i += 2)
3655 /* The for loop above dictates that we only walk through scalar classes. */
3656 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3658 scalar_mode mode = mode_iter.require ();
3659 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3660 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3661 continue;
3662 while (maybe_ne (vs, 0U)
3663 && known_lt (GET_MODE_SIZE (vmode), vs)
3664 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3665 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3667 tree type = lang_hooks.types.type_for_mode (mode, 1);
3668 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3669 continue;
3670 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3671 GET_MODE_SIZE (mode));
3672 type = build_vector_type (type, nelts);
3673 if (TYPE_MODE (type) != vmode)
3674 continue;
3675 if (TYPE_ALIGN_UNIT (type) > al)
3676 al = TYPE_ALIGN_UNIT (type);
3678 return build_int_cst (integer_type_node, al);
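/* A target-dependent example of the fallback computation above: if the
   widest autovectorization size is 32 bytes and the preferred SIMD
   mode for SImode is a 32-byte integer vector, the constructed V8SI
   type has TYPE_ALIGN_UNIT 32, so an %<aligned%> clause without an
   explicit alignment is assumed to guarantee 32-byte alignment.  */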
3682 /* This structure is part of the interface between lower_rec_simd_input_clauses
3683 and lower_rec_input_clauses. */
3685 struct omplow_simd_context {
3686 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3687 tree idx;
3688 tree lane;
3689 tree lastlane;
3690 vec<tree, va_heap> simt_eargs;
3691 gimple_seq simt_dlist;
3692 poly_uint64_pod max_vf;
3693 bool is_simt;
3696 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3697 privatization. */
3699 static bool
3700 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3701 omplow_simd_context *sctx, tree &ivar,
3702 tree &lvar, tree *rvar = NULL,
3703 tree *rvar2 = NULL)
3705 if (known_eq (sctx->max_vf, 0U))
3707 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3708 if (maybe_gt (sctx->max_vf, 1U))
3710 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3711 OMP_CLAUSE_SAFELEN);
3712 if (c)
3714 poly_uint64 safe_len;
3715 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3716 || maybe_lt (safe_len, 1U))
3717 sctx->max_vf = 1;
3718 else
3719 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3722 if (maybe_gt (sctx->max_vf, 1U))
3724 sctx->idx = create_tmp_var (unsigned_type_node);
3725 sctx->lane = create_tmp_var (unsigned_type_node);
3728 if (known_eq (sctx->max_vf, 1U))
3729 return false;
3731 if (sctx->is_simt)
3733 if (is_gimple_reg (new_var))
3735 ivar = lvar = new_var;
3736 return true;
3738 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3739 ivar = lvar = create_tmp_var (type);
3740 TREE_ADDRESSABLE (ivar) = 1;
3741 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3742 NULL, DECL_ATTRIBUTES (ivar));
3743 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3744 tree clobber = build_constructor (type, NULL);
3745 TREE_THIS_VOLATILE (clobber) = 1;
3746 gimple *g = gimple_build_assign (ivar, clobber);
3747 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3749 else
3751 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3752 tree avar = create_tmp_var_raw (atype);
3753 if (TREE_ADDRESSABLE (new_var))
3754 TREE_ADDRESSABLE (avar) = 1;
3755 DECL_ATTRIBUTES (avar)
3756 = tree_cons (get_identifier ("omp simd array"), NULL,
3757 DECL_ATTRIBUTES (avar));
3758 gimple_add_tmp_var (avar);
3759 tree iavar = avar;
3760 if (rvar)
3762 /* For inscan reductions, create another array temporary,
3763 which will hold the reduced value. */
3764 iavar = create_tmp_var_raw (atype);
3765 if (TREE_ADDRESSABLE (new_var))
3766 TREE_ADDRESSABLE (iavar) = 1;
3767 DECL_ATTRIBUTES (iavar)
3768 = tree_cons (get_identifier ("omp simd array"), NULL,
3769 tree_cons (get_identifier ("omp simd inscan"), NULL,
3770 DECL_ATTRIBUTES (iavar)));
3771 gimple_add_tmp_var (iavar);
3772 ctx->cb.decl_map->put (avar, iavar);
3773 if (sctx->lastlane == NULL_TREE)
3774 sctx->lastlane = create_tmp_var (unsigned_type_node);
3775 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
3776 sctx->lastlane, NULL_TREE, NULL_TREE);
3777 TREE_THIS_NOTRAP (*rvar) = 1;
3779 if (ctx->scan_exclusive)
3781 /* And for exclusive scan yet another one, which will
3782 hold the value during the scan phase. */
3783 tree savar = create_tmp_var_raw (atype);
3784 if (TREE_ADDRESSABLE (new_var))
3785 TREE_ADDRESSABLE (savar) = 1;
3786 DECL_ATTRIBUTES (savar)
3787 = tree_cons (get_identifier ("omp simd array"), NULL,
3788 tree_cons (get_identifier ("omp simd inscan "
3789 "exclusive"), NULL,
3790 DECL_ATTRIBUTES (savar)));
3791 gimple_add_tmp_var (savar);
3792 ctx->cb.decl_map->put (iavar, savar);
3793 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
3794 sctx->idx, NULL_TREE, NULL_TREE);
3795 TREE_THIS_NOTRAP (*rvar2) = 1;
3798 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
3799 NULL_TREE, NULL_TREE);
3800 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3801 NULL_TREE, NULL_TREE);
3802 TREE_THIS_NOTRAP (ivar) = 1;
3803 TREE_THIS_NOTRAP (lvar) = 1;
3805 if (DECL_P (new_var))
3807 SET_DECL_VALUE_EXPR (new_var, lvar);
3808 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3810 return true;
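/* In the non-SIMT branch above, the privatized variable is thus backed
   by an "omp simd array", roughly:

       type D.simd_array[max_vf];	   <- carries "omp simd array"
       IVAR  == D.simd_array[sctx->idx]    <- per-lane access in the body
       LVAR  == D.simd_array[sctx->lane]   <- installed as NEW_VAR's
					      DECL_VALUE_EXPR

   so each SIMD lane gets its own element; the vectorizer may later
   promote the whole array into a vector register.  */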
3813 /* Helper function of lower_rec_input_clauses. For a reference
3814 in a simd reduction, add an underlying variable it will reference. */
3816 static void
3817 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3819 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3820 if (TREE_CONSTANT (z))
3822 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3823 get_name (new_vard));
3824 gimple_add_tmp_var (z);
3825 TREE_ADDRESSABLE (z) = 1;
3826 z = build_fold_addr_expr_loc (loc, z);
3827 gimplify_assign (new_vard, z, ilist);
3831 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
3832 sequence code that computes (type) (tskred_temp[idx]). */
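/* Roughly, the emitted code is
     __UINTPTR_TYPE__ t = ((__UINTPTR_TYPE__ *) tskred_temp)[idx];
     v = (type) t;
   and the temporary V holding the converted value is returned.  */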
3834 static tree
3835 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3836 unsigned idx)
3838 unsigned HOST_WIDE_INT sz
3839 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3840 tree r = build2 (MEM_REF, pointer_sized_int_node,
3841 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3842 idx * sz));
3843 tree v = create_tmp_var (pointer_sized_int_node);
3844 gimple *g = gimple_build_assign (v, r);
3845 gimple_seq_add_stmt (ilist, g);
3846 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3848 v = create_tmp_var (type);
3849 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3850 gimple_seq_add_stmt (ilist, g);
3852 return v;
3855 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3856 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3857 private variables. Initialization statements go in ILIST, while calls
3858 to destructors go in DLIST. */
3860 static void
3861 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3862 omp_context *ctx, struct omp_for_data *fd)
3864 tree c, copyin_seq, x, ptr;
3865 bool copyin_by_ref = false;
3866 bool lastprivate_firstprivate = false;
3867 bool reduction_omp_orig_ref = false;
3868 int pass;
3869 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3870 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3871 omplow_simd_context sctx = omplow_simd_context ();
3872 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3873 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3874 gimple_seq llist[4] = { };
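/* The four llist sequences collect per-SIMD-lane statements: llist[0]
   per-lane initialization, llist[1] per-lane finalization and
   destructors, llist[2] SIMT butterfly reduction steps and llist[3]
   inscan reduction copy-out once the last lane is known.  */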
3875 tree nonconst_simd_if = NULL_TREE;
3877 copyin_seq = NULL;
3878 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3880 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3881 with data sharing clauses referencing variable sized vars. That
3882 is unnecessarily hard to support and very unlikely to result in
3883 vectorized code anyway. */
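/* E.g. a loop such as
     #pragma omp simd private (vla)
     for (i = 0; i < n; i++) ...
   where vla is a C99 variable length array cannot use fixed-size
   per-lane SIMD arrays, so max_vf is forced to 1 here.  */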
3884 if (is_simd)
3885 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3886 switch (OMP_CLAUSE_CODE (c))
3888 case OMP_CLAUSE_LINEAR:
3889 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3890 sctx.max_vf = 1;
3891 /* FALLTHRU */
3892 case OMP_CLAUSE_PRIVATE:
3893 case OMP_CLAUSE_FIRSTPRIVATE:
3894 case OMP_CLAUSE_LASTPRIVATE:
3895 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3896 sctx.max_vf = 1;
3897 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3899 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3900 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3901 sctx.max_vf = 1;
3903 break;
3904 case OMP_CLAUSE_REDUCTION:
3905 case OMP_CLAUSE_IN_REDUCTION:
3906 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3907 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3908 sctx.max_vf = 1;
3909 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3911 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3912 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3913 sctx.max_vf = 1;
3915 break;
3916 case OMP_CLAUSE_IF:
3917 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
3918 sctx.max_vf = 1;
3919 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
3920 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
3921 break;
3922 case OMP_CLAUSE_SIMDLEN:
3923 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
3924 sctx.max_vf = 1;
3925 break;
3926 case OMP_CLAUSE__CONDTEMP_:
3927 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
3928 if (sctx.is_simt)
3929 sctx.max_vf = 1;
3930 break;
3931 default:
3932 continue;
3935 /* Add a placeholder for simduid. */
3936 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3937 sctx.simt_eargs.safe_push (NULL_TREE);
3939 unsigned task_reduction_cnt = 0;
3940 unsigned task_reduction_cntorig = 0;
3941 unsigned task_reduction_cnt_full = 0;
3942 unsigned task_reduction_cntorig_full = 0;
3943 unsigned task_reduction_other_cnt = 0;
3944 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3945 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3946 /* Do all the fixed sized types in the first pass, and the variable sized
3947 types in the second pass. This makes sure that the scalar arguments to
3948 the variable sized types are processed before we use them in the
3949 variable sized operations. For task reductions we use 4 passes: in the
3950 first two we ignore them, in the third we gather arguments for the
3951 GOMP_task_reduction_remap call, and in the last pass we actually handle
3952 the task reductions. */
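/* Schematically:
     pass 0: fixed size privatized decls
     pass 1: variable sized privatized decls
     pass 2: gather addresses for the GOMP_task_reduction_remap call
     pass 3: the task reductions themselves.  */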
3953 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3954 ? 4 : 2); ++pass)
3956 if (pass == 2 && task_reduction_cnt)
3958 tskred_atype
3959 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3960 + task_reduction_cntorig);
3961 tskred_avar = create_tmp_var_raw (tskred_atype);
3962 gimple_add_tmp_var (tskred_avar);
3963 TREE_ADDRESSABLE (tskred_avar) = 1;
3964 task_reduction_cnt_full = task_reduction_cnt;
3965 task_reduction_cntorig_full = task_reduction_cntorig;
3967 else if (pass == 3 && task_reduction_cnt)
3969 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3970 gimple *g
3971 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3972 size_int (task_reduction_cntorig),
3973 build_fold_addr_expr (tskred_avar));
3974 gimple_seq_add_stmt (ilist, g);
3976 if (pass == 3 && task_reduction_other_cnt)
3978 /* For reduction clauses, build
3979 tskred_base = (void *) tskred_temp[2]
3980 + omp_get_thread_num () * tskred_temp[1]
3981 or if tskred_temp[1] is known to be constant, that constant
3982 directly. This is the start of the private reduction copy block
3983 for the current thread. */
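/* tskred_temp below is the _reductemp_ array: element 1 holds the
   per-thread block size and element 2 the base address of the whole
   block, each read back with task_reduction_read unless already known
   at compile time via ctx->task_reductions.  */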
3984 tree v = create_tmp_var (integer_type_node);
3985 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3986 gimple *g = gimple_build_call (x, 0);
3987 gimple_call_set_lhs (g, v);
3988 gimple_seq_add_stmt (ilist, g);
3989 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3990 tskred_temp = OMP_CLAUSE_DECL (c);
3991 if (is_taskreg_ctx (ctx))
3992 tskred_temp = lookup_decl (tskred_temp, ctx);
3993 tree v2 = create_tmp_var (sizetype);
3994 g = gimple_build_assign (v2, NOP_EXPR, v);
3995 gimple_seq_add_stmt (ilist, g);
3996 if (ctx->task_reductions[0])
3997 v = fold_convert (sizetype, ctx->task_reductions[0]);
3998 else
3999 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4000 tree v3 = create_tmp_var (sizetype);
4001 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4002 gimple_seq_add_stmt (ilist, g);
4003 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4004 tskred_base = create_tmp_var (ptr_type_node);
4005 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4006 gimple_seq_add_stmt (ilist, g);
4008 task_reduction_cnt = 0;
4009 task_reduction_cntorig = 0;
4010 task_reduction_other_cnt = 0;
4011 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4013 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4014 tree var, new_var;
4015 bool by_ref;
4016 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4017 bool task_reduction_p = false;
4018 bool task_reduction_needs_orig_p = false;
4019 tree cond = NULL_TREE;
4021 switch (c_kind)
4023 case OMP_CLAUSE_PRIVATE:
4024 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4025 continue;
4026 break;
4027 case OMP_CLAUSE_SHARED:
4028 /* Ignore shared directives in teams constructs nested inside
4029 a target construct. */
4030 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4031 && !is_host_teams_ctx (ctx))
4032 continue;
4033 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4035 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4036 || is_global_var (OMP_CLAUSE_DECL (c)));
4037 continue;
4039 case OMP_CLAUSE_FIRSTPRIVATE:
4040 case OMP_CLAUSE_COPYIN:
4041 break;
4042 case OMP_CLAUSE_LINEAR:
4043 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4044 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4045 lastprivate_firstprivate = true;
4046 break;
4047 case OMP_CLAUSE_REDUCTION:
4048 case OMP_CLAUSE_IN_REDUCTION:
4049 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4051 task_reduction_p = true;
4052 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4054 task_reduction_other_cnt++;
4055 if (pass == 2)
4056 continue;
4058 else
4059 task_reduction_cnt++;
4060 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4062 var = OMP_CLAUSE_DECL (c);
4063 /* If var is a global variable that isn't privatized
4064 in outer contexts, we don't need to look up the
4065 original address, it is always the address of the
4066 global variable itself. */
4067 if (!DECL_P (var)
4068 || omp_is_reference (var)
4069 || !is_global_var
4070 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4072 task_reduction_needs_orig_p = true;
4073 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4074 task_reduction_cntorig++;
4078 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4079 reduction_omp_orig_ref = true;
4080 break;
4081 case OMP_CLAUSE__REDUCTEMP_:
4082 if (!is_taskreg_ctx (ctx))
4083 continue;
4084 /* FALLTHRU */
4085 case OMP_CLAUSE__LOOPTEMP_:
4086 /* Handle _looptemp_/_reductemp_ clauses only on
4087 parallel/task. */
4088 if (fd)
4089 continue;
4090 break;
4091 case OMP_CLAUSE_LASTPRIVATE:
4092 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4094 lastprivate_firstprivate = true;
4095 if (pass != 0 || is_taskloop_ctx (ctx))
4096 continue;
4098 /* Even without a corresponding firstprivate, if the
4099 decl is Fortran allocatable, it needs an outer var
4100 reference. */
4101 else if (pass == 0
4102 && lang_hooks.decls.omp_private_outer_ref
4103 (OMP_CLAUSE_DECL (c)))
4104 lastprivate_firstprivate = true;
4105 break;
4106 case OMP_CLAUSE_ALIGNED:
4107 if (pass != 1)
4108 continue;
4109 var = OMP_CLAUSE_DECL (c);
4110 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4111 && !is_global_var (var))
4113 new_var = maybe_lookup_decl (var, ctx);
4114 if (new_var == NULL_TREE)
4115 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4116 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4117 tree alarg = omp_clause_aligned_alignment (c);
4118 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4119 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4120 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4121 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4122 gimplify_and_add (x, ilist);
4124 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4125 && is_global_var (var))
4127 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4128 new_var = lookup_decl (var, ctx);
4129 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4130 t = build_fold_addr_expr_loc (clause_loc, t);
4131 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4132 tree alarg = omp_clause_aligned_alignment (c);
4133 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4134 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4135 t = fold_convert_loc (clause_loc, ptype, t);
4136 x = create_tmp_var (ptype);
4137 t = build2 (MODIFY_EXPR, ptype, x, t);
4138 gimplify_and_add (t, ilist);
4139 t = build_simple_mem_ref_loc (clause_loc, x);
4140 SET_DECL_VALUE_EXPR (new_var, t);
4141 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4143 continue;
4144 case OMP_CLAUSE__CONDTEMP_:
4145 if (is_parallel_ctx (ctx)
4146 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4147 break;
4148 continue;
4149 default:
4150 continue;
4153 if (task_reduction_p != (pass >= 2))
4154 continue;
4156 new_var = var = OMP_CLAUSE_DECL (c);
4157 if ((c_kind == OMP_CLAUSE_REDUCTION
4158 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4159 && TREE_CODE (var) == MEM_REF)
4161 var = TREE_OPERAND (var, 0);
4162 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4163 var = TREE_OPERAND (var, 0);
4164 if (TREE_CODE (var) == INDIRECT_REF
4165 || TREE_CODE (var) == ADDR_EXPR)
4166 var = TREE_OPERAND (var, 0);
4167 if (is_variable_sized (var))
4169 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4170 var = DECL_VALUE_EXPR (var);
4171 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4172 var = TREE_OPERAND (var, 0);
4173 gcc_assert (DECL_P (var));
4175 new_var = var;
4177 if (c_kind != OMP_CLAUSE_COPYIN)
4178 new_var = lookup_decl (var, ctx);
4180 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4182 if (pass != 0)
4183 continue;
4185 /* C/C++ array section reductions. */
4186 else if ((c_kind == OMP_CLAUSE_REDUCTION
4187 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4188 && var != OMP_CLAUSE_DECL (c))
4190 if (pass == 0)
4191 continue;
4193 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4194 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4196 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4198 tree b = TREE_OPERAND (orig_var, 1);
4199 b = maybe_lookup_decl (b, ctx);
4200 if (b == NULL)
4202 b = TREE_OPERAND (orig_var, 1);
4203 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4205 if (integer_zerop (bias))
4206 bias = b;
4207 else
4209 bias = fold_convert_loc (clause_loc,
4210 TREE_TYPE (b), bias);
4211 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4212 TREE_TYPE (b), b, bias);
4214 orig_var = TREE_OPERAND (orig_var, 0);
4216 if (pass == 2)
4218 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4219 if (is_global_var (out)
4220 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4221 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4222 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4223 != POINTER_TYPE)))
4224 x = var;
4225 else
4227 bool by_ref = use_pointer_for_field (var, NULL);
4228 x = build_receiver_ref (var, by_ref, ctx);
4229 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4230 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4231 == POINTER_TYPE))
4232 x = build_fold_addr_expr (x);
4234 if (TREE_CODE (orig_var) == INDIRECT_REF)
4235 x = build_simple_mem_ref (x);
4236 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4238 if (var == TREE_OPERAND (orig_var, 0))
4239 x = build_fold_addr_expr (x);
4241 bias = fold_convert (sizetype, bias);
4242 x = fold_convert (ptr_type_node, x);
4243 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4244 TREE_TYPE (x), x, bias);
4245 unsigned cnt = task_reduction_cnt - 1;
4246 if (!task_reduction_needs_orig_p)
4247 cnt += (task_reduction_cntorig_full
4248 - task_reduction_cntorig);
4249 else
4250 cnt = task_reduction_cntorig - 1;
4251 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4252 size_int (cnt), NULL_TREE, NULL_TREE);
4253 gimplify_assign (r, x, ilist);
4254 continue;
4257 if (TREE_CODE (orig_var) == INDIRECT_REF
4258 || TREE_CODE (orig_var) == ADDR_EXPR)
4259 orig_var = TREE_OPERAND (orig_var, 0);
4260 tree d = OMP_CLAUSE_DECL (c);
4261 tree type = TREE_TYPE (d);
4262 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4263 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4264 const char *name = get_name (orig_var);
4265 if (pass == 3)
4267 tree xv = create_tmp_var (ptr_type_node);
4268 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4270 unsigned cnt = task_reduction_cnt - 1;
4271 if (!task_reduction_needs_orig_p)
4272 cnt += (task_reduction_cntorig_full
4273 - task_reduction_cntorig);
4274 else
4275 cnt = task_reduction_cntorig - 1;
4276 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4277 size_int (cnt), NULL_TREE, NULL_TREE);
4279 gimple *g = gimple_build_assign (xv, x);
4280 gimple_seq_add_stmt (ilist, g);
4282 else
4284 unsigned int idx = *ctx->task_reduction_map->get (c);
4285 tree off;
4286 if (ctx->task_reductions[1 + idx])
4287 off = fold_convert (sizetype,
4288 ctx->task_reductions[1 + idx]);
4289 else
4290 off = task_reduction_read (ilist, tskred_temp, sizetype,
4291 7 + 3 * idx + 1);
4292 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4293 tskred_base, off);
4294 gimple_seq_add_stmt (ilist, g);
4296 x = fold_convert (build_pointer_type (boolean_type_node),
4297 xv);
4298 if (TREE_CONSTANT (v))
4299 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4300 TYPE_SIZE_UNIT (type));
4301 else
4303 tree t = maybe_lookup_decl (v, ctx);
4304 if (t)
4305 v = t;
4306 else
4307 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4308 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4309 fb_rvalue);
4310 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4311 TREE_TYPE (v), v,
4312 build_int_cst (TREE_TYPE (v), 1));
4313 t = fold_build2_loc (clause_loc, MULT_EXPR,
4314 TREE_TYPE (v), t,
4315 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4316 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4318 cond = create_tmp_var (TREE_TYPE (x));
4319 gimplify_assign (cond, x, ilist);
4320 x = xv;
4322 else if (TREE_CONSTANT (v))
4324 x = create_tmp_var_raw (type, name);
4325 gimple_add_tmp_var (x);
4326 TREE_ADDRESSABLE (x) = 1;
4327 x = build_fold_addr_expr_loc (clause_loc, x);
4329 else
4331 tree atmp
4332 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4333 tree t = maybe_lookup_decl (v, ctx);
4334 if (t)
4335 v = t;
4336 else
4337 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4338 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4339 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4340 TREE_TYPE (v), v,
4341 build_int_cst (TREE_TYPE (v), 1));
4342 t = fold_build2_loc (clause_loc, MULT_EXPR,
4343 TREE_TYPE (v), t,
4344 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4345 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4346 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4349 tree ptype = build_pointer_type (TREE_TYPE (type));
4350 x = fold_convert_loc (clause_loc, ptype, x);
4351 tree y = create_tmp_var (ptype, name);
4352 gimplify_assign (y, x, ilist);
4353 x = y;
4354 tree yb = y;
4356 if (!integer_zerop (bias))
4358 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4359 bias);
4360 yb = fold_convert_loc (clause_loc, pointer_sized_int_node, y);
4362 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4363 pointer_sized_int_node, yb, bias);
4364 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4365 yb = create_tmp_var (ptype, name);
4366 gimplify_assign (yb, x, ilist);
4367 x = yb;
4370 d = TREE_OPERAND (d, 0);
4371 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4372 d = TREE_OPERAND (d, 0);
4373 if (TREE_CODE (d) == ADDR_EXPR)
4375 if (orig_var != var)
4377 gcc_assert (is_variable_sized (orig_var));
4378 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4380 gimplify_assign (new_var, x, ilist);
4381 tree new_orig_var = lookup_decl (orig_var, ctx);
4382 tree t = build_fold_indirect_ref (new_var);
4383 DECL_IGNORED_P (new_var) = 0;
4384 TREE_THIS_NOTRAP (t) = 1;
4385 SET_DECL_VALUE_EXPR (new_orig_var, t);
4386 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4388 else
4390 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4391 build_int_cst (ptype, 0));
4392 SET_DECL_VALUE_EXPR (new_var, x);
4393 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4396 else
4398 gcc_assert (orig_var == var);
4399 if (TREE_CODE (d) == INDIRECT_REF)
4401 x = create_tmp_var (ptype, name);
4402 TREE_ADDRESSABLE (x) = 1;
4403 gimplify_assign (x, yb, ilist);
4404 x = build_fold_addr_expr_loc (clause_loc, x);
4406 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4407 gimplify_assign (new_var, x, ilist);
4409 /* GOMP_taskgroup_reduction_register memsets the whole
4410 array to zero. If the initializer is zero, we don't
4411 need to initialize it again, just mark it as ever
4412 used unconditionally, i.e. cond = true. */
4413 if (cond
4414 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4415 && initializer_zerop (omp_reduction_init (c,
4416 TREE_TYPE (type))))
4418 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4419 boolean_true_node);
4420 gimple_seq_add_stmt (ilist, g);
4421 continue;
4423 tree end = create_artificial_label (UNKNOWN_LOCATION);
4424 if (cond)
4426 gimple *g;
4427 if (!is_parallel_ctx (ctx))
4429 tree condv = create_tmp_var (boolean_type_node);
4430 g = gimple_build_assign (condv,
4431 build_simple_mem_ref (cond));
4432 gimple_seq_add_stmt (ilist, g);
4433 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4434 g = gimple_build_cond (NE_EXPR, condv,
4435 boolean_false_node, end, lab1);
4436 gimple_seq_add_stmt (ilist, g);
4437 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4439 g = gimple_build_assign (build_simple_mem_ref (cond),
4440 boolean_true_node);
4441 gimple_seq_add_stmt (ilist, g);
4444 tree y1 = create_tmp_var (ptype);
4445 gimplify_assign (y1, y, ilist);
4446 tree i2 = NULL_TREE, y2 = NULL_TREE;
4447 tree body2 = NULL_TREE, end2 = NULL_TREE;
4448 tree y3 = NULL_TREE, y4 = NULL_TREE;
4449 if (task_reduction_needs_orig_p)
4451 y3 = create_tmp_var (ptype);
4452 tree ref;
4453 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4454 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4455 size_int (task_reduction_cnt_full
4456 + task_reduction_cntorig - 1),
4457 NULL_TREE, NULL_TREE);
4458 else
4460 unsigned int idx = *ctx->task_reduction_map->get (c);
4461 ref = task_reduction_read (ilist, tskred_temp, ptype,
4462 7 + 3 * idx);
4464 gimplify_assign (y3, ref, ilist);
4466 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4468 if (pass != 3)
4470 y2 = create_tmp_var (ptype);
4471 gimplify_assign (y2, y, ilist);
4473 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4475 tree ref = build_outer_var_ref (var, ctx);
4476 /* For references, build_outer_var_ref already performs this. */
4477 if (TREE_CODE (d) == INDIRECT_REF)
4478 gcc_assert (omp_is_reference (var));
4479 else if (TREE_CODE (d) == ADDR_EXPR)
4480 ref = build_fold_addr_expr (ref);
4481 else if (omp_is_reference (var))
4482 ref = build_fold_addr_expr (ref);
4483 ref = fold_convert_loc (clause_loc, ptype, ref);
4484 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4485 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4487 y3 = create_tmp_var (ptype);
4488 gimplify_assign (y3, unshare_expr (ref), ilist);
4490 if (is_simd)
4492 y4 = create_tmp_var (ptype);
4493 gimplify_assign (y4, ref, dlist);
4497 tree i = create_tmp_var (TREE_TYPE (v));
4498 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4499 tree body = create_artificial_label (UNKNOWN_LOCATION);
4500 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4501 if (y2)
4503 i2 = create_tmp_var (TREE_TYPE (v));
4504 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4505 body2 = create_artificial_label (UNKNOWN_LOCATION);
4506 end2 = create_artificial_label (UNKNOWN_LOCATION);
4507 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4509 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4511 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4512 tree decl_placeholder
4513 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4514 SET_DECL_VALUE_EXPR (decl_placeholder,
4515 build_simple_mem_ref (y1));
4516 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4517 SET_DECL_VALUE_EXPR (placeholder,
4518 y3 ? build_simple_mem_ref (y3)
4519 : error_mark_node);
4520 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4521 x = lang_hooks.decls.omp_clause_default_ctor
4522 (c, build_simple_mem_ref (y1),
4523 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4524 if (x)
4525 gimplify_and_add (x, ilist);
4526 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4528 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4529 lower_omp (&tseq, ctx);
4530 gimple_seq_add_seq (ilist, tseq);
4532 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4533 if (is_simd)
4535 SET_DECL_VALUE_EXPR (decl_placeholder,
4536 build_simple_mem_ref (y2));
4537 SET_DECL_VALUE_EXPR (placeholder,
4538 build_simple_mem_ref (y4));
4539 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4540 lower_omp (&tseq, ctx);
4541 gimple_seq_add_seq (dlist, tseq);
4542 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4544 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4545 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4546 if (y2)
4548 x = lang_hooks.decls.omp_clause_dtor
4549 (c, build_simple_mem_ref (y2));
4550 if (x)
4551 gimplify_and_add (x, dlist);
4554 else
4556 x = omp_reduction_init (c, TREE_TYPE (type));
4557 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4559 /* reduction(-:var) sums up the partial results, so it
4560 acts identically to reduction(+:var). */
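/* E.g. for reduction(-:var) each lane accumulates its contributions
   into a zero-initialized private copy via var -= ..., so the partial
   results are combined by adding them to the original value.  */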
4561 if (code == MINUS_EXPR)
4562 code = PLUS_EXPR;
4564 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4565 if (is_simd)
4567 x = build2 (code, TREE_TYPE (type),
4568 build_simple_mem_ref (y4),
4569 build_simple_mem_ref (y2));
4570 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4573 gimple *g
4574 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4575 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4576 gimple_seq_add_stmt (ilist, g);
4577 if (y3)
4579 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4580 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4581 gimple_seq_add_stmt (ilist, g);
4583 g = gimple_build_assign (i, PLUS_EXPR, i,
4584 build_int_cst (TREE_TYPE (i), 1));
4585 gimple_seq_add_stmt (ilist, g);
4586 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4587 gimple_seq_add_stmt (ilist, g);
4588 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4589 if (y2)
4591 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4592 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4593 gimple_seq_add_stmt (dlist, g);
4594 if (y4)
4596 g = gimple_build_assign
4597 (y4, POINTER_PLUS_EXPR, y4,
4598 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4599 gimple_seq_add_stmt (dlist, g);
4601 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4602 build_int_cst (TREE_TYPE (i2), 1));
4603 gimple_seq_add_stmt (dlist, g);
4604 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4605 gimple_seq_add_stmt (dlist, g);
4606 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4608 continue;
4610 else if (pass == 2)
4612 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4613 x = var;
4614 else
4616 bool by_ref = use_pointer_for_field (var, ctx);
4617 x = build_receiver_ref (var, by_ref, ctx);
4619 if (!omp_is_reference (var))
4620 x = build_fold_addr_expr (x);
4621 x = fold_convert (ptr_type_node, x);
4622 unsigned cnt = task_reduction_cnt - 1;
4623 if (!task_reduction_needs_orig_p)
4624 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4625 else
4626 cnt = task_reduction_cntorig - 1;
4627 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4628 size_int (cnt), NULL_TREE, NULL_TREE);
4629 gimplify_assign (r, x, ilist);
4630 continue;
4632 else if (pass == 3)
4634 tree type = TREE_TYPE (new_var);
4635 if (!omp_is_reference (var))
4636 type = build_pointer_type (type);
4637 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4639 unsigned cnt = task_reduction_cnt - 1;
4640 if (!task_reduction_needs_orig_p)
4641 cnt += (task_reduction_cntorig_full
4642 - task_reduction_cntorig);
4643 else
4644 cnt = task_reduction_cntorig - 1;
4645 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4646 size_int (cnt), NULL_TREE, NULL_TREE);
4648 else
4650 unsigned int idx = *ctx->task_reduction_map->get (c);
4651 tree off;
4652 if (ctx->task_reductions[1 + idx])
4653 off = fold_convert (sizetype,
4654 ctx->task_reductions[1 + idx]);
4655 else
4656 off = task_reduction_read (ilist, tskred_temp, sizetype,
4657 7 + 3 * idx + 1);
4658 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4659 tskred_base, off);
4661 x = fold_convert (type, x);
4662 tree t;
4663 if (omp_is_reference (var))
4665 gimplify_assign (new_var, x, ilist);
4666 t = new_var;
4667 new_var = build_simple_mem_ref (new_var);
4669 else
4671 t = create_tmp_var (type);
4672 gimplify_assign (t, x, ilist);
4673 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4674 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4676 t = fold_convert (build_pointer_type (boolean_type_node), t);
4677 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4678 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4679 cond = create_tmp_var (TREE_TYPE (t));
4680 gimplify_assign (cond, t, ilist);
4682 else if (is_variable_sized (var))
4684 /* For variable sized types, we need to allocate the
4685 actual storage here. Call alloca and store the
4686 result in the pointer decl that we created elsewhere. */
4687 if (pass == 0)
4688 continue;
4690 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4692 gcall *stmt;
4693 tree tmp, atmp;
4695 ptr = DECL_VALUE_EXPR (new_var);
4696 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4697 ptr = TREE_OPERAND (ptr, 0);
4698 gcc_assert (DECL_P (ptr));
4699 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4701 /* void *tmp = __builtin_alloca_with_align (size, align); */
4702 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4703 stmt = gimple_build_call (atmp, 2, x,
4704 size_int (DECL_ALIGN (var)));
4705 tmp = create_tmp_var_raw (ptr_type_node);
4706 gimple_add_tmp_var (tmp);
4707 gimple_call_set_lhs (stmt, tmp);
4709 gimple_seq_add_stmt (ilist, stmt);
4711 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4712 gimplify_assign (ptr, x, ilist);
4715 else if (omp_is_reference (var)
4716 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4717 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4719 /* For references that are being privatized for Fortran,
4720 allocate new backing storage for the new pointer
4721 variable. This allows us to avoid changing all the
4722 code that expects a pointer to something that expects
4723 a direct variable. */
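/* Schematically, for a privatized Fortran reference VAR of type T &,
   this emits
     T tmp;   (or an alloca'd block when the size is not constant)
     new_var = &tmp;
   so code dereferencing the pointer keeps working unchanged.  */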
4724 if (pass == 0)
4725 continue;
4727 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4728 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4730 x = build_receiver_ref (var, false, ctx);
4731 x = build_fold_addr_expr_loc (clause_loc, x);
4733 else if (TREE_CONSTANT (x))
4735 /* For reduction in SIMD loop, defer adding the
4736 initialization of the reference, because if we decide
4737 to use a SIMD array for it, the initialization could cause an
4738 expansion ICE. Ditto for other privatization clauses. */
4739 if (is_simd)
4740 x = NULL_TREE;
4741 else
4743 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4744 get_name (var));
4745 gimple_add_tmp_var (x);
4746 TREE_ADDRESSABLE (x) = 1;
4747 x = build_fold_addr_expr_loc (clause_loc, x);
4750 else
4752 tree atmp
4753 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4754 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4755 tree al = size_int (TYPE_ALIGN (rtype));
4756 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4759 if (x)
4761 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4762 gimplify_assign (new_var, x, ilist);
4765 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4767 else if ((c_kind == OMP_CLAUSE_REDUCTION
4768 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4769 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4771 if (pass == 0)
4772 continue;
4774 else if (pass != 0)
4775 continue;
4777 switch (OMP_CLAUSE_CODE (c))
4779 case OMP_CLAUSE_SHARED:
4780 /* Ignore shared directives in teams constructs nested inside
4781 a target construct. */
4782 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4783 && !is_host_teams_ctx (ctx))
4784 continue;
4785 /* Shared global vars are just accessed directly. */
4786 if (is_global_var (new_var))
4787 break;
4788 /* For taskloop firstprivate/lastprivate, represented
4789 as firstprivate and shared clause on the task, new_var
4790 is the firstprivate var. */
4791 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4792 break;
4793 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4794 needs to be delayed until after fixup_child_record_type so
4795 that we get the correct type during the dereference. */
4796 by_ref = use_pointer_for_field (var, ctx);
4797 x = build_receiver_ref (var, by_ref, ctx);
4798 SET_DECL_VALUE_EXPR (new_var, x);
4799 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4801 /* ??? If VAR is not passed by reference, and the variable
4802 hasn't been initialized yet, then we'll get a warning for
4803 the store into the omp_data_s structure. Ideally, we'd be
4804 able to notice this and not store anything at all, but
4805 we're generating code too early. Suppress the warning. */
4806 if (!by_ref)
4807 TREE_NO_WARNING (var) = 1;
4808 break;
4810 case OMP_CLAUSE__CONDTEMP_:
4811 if (is_parallel_ctx (ctx))
4813 x = build_receiver_ref (var, false, ctx);
4814 SET_DECL_VALUE_EXPR (new_var, x);
4815 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4817 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
4819 x = build_zero_cst (TREE_TYPE (var));
4820 goto do_private;
4822 break;
4824 case OMP_CLAUSE_LASTPRIVATE:
4825 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4826 break;
4827 /* FALLTHRU */
4829 case OMP_CLAUSE_PRIVATE:
4830 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4831 x = build_outer_var_ref (var, ctx);
4832 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4834 if (is_task_ctx (ctx))
4835 x = build_receiver_ref (var, false, ctx);
4836 else
4837 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4839 else
4840 x = NULL;
4841 do_private:
4842 tree nx;
4843 nx = lang_hooks.decls.omp_clause_default_ctor
4844 (c, unshare_expr (new_var), x);
4845 if (is_simd)
4847 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4848 if ((TREE_ADDRESSABLE (new_var) || nx || y
4849 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4850 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
4851 || omp_is_reference (var))
4852 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4853 ivar, lvar))
4855 if (omp_is_reference (var))
4857 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4858 tree new_vard = TREE_OPERAND (new_var, 0);
4859 gcc_assert (DECL_P (new_vard));
4860 SET_DECL_VALUE_EXPR (new_vard,
4861 build_fold_addr_expr (lvar));
4862 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4865 if (nx)
4866 x = lang_hooks.decls.omp_clause_default_ctor
4867 (c, unshare_expr (ivar), x);
4868 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
4870 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
4871 unshare_expr (ivar), x);
4872 nx = x;
4874 if (nx && x)
4875 gimplify_and_add (x, &llist[0]);
4876 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4877 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
4879 tree v = new_var;
4880 if (!DECL_P (v))
4882 gcc_assert (TREE_CODE (v) == MEM_REF);
4883 v = TREE_OPERAND (v, 0);
4884 gcc_assert (DECL_P (v));
4886 v = *ctx->lastprivate_conditional_map->get (v);
4887 tree t = create_tmp_var (TREE_TYPE (v));
4888 tree z = build_zero_cst (TREE_TYPE (v));
4889 tree orig_v
4890 = build_outer_var_ref (var, ctx,
4891 OMP_CLAUSE_LASTPRIVATE);
4892 gimple_seq_add_stmt (dlist,
4893 gimple_build_assign (t, z));
4894 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
4895 tree civar = DECL_VALUE_EXPR (v);
4896 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
4897 civar = unshare_expr (civar);
4898 TREE_OPERAND (civar, 1) = sctx.idx;
4899 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
4900 unshare_expr (civar));
4901 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
4902 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
4903 orig_v, unshare_expr (ivar)));
4904 tree cond = build2 (LT_EXPR, boolean_type_node, t,
4905 civar);
4906 x = build3 (COND_EXPR, void_type_node, cond, x,
4907 void_node);
4908 gimple_seq tseq = NULL;
4909 gimplify_and_add (x, &tseq);
4910 if (ctx->outer)
4911 lower_omp (&tseq, ctx->outer);
4912 gimple_seq_add_seq (&llist[1], tseq);
4914 if (y)
4916 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4917 if (y)
4918 gimplify_and_add (y, &llist[1]);
4920 break;
4922 if (omp_is_reference (var))
4924 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4925 tree new_vard = TREE_OPERAND (new_var, 0);
4926 gcc_assert (DECL_P (new_vard));
4927 tree type = TREE_TYPE (TREE_TYPE (new_vard));
4928 x = TYPE_SIZE_UNIT (type);
4929 if (TREE_CONSTANT (x))
4931 x = create_tmp_var_raw (type, get_name (var));
4932 gimple_add_tmp_var (x);
4933 TREE_ADDRESSABLE (x) = 1;
4934 x = build_fold_addr_expr_loc (clause_loc, x);
4935 x = fold_convert_loc (clause_loc,
4936 TREE_TYPE (new_vard), x);
4937 gimplify_assign (new_vard, x, ilist);
4941 if (nx)
4942 gimplify_and_add (nx, ilist);
4943 /* FALLTHRU */
4945 do_dtor:
4946 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4947 if (x)
4948 gimplify_and_add (x, dlist);
4949 break;
4951 case OMP_CLAUSE_LINEAR:
4952 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4953 goto do_firstprivate;
4954 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4955 x = NULL;
4956 else
4957 x = build_outer_var_ref (var, ctx);
4958 goto do_private;
4960 case OMP_CLAUSE_FIRSTPRIVATE:
4961 if (is_task_ctx (ctx))
4963 if ((omp_is_reference (var)
4964 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4965 || is_variable_sized (var))
4966 goto do_dtor;
4967 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4968 ctx))
4969 || use_pointer_for_field (var, NULL))
4971 x = build_receiver_ref (var, false, ctx);
4972 SET_DECL_VALUE_EXPR (new_var, x);
4973 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4974 goto do_dtor;
4977 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4978 && omp_is_reference (var))
4980 x = build_outer_var_ref (var, ctx);
4981 gcc_assert (TREE_CODE (x) == MEM_REF
4982 && integer_zerop (TREE_OPERAND (x, 1)));
4983 x = TREE_OPERAND (x, 0);
4984 x = lang_hooks.decls.omp_clause_copy_ctor
4985 (c, unshare_expr (new_var), x);
4986 gimplify_and_add (x, ilist);
4987 goto do_dtor;
4989 do_firstprivate:
4990 x = build_outer_var_ref (var, ctx);
4991 if (is_simd)
4993 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4994 && gimple_omp_for_combined_into_p (ctx->stmt))
4996 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4997 tree stept = TREE_TYPE (t);
4998 tree ct = omp_find_clause (clauses,
4999 OMP_CLAUSE__LOOPTEMP_);
5000 gcc_assert (ct);
5001 tree l = OMP_CLAUSE_DECL (ct);
5002 tree n1 = fd->loop.n1;
5003 tree step = fd->loop.step;
5004 tree itype = TREE_TYPE (l);
5005 if (POINTER_TYPE_P (itype))
5006 itype = signed_type_for (itype);
5007 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5008 if (TYPE_UNSIGNED (itype)
5009 && fd->loop.cond_code == GT_EXPR)
5010 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5011 fold_build1 (NEGATE_EXPR, itype, l),
5012 fold_build1 (NEGATE_EXPR,
5013 itype, step));
5014 else
5015 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5016 t = fold_build2 (MULT_EXPR, stept,
5017 fold_convert (stept, l), t);
5019 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5021 if (omp_is_reference (var))
5023 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5024 tree new_vard = TREE_OPERAND (new_var, 0);
5025 gcc_assert (DECL_P (new_vard));
5026 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5027 nx = TYPE_SIZE_UNIT (type);
5028 if (TREE_CONSTANT (nx))
5030 nx = create_tmp_var_raw (type,
5031 get_name (var));
5032 gimple_add_tmp_var (nx);
5033 TREE_ADDRESSABLE (nx) = 1;
5034 nx = build_fold_addr_expr_loc (clause_loc,
5035 nx);
5036 nx = fold_convert_loc (clause_loc,
5037 TREE_TYPE (new_vard),
5038 nx);
5039 gimplify_assign (new_vard, nx, ilist);
5043 x = lang_hooks.decls.omp_clause_linear_ctor
5044 (c, new_var, x, t);
5045 gimplify_and_add (x, ilist);
5046 goto do_dtor;
5049 if (POINTER_TYPE_P (TREE_TYPE (x)))
5050 x = fold_build2 (POINTER_PLUS_EXPR,
5051 TREE_TYPE (x), x, t);
5052 else
5053 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5056 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5057 || TREE_ADDRESSABLE (new_var)
5058 || omp_is_reference (var))
5059 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5060 ivar, lvar))
5062 if (omp_is_reference (var))
5064 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5065 tree new_vard = TREE_OPERAND (new_var, 0);
5066 gcc_assert (DECL_P (new_vard));
5067 SET_DECL_VALUE_EXPR (new_vard,
5068 build_fold_addr_expr (lvar));
5069 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5071 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5073 tree iv = create_tmp_var (TREE_TYPE (new_var));
5074 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5075 gimplify_and_add (x, ilist);
5076 gimple_stmt_iterator gsi
5077 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5078 gassign *g
5079 = gimple_build_assign (unshare_expr (lvar), iv);
5080 gsi_insert_before_without_update (&gsi, g,
5081 GSI_SAME_STMT);
5082 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5083 enum tree_code code = PLUS_EXPR;
5084 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5085 code = POINTER_PLUS_EXPR;
5086 g = gimple_build_assign (iv, code, iv, t);
5087 gsi_insert_before_without_update (&gsi, g,
5088 GSI_SAME_STMT);
5089 break;
5091 x = lang_hooks.decls.omp_clause_copy_ctor
5092 (c, unshare_expr (ivar), x);
5093 gimplify_and_add (x, &llist[0]);
5094 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5095 if (x)
5096 gimplify_and_add (x, &llist[1]);
5097 break;
5099 if (omp_is_reference (var))
5101 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5102 tree new_vard = TREE_OPERAND (new_var, 0);
5103 gcc_assert (DECL_P (new_vard));
5104 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5105 nx = TYPE_SIZE_UNIT (type);
5106 if (TREE_CONSTANT (nx))
5108 nx = create_tmp_var_raw (type, get_name (var));
5109 gimple_add_tmp_var (nx);
5110 TREE_ADDRESSABLE (nx) = 1;
5111 nx = build_fold_addr_expr_loc (clause_loc, nx);
5112 nx = fold_convert_loc (clause_loc,
5113 TREE_TYPE (new_vard), nx);
5114 gimplify_assign (new_vard, nx, ilist);
5118 x = lang_hooks.decls.omp_clause_copy_ctor
5119 (c, unshare_expr (new_var), x);
5120 gimplify_and_add (x, ilist);
5121 goto do_dtor;
5123 case OMP_CLAUSE__LOOPTEMP_:
5124 case OMP_CLAUSE__REDUCTEMP_:
5125 gcc_assert (is_taskreg_ctx (ctx));
5126 x = build_outer_var_ref (var, ctx);
5127 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5128 gimplify_and_add (x, ilist);
5129 break;
5131 case OMP_CLAUSE_COPYIN:
5132 by_ref = use_pointer_for_field (var, NULL);
5133 x = build_receiver_ref (var, by_ref, ctx);
5134 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5135 append_to_statement_list (x, &copyin_seq);
5136 copyin_by_ref |= by_ref;
5137 break;
5139 case OMP_CLAUSE_REDUCTION:
5140 case OMP_CLAUSE_IN_REDUCTION:
5141 /* OpenACC reductions are initialized using the
5142 GOACC_REDUCTION internal function. */
5143 if (is_gimple_omp_oacc (ctx->stmt))
5144 break;
5145 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5147 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5148 gimple *tseq;
5149 tree ptype = TREE_TYPE (placeholder);
5150 if (cond)
5152 x = error_mark_node;
5153 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5154 && !task_reduction_needs_orig_p)
5155 x = var;
5156 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5158 tree pptype = build_pointer_type (ptype);
5159 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5160 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5161 size_int (task_reduction_cnt_full
5162 + task_reduction_cntorig - 1),
5163 NULL_TREE, NULL_TREE);
5164 else
5166 unsigned int idx
5167 = *ctx->task_reduction_map->get (c);
5168 x = task_reduction_read (ilist, tskred_temp,
5169 pptype, 7 + 3 * idx);
5171 x = fold_convert (pptype, x);
5172 x = build_simple_mem_ref (x);
5175 else
5177 x = build_outer_var_ref (var, ctx);
5179 if (omp_is_reference (var)
5180 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5181 x = build_fold_addr_expr_loc (clause_loc, x);
5183 SET_DECL_VALUE_EXPR (placeholder, x);
5184 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5185 tree new_vard = new_var;
5186 if (omp_is_reference (var))
5188 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5189 new_vard = TREE_OPERAND (new_var, 0);
5190 gcc_assert (DECL_P (new_vard));
5192 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5193 if (is_simd
5194 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5195 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5196 rvarp = &rvar;
5197 if (is_simd
5198 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5199 ivar, lvar, rvarp,
5200 &rvar2))
5202 if (new_vard == new_var)
5204 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5205 SET_DECL_VALUE_EXPR (new_var, ivar);
5207 else
5209 SET_DECL_VALUE_EXPR (new_vard,
5210 build_fold_addr_expr (ivar));
5211 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5213 x = lang_hooks.decls.omp_clause_default_ctor
5214 (c, unshare_expr (ivar),
5215 build_outer_var_ref (var, ctx));
5216 if (rvarp)
5218 if (x)
5220 gimplify_and_add (x, &llist[0]);
5222 tree ivar2 = unshare_expr (lvar);
5223 TREE_OPERAND (ivar2, 1) = sctx.idx;
5224 x = lang_hooks.decls.omp_clause_default_ctor
5225 (c, ivar2, build_outer_var_ref (var, ctx));
5226 gimplify_and_add (x, &llist[0]);
5228 if (rvar2)
5230 x = lang_hooks.decls.omp_clause_default_ctor
5231 (c, unshare_expr (rvar2),
5232 build_outer_var_ref (var, ctx));
5233 gimplify_and_add (x, &llist[0]);
5236 /* For types that need construction, add another
5237 private var which will be default constructed
5238 and optionally initialized with
5239 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the loop
5240 we then want to assign this value instead of
5241 constructing and destructing it in each
5242 iteration. */
5243 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5244 gimple_add_tmp_var (nv);
5245 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5246 ? rvar2
5247 : ivar, 0),
5248 nv);
5249 x = lang_hooks.decls.omp_clause_default_ctor
5250 (c, nv, build_outer_var_ref (var, ctx));
5251 gimplify_and_add (x, ilist);
5253 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5255 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5256 x = DECL_VALUE_EXPR (new_vard);
5257 tree vexpr = nv;
5258 if (new_vard != new_var)
5259 vexpr = build_fold_addr_expr (nv);
5260 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5261 lower_omp (&tseq, ctx);
5262 SET_DECL_VALUE_EXPR (new_vard, x);
5263 gimple_seq_add_seq (ilist, tseq);
5264 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5267 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5268 if (x)
5269 gimplify_and_add (x, dlist);
5272 tree ref = build_outer_var_ref (var, ctx);
5273 x = unshare_expr (ivar);
5274 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5275 ref);
5276 gimplify_and_add (x, &llist[0]);
5278 ref = build_outer_var_ref (var, ctx);
5279 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5280 rvar);
5281 gimplify_and_add (x, &llist[3]);
5283 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5284 if (new_vard == new_var)
5285 SET_DECL_VALUE_EXPR (new_var, lvar);
5286 else
5287 SET_DECL_VALUE_EXPR (new_vard,
5288 build_fold_addr_expr (lvar));
5290 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5291 if (x)
5292 gimplify_and_add (x, &llist[1]);
5294 tree ivar2 = unshare_expr (lvar);
5295 TREE_OPERAND (ivar2, 1) = sctx.idx;
5296 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5297 if (x)
5298 gimplify_and_add (x, &llist[1]);
5300 if (rvar2)
5302 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5303 if (x)
5304 gimplify_and_add (x, &llist[1]);
5306 break;
5308 if (x)
5309 gimplify_and_add (x, &llist[0]);
5310 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5312 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5313 lower_omp (&tseq, ctx);
5314 gimple_seq_add_seq (&llist[0], tseq);
5316 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5317 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5318 lower_omp (&tseq, ctx);
5319 gimple_seq_add_seq (&llist[1], tseq);
5320 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5321 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5322 if (new_vard == new_var)
5323 SET_DECL_VALUE_EXPR (new_var, lvar);
5324 else
5325 SET_DECL_VALUE_EXPR (new_vard,
5326 build_fold_addr_expr (lvar));
5327 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5328 if (x)
5329 gimplify_and_add (x, &llist[1]);
5330 break;
5332 /* If this is a reference to a constant size reduction var
5333 with a placeholder, we haven't emitted the initializer
5334 for it because that is undesirable if SIMD arrays are used.
5335 But if they aren't used, we need to emit the deferred
5336 initialization now. */
5337 else if (omp_is_reference (var) && is_simd)
5338 handle_simd_reference (clause_loc, new_vard, ilist);
5340 tree lab2 = NULL_TREE;
5341 if (cond)
5343 gimple *g;
5344 if (!is_parallel_ctx (ctx))
5346 tree condv = create_tmp_var (boolean_type_node);
5347 tree m = build_simple_mem_ref (cond);
5348 g = gimple_build_assign (condv, m);
5349 gimple_seq_add_stmt (ilist, g);
5350 tree lab1
5351 = create_artificial_label (UNKNOWN_LOCATION);
5352 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5353 g = gimple_build_cond (NE_EXPR, condv,
5354 boolean_false_node,
5355 lab2, lab1);
5356 gimple_seq_add_stmt (ilist, g);
5357 gimple_seq_add_stmt (ilist,
5358 gimple_build_label (lab1));
5360 g = gimple_build_assign (build_simple_mem_ref (cond),
5361 boolean_true_node);
5362 gimple_seq_add_stmt (ilist, g);
5364 x = lang_hooks.decls.omp_clause_default_ctor
5365 (c, unshare_expr (new_var),
5366 cond ? NULL_TREE
5367 : build_outer_var_ref (var, ctx));
5368 if (x)
5369 gimplify_and_add (x, ilist);
5371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5372 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5374 if (x || (!is_simd
5375 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5377 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5378 gimple_add_tmp_var (nv);
5379 ctx->cb.decl_map->put (new_vard, nv);
5380 x = lang_hooks.decls.omp_clause_default_ctor
5381 (c, nv, build_outer_var_ref (var, ctx));
5382 if (x)
5383 gimplify_and_add (x, ilist);
5384 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5386 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5387 tree vexpr = nv;
5388 if (new_vard != new_var)
5389 vexpr = build_fold_addr_expr (nv);
5390 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5391 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5392 lower_omp (&tseq, ctx);
5393 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5394 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5395 gimple_seq_add_seq (ilist, tseq);
5397 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5398 if (is_simd && ctx->scan_exclusive)
5400 tree nv2
5401 = create_tmp_var_raw (TREE_TYPE (new_var));
5402 gimple_add_tmp_var (nv2);
5403 ctx->cb.decl_map->put (nv, nv2);
5404 x = lang_hooks.decls.omp_clause_default_ctor
5405 (c, nv2, build_outer_var_ref (var, ctx));
5406 gimplify_and_add (x, ilist);
5407 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5408 if (x)
5409 gimplify_and_add (x, dlist);
5411 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5412 if (x)
5413 gimplify_and_add (x, dlist);
5415 else if (is_simd
5416 && ctx->scan_exclusive
5417 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5419 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5420 gimple_add_tmp_var (nv2);
5421 ctx->cb.decl_map->put (new_vard, nv2);
5422 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5423 if (x)
5424 gimplify_and_add (x, dlist);
5426 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5427 goto do_dtor;
5430 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5432 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5433 lower_omp (&tseq, ctx);
5434 gimple_seq_add_seq (ilist, tseq);
5436 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5437 if (is_simd)
5439 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5440 lower_omp (&tseq, ctx);
5441 gimple_seq_add_seq (dlist, tseq);
5442 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5444 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5445 if (cond)
5447 if (lab2)
5448 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5449 break;
5451 goto do_dtor;
5453 else
5455 x = omp_reduction_init (c, TREE_TYPE (new_var));
5456 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5457 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5459 if (cond)
5461 gimple *g;
5462 tree lab2 = NULL_TREE;
5463 /* GOMP_taskgroup_reduction_register memsets the whole
5464 array to zero. If the initializer is zero, we don't
5465 need to initialize it again, just mark it as ever
5466 used unconditionally, i.e. cond = true. */
5467 if (initializer_zerop (x))
5469 g = gimple_build_assign (build_simple_mem_ref (cond),
5470 boolean_true_node);
5471 gimple_seq_add_stmt (ilist, g);
5472 break;
5475 /* Otherwise, emit
5476 if (!cond) { cond = true; new_var = x; } */
5477 if (!is_parallel_ctx (ctx))
5479 tree condv = create_tmp_var (boolean_type_node);
5480 tree m = build_simple_mem_ref (cond);
5481 g = gimple_build_assign (condv, m);
5482 gimple_seq_add_stmt (ilist, g);
5483 tree lab1
5484 = create_artificial_label (UNKNOWN_LOCATION);
5485 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5486 g = gimple_build_cond (NE_EXPR, condv,
5487 boolean_false_node,
5488 lab2, lab1);
5489 gimple_seq_add_stmt (ilist, g);
5490 gimple_seq_add_stmt (ilist,
5491 gimple_build_label (lab1));
5493 g = gimple_build_assign (build_simple_mem_ref (cond),
5494 boolean_true_node);
5495 gimple_seq_add_stmt (ilist, g);
5496 gimplify_assign (new_var, x, ilist);
5497 if (lab2)
5498 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5499 break;
5502 /* reduction(-:var) sums up the partial results, so it
5503 acts identically to reduction(+:var). */
5504 if (code == MINUS_EXPR)
5505 code = PLUS_EXPR;
5507 tree new_vard = new_var;
5508 if (is_simd && omp_is_reference (var))
5510 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5511 new_vard = TREE_OPERAND (new_var, 0);
5512 gcc_assert (DECL_P (new_vard));
5514 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5515 if (is_simd
5516 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5517 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5518 rvarp = &rvar;
5519 if (is_simd
5520 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5521 ivar, lvar, rvarp,
5522 &rvar2))
5524 if (new_vard != new_var)
5526 SET_DECL_VALUE_EXPR (new_vard,
5527 build_fold_addr_expr (lvar));
5528 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5531 tree ref = build_outer_var_ref (var, ctx);
5533 if (rvarp)
5535 gimplify_assign (ivar, ref, &llist[0]);
5536 ref = build_outer_var_ref (var, ctx);
5537 gimplify_assign (ref, rvar, &llist[3]);
5538 break;
5541 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5543 if (sctx.is_simt)
5545 if (!simt_lane)
5546 simt_lane = create_tmp_var (unsigned_type_node);
5547 x = build_call_expr_internal_loc
5548 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5549 TREE_TYPE (ivar), 2, ivar, simt_lane);
5550 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5551 gimplify_assign (ivar, x, &llist[2]);
5553 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5554 ref = build_outer_var_ref (var, ctx);
5555 gimplify_assign (ref, x, &llist[1]);
5558 else
5560 if (omp_is_reference (var) && is_simd)
5561 handle_simd_reference (clause_loc, new_vard, ilist);
5562 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5563 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5564 break;
5565 gimplify_assign (new_var, x, ilist);
5566 if (is_simd)
5568 tree ref = build_outer_var_ref (var, ctx);
5570 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5571 ref = build_outer_var_ref (var, ctx);
5572 gimplify_assign (ref, x, dlist);
5576 break;
5578 default:
5579 gcc_unreachable ();
5583 if (tskred_avar)
5585 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5586 TREE_THIS_VOLATILE (clobber) = 1;
5587 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5590 if (known_eq (sctx.max_vf, 1U))
5592 sctx.is_simt = false;
5593 if (ctx->lastprivate_conditional_map)
5595 if (gimple_omp_for_combined_into_p (ctx->stmt))
5597 /* Signal to lower_omp_1 that it should use parent context. */
5598 ctx->combined_into_simd_safelen0 = true;
5599 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5600 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5601 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5603 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5604 tree *v
5605 = ctx->lastprivate_conditional_map->get (o);
5606 tree po = lookup_decl (OMP_CLAUSE_DECL (c), ctx->outer);
5607 tree *pv
5608 = ctx->outer->lastprivate_conditional_map->get (po);
5609 *v = *pv;
5612 else
5614 /* When not vectorized, treat lastprivate(conditional:) like
5615 normal lastprivate, as there will be just one simd lane
5616 writing the privatized variable. */
5617 delete ctx->lastprivate_conditional_map;
5618 ctx->lastprivate_conditional_map = NULL;
5623 if (nonconst_simd_if)
5625 if (sctx.lane == NULL_TREE)
5627 sctx.idx = create_tmp_var (unsigned_type_node);
5628 sctx.lane = create_tmp_var (unsigned_type_node);
5630 /* FIXME: For now. */
5631 sctx.is_simt = false;
5634 if (sctx.lane || sctx.is_simt)
5636 uid = create_tmp_var (ptr_type_node, "simduid");
5637 /* We don't want uninit warnings on simduid; it is always uninitialized,
5638 since we use it only for its DECL_UID, never for its value. */
5639 TREE_NO_WARNING (uid) = 1;
5640 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5641 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5642 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5643 gimple_omp_for_set_clauses (ctx->stmt, c);
5645 /* Emit calls denoting privatized variables and initializing a pointer to
5646 a structure that holds private variables as fields, after the ompdevlow pass. */
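/* Roughly, the emitted sequence is
     simduid = GOMP_SIMT_ENTER (simduid, &priv1, ..., &privN);
     .omp_simt = GOMP_SIMT_ENTER_ALLOC (simduid);
   with the internal-call placeholders resolved by ompdevlow.  */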
5647 if (sctx.is_simt)
5649 sctx.simt_eargs[0] = uid;
5650 gimple *g
5651 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5652 gimple_call_set_lhs (g, uid);
5653 gimple_seq_add_stmt (ilist, g);
5654 sctx.simt_eargs.release ();
5656 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5657 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5658 gimple_call_set_lhs (g, simtrec);
5659 gimple_seq_add_stmt (ilist, g);
5661 if (sctx.lane)
5663 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
5664 2 + (nonconst_simd_if != NULL),
5665 uid, integer_zero_node,
5666 nonconst_simd_if);
5667 gimple_call_set_lhs (g, sctx.lane);
5668 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5669 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5670 g = gimple_build_assign (sctx.lane, INTEGER_CST,
5671 build_int_cst (unsigned_type_node, 0));
5672 gimple_seq_add_stmt (ilist, g);
5673 if (sctx.lastlane)
5675 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5676 2, uid, sctx.lane);
5677 gimple_call_set_lhs (g, sctx.lastlane);
5678 gimple_seq_add_stmt (dlist, g);
5679 gimple_seq_add_seq (dlist, llist[3]);
5681 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5682 if (llist[2])
5684 tree simt_vf = create_tmp_var (unsigned_type_node);
5685 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5686 gimple_call_set_lhs (g, simt_vf);
5687 gimple_seq_add_stmt (dlist, g);
5689 tree t = build_int_cst (unsigned_type_node, 1);
5690 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5691 gimple_seq_add_stmt (dlist, g);
5693 t = build_int_cst (unsigned_type_node, 0);
5694 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5695 gimple_seq_add_stmt (dlist, g);
5697 tree body = create_artificial_label (UNKNOWN_LOCATION);
5698 tree header = create_artificial_label (UNKNOWN_LOCATION);
5699 tree end = create_artificial_label (UNKNOWN_LOCATION);
5700 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5701 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5703 gimple_seq_add_seq (dlist, llist[2]);
5705 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5706 gimple_seq_add_stmt (dlist, g);
5708 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5709 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5710 gimple_seq_add_stmt (dlist, g);
5712 gimple_seq_add_stmt (dlist, gimple_build_label (end));
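	      /* A sketch of the sequence just emitted, assuming reduction
		 operation OP on the privatized copy IVAR and the butterfly
		 exchange built into llist[2] earlier:

			simt_vf = GOMP_SIMT_VF ();
			simt_lane = 1;
			goto header;
		      body:
			IVAR = IVAR OP GOMP_SIMT_XCHG_BFLY (IVAR, simt_lane);
			simt_lane = simt_lane << 1;
		      header:
			if (simt_lane < simt_vf) goto body; else goto end;
		      end:

		 i.e. a butterfly reduction across SIMT lanes in
		 log2 (simt_vf) steps.  */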
5714 for (int i = 0; i < 2; i++)
5715 if (llist[i])
5717 tree vf = create_tmp_var (unsigned_type_node);
5718 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5719 gimple_call_set_lhs (g, vf);
5720 gimple_seq *seq = i == 0 ? ilist : dlist;
5721 gimple_seq_add_stmt (seq, g);
5722 tree t = build_int_cst (unsigned_type_node, 0);
5723 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5724 gimple_seq_add_stmt (seq, g);
5725 tree body = create_artificial_label (UNKNOWN_LOCATION);
5726 tree header = create_artificial_label (UNKNOWN_LOCATION);
5727 tree end = create_artificial_label (UNKNOWN_LOCATION);
5728 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5729 gimple_seq_add_stmt (seq, gimple_build_label (body));
5730 gimple_seq_add_seq (seq, llist[i]);
5731 t = build_int_cst (unsigned_type_node, 1);
5732 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5733 gimple_seq_add_stmt (seq, g);
5734 gimple_seq_add_stmt (seq, gimple_build_label (header));
5735 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5736 gimple_seq_add_stmt (seq, g);
5737 gimple_seq_add_stmt (seq, gimple_build_label (end));
5740 if (sctx.is_simt)
5742 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5743 gimple *g
5744 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5745 gimple_seq_add_stmt (dlist, g);
5748 /* The copyin sequence must not be executed by the main thread, since
5749 that would result in self-copies. Perhaps not visible for scalars,
5750 but it certainly is for C++ operator=. */
5751 if (copyin_seq)
5753 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5755 x = build2 (NE_EXPR, boolean_type_node, x,
5756 build_int_cst (TREE_TYPE (x), 0));
5757 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5758 gimplify_and_add (x, ilist);
5761 /* If any copyin variable is passed by reference, we must ensure the
5762 master thread doesn't modify it before it is copied over in all
5763 threads. Similarly for variables in both firstprivate and
5764 lastprivate clauses we need to ensure the lastprivate copying
5765 happens after firstprivate copying in all threads. And similarly
5766 for UDRs if the initializer expression refers to omp_orig. */
5767 if (copyin_by_ref || lastprivate_firstprivate
5768 || (reduction_omp_orig_ref
5769 && !ctx->scan_inclusive
5770 && !ctx->scan_exclusive))
5772 /* Don't add any barrier for #pragma omp simd or
5773 #pragma omp distribute. */
5774 if (!is_task_ctx (ctx)
5775 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5776 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5777 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5780 /* If max_vf is non-zero, then we can use only a vectorization factor
5781 up to the max_vf we chose. So stick it into the safelen clause. */
5782 if (maybe_ne (sctx.max_vf, 0U))
5784 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5785 OMP_CLAUSE_SAFELEN);
5786 poly_uint64 safe_len;
5787 if (c == NULL_TREE
5788 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5789 && maybe_gt (safe_len, sctx.max_vf)))
5791 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5792 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5793 sctx.max_vf);
5794 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5795 gimple_omp_for_set_clauses (ctx->stmt, c);
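      /* For example, assuming the privatization code above settled on
	 sctx.max_vf == 8, a loop written as

		#pragma omp simd safelen(16)

	 has its safelen clause replaced by safelen(8), so later passes
	 never vectorize wider than the "omp simd array" copies created
	 here.  */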
5800 /* Create temporary variables for lastprivate(conditional:) implementation
5801 in context CTX with CLAUSES. */
5803 static void
5804 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
5806 tree iter_type = NULL_TREE;
5807 tree cond_ptr = NULL_TREE;
5808 tree iter_var = NULL_TREE;
5809 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5810 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
5811 tree next = *clauses;
5812 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
5813 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5814 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5816 if (is_simd)
5818 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
5819 gcc_assert (cc);
5820 if (iter_type == NULL_TREE)
5822 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
5823 iter_var = create_tmp_var_raw (iter_type);
5824 DECL_CONTEXT (iter_var) = current_function_decl;
5825 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
5826 DECL_CHAIN (iter_var) = ctx->block_vars;
5827 ctx->block_vars = iter_var;
5828 tree c3
5829 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
5830 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
5831 OMP_CLAUSE_DECL (c3) = iter_var;
5832 OMP_CLAUSE_CHAIN (c3) = *clauses;
5833 *clauses = c3;
5834 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
5836 next = OMP_CLAUSE_CHAIN (cc);
5837 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5838 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
5839 ctx->lastprivate_conditional_map->put (o, v);
5840 continue;
5842 if (iter_type == NULL)
5844 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
5846 struct omp_for_data fd;
5847 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
5848 NULL);
5849 iter_type = unsigned_type_for (fd.iter_type);
5851 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
5852 iter_type = unsigned_type_node;
5853 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
5854 if (c2)
5856 cond_ptr
5857 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
5858 OMP_CLAUSE_DECL (c2) = cond_ptr;
5860 else
5862 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
5863 DECL_CONTEXT (cond_ptr) = current_function_decl;
5864 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
5865 DECL_CHAIN (cond_ptr) = ctx->block_vars;
5866 ctx->block_vars = cond_ptr;
5867 c2 = build_omp_clause (UNKNOWN_LOCATION,
5868 OMP_CLAUSE__CONDTEMP_);
5869 OMP_CLAUSE_DECL (c2) = cond_ptr;
5870 OMP_CLAUSE_CHAIN (c2) = *clauses;
5871 *clauses = c2;
5873 iter_var = create_tmp_var_raw (iter_type);
5874 DECL_CONTEXT (iter_var) = current_function_decl;
5875 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
5876 DECL_CHAIN (iter_var) = ctx->block_vars;
5877 ctx->block_vars = iter_var;
5878 tree c3
5879 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
5880 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
5881 OMP_CLAUSE_DECL (c3) = iter_var;
5882 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
5883 OMP_CLAUSE_CHAIN (c2) = c3;
5884 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
5886 tree v = create_tmp_var_raw (iter_type);
5887 DECL_CONTEXT (v) = current_function_decl;
5888 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
5889 DECL_CHAIN (v) = ctx->block_vars;
5890 ctx->block_vars = v;
5891 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5892 ctx->lastprivate_conditional_map->put (o, v);
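/* Illustrative summary, assuming a single lastprivate(conditional: x)
   clause on a worksharing (non-simd) loop: the loop above creates a
   private iteration-stamp temporary V (recorded in
   ctx->lastprivate_conditional_map under the privatized decl), plus
   _CONDTEMP_ clauses exposing COND_PTR (a buffer shared by the team)
   and ITER_VAR (the current iteration number).  Conditional stores to
   x in the body stamp V, and the copy-out in lower_lastprivate_clauses
   compares V against the buffer slot to pick the winning thread's
   value.  */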
5897 /* Generate code to implement the LASTPRIVATE clauses. This is used for
5898 both parallel and workshare constructs. PREDICATE may be NULL if it's
5899 always true. BODY_P is the sequence to insert early initialization
5900 if needed, STMT_LIST is where the non-conditional lastprivate handling
5901 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
5902 section. */
5904 static void
5905 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
5906 gimple_seq *stmt_list, gimple_seq *cstmt_list,
5907 omp_context *ctx)
5909 tree x, c, label = NULL, orig_clauses = clauses;
5910 bool par_clauses = false;
5911 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
5912 unsigned HOST_WIDE_INT conditional_off = 0;
5914 /* Early exit if there are no lastprivate or linear clauses. */
5915 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
5916 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
5917 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
5918 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
5919 break;
5920 if (clauses == NULL)
5922 /* If this was a workshare clause, see if it had been combined
5923 with its parallel. In that case, look for the clauses on the
5924 parallel statement itself. */
5925 if (is_parallel_ctx (ctx))
5926 return;
5928 ctx = ctx->outer;
5929 if (ctx == NULL || !is_parallel_ctx (ctx))
5930 return;
5932 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5933 OMP_CLAUSE_LASTPRIVATE);
5934 if (clauses == NULL)
5935 return;
5936 par_clauses = true;
5939 bool maybe_simt = false;
5940 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5941 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5943 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
5944 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
5945 if (simduid)
5946 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
5949 if (predicate)
5951 gcond *stmt;
5952 tree label_true, arm1, arm2;
5953 enum tree_code pred_code = TREE_CODE (predicate);
5955 label = create_artificial_label (UNKNOWN_LOCATION);
5956 label_true = create_artificial_label (UNKNOWN_LOCATION);
5957 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
5959 arm1 = TREE_OPERAND (predicate, 0);
5960 arm2 = TREE_OPERAND (predicate, 1);
5961 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5962 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
5964 else
5966 arm1 = predicate;
5967 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5968 arm2 = boolean_false_node;
5969 pred_code = NE_EXPR;
5971 if (maybe_simt)
5973 c = build2 (pred_code, boolean_type_node, arm1, arm2);
5974 c = fold_convert (integer_type_node, c);
5975 simtcond = create_tmp_var (integer_type_node);
5976 gimplify_assign (simtcond, c, stmt_list);
5977 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
5978 1, simtcond);
5979 c = create_tmp_var (integer_type_node);
5980 gimple_call_set_lhs (g, c);
5981 gimple_seq_add_stmt (stmt_list, g);
5982 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
5983 label_true, label);
5985 else
5986 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
5987 gimple_seq_add_stmt (stmt_list, stmt);
5988 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
5991 tree cond_ptr = NULL_TREE;
5992 for (c = clauses; c ;)
5994 tree var, new_var;
5995 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5996 gimple_seq *this_stmt_list = stmt_list;
5997 tree lab2 = NULL_TREE;
5999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6000 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6001 && ctx->lastprivate_conditional_map
6002 && !ctx->combined_into_simd_safelen0)
6004 gcc_assert (body_p);
6005 if (simduid)
6006 goto next;
6007 if (cond_ptr == NULL_TREE)
6009 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6010 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6012 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6013 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6014 tree v = *ctx->lastprivate_conditional_map->get (o);
6015 gimplify_assign (v, build_zero_cst (type), body_p);
6016 this_stmt_list = cstmt_list;
6017 tree mem;
6018 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6020 mem = build2 (MEM_REF, type, cond_ptr,
6021 build_int_cst (TREE_TYPE (cond_ptr),
6022 conditional_off));
6023 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6025 else
6026 mem = build4 (ARRAY_REF, type, cond_ptr,
6027 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6028 tree mem2 = copy_node (mem);
6029 gimple_seq seq = NULL;
6030 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6031 gimple_seq_add_seq (this_stmt_list, seq);
6032 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6033 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6034 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6035 gimple_seq_add_stmt (this_stmt_list, g);
6036 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6037 gimplify_assign (mem2, v, this_stmt_list);
6040 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6041 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6042 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6044 var = OMP_CLAUSE_DECL (c);
6045 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6046 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6047 && is_taskloop_ctx (ctx))
6049 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6050 new_var = lookup_decl (var, ctx->outer);
6052 else
6054 new_var = lookup_decl (var, ctx);
6055 /* Avoid uninitialized warnings for lastprivate and
6056 for linear iterators. */
6057 if (predicate
6058 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6059 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6060 TREE_NO_WARNING (new_var) = 1;
6063 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6065 tree val = DECL_VALUE_EXPR (new_var);
6066 if (TREE_CODE (val) == ARRAY_REF
6067 && VAR_P (TREE_OPERAND (val, 0))
6068 && lookup_attribute ("omp simd array",
6069 DECL_ATTRIBUTES (TREE_OPERAND (val,
6070 0))))
6072 if (lastlane == NULL)
6074 lastlane = create_tmp_var (unsigned_type_node);
6075 gcall *g
6076 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6077 2, simduid,
6078 TREE_OPERAND (val, 1));
6079 gimple_call_set_lhs (g, lastlane);
6080 gimple_seq_add_stmt (this_stmt_list, g);
6082 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6083 TREE_OPERAND (val, 0), lastlane,
6084 NULL_TREE, NULL_TREE);
6085 TREE_THIS_NOTRAP (new_var) = 1;
6088 else if (maybe_simt)
6090 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6091 ? DECL_VALUE_EXPR (new_var)
6092 : new_var);
6093 if (simtlast == NULL)
6095 simtlast = create_tmp_var (unsigned_type_node);
6096 gcall *g = gimple_build_call_internal
6097 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6098 gimple_call_set_lhs (g, simtlast);
6099 gimple_seq_add_stmt (this_stmt_list, g);
6101 x = build_call_expr_internal_loc
6102 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6103 TREE_TYPE (val), 2, val, simtlast);
6104 new_var = unshare_expr (new_var);
6105 gimplify_assign (new_var, x, this_stmt_list);
6106 new_var = unshare_expr (new_var);
6109 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6110 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6112 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6113 gimple_seq_add_seq (this_stmt_list,
6114 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6115 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6117 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6118 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6120 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6121 gimple_seq_add_seq (this_stmt_list,
6122 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6123 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6126 x = NULL_TREE;
6127 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6128 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
6130 gcc_checking_assert (is_taskloop_ctx (ctx));
6131 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6132 ctx->outer->outer);
6133 if (is_global_var (ovar))
6134 x = ovar;
6136 if (!x)
6137 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6138 if (omp_is_reference (var))
6139 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6140 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6141 gimplify_and_add (x, this_stmt_list);
6143 if (lab2)
6144 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6147 next:
6148 c = OMP_CLAUSE_CHAIN (c);
6149 if (c == NULL && !par_clauses)
6151 /* If this was a workshare clause, see if it had been combined
6152 with its parallel. In that case, continue looking for the
6153 clauses also on the parallel statement itself. */
6154 if (is_parallel_ctx (ctx))
6155 break;
6157 ctx = ctx->outer;
6158 if (ctx == NULL || !is_parallel_ctx (ctx))
6159 break;
6161 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6162 OMP_CLAUSE_LASTPRIVATE);
6163 par_clauses = true;
6167 if (label)
6168 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
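/* Rough shape of what this emits, assuming PREDICATE tests "did I run
   the last iteration":

	if (PREDICATE) goto label_true; else goto label;
     label_true:
	<outer var = privatized var, once per lastprivate/linear clause>
     label:

   Under SIMT the predicate is first combined across lanes with
   GOMP_SIMT_VOTE_ANY, and each copy-out reads the winning lane's value
   via GOMP_SIMT_XCHG_IDX.  */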
6171 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6172 (which might be a placeholder). INNER is true if this is an inner
6173 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6174 join markers. Generate the before-loop forking sequence in
6175 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6176 general form of these sequences is
6178 GOACC_REDUCTION_SETUP
6179 GOACC_FORK
6180 GOACC_REDUCTION_INIT
6182 GOACC_REDUCTION_FINI
6183 GOACC_JOIN
6184 GOACC_REDUCTION_TEARDOWN. */
6186 static void
6187 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6188 gcall *fork, gcall *join, gimple_seq *fork_seq,
6189 gimple_seq *join_seq, omp_context *ctx)
6191 gimple_seq before_fork = NULL;
6192 gimple_seq after_fork = NULL;
6193 gimple_seq before_join = NULL;
6194 gimple_seq after_join = NULL;
6195 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6196 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6197 unsigned offset = 0;
6199 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6200 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6202 tree orig = OMP_CLAUSE_DECL (c);
6203 tree var = maybe_lookup_decl (orig, ctx);
6204 tree ref_to_res = NULL_TREE;
6205 tree incoming, outgoing, v1, v2, v3;
6206 bool is_private = false;
6208 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6209 if (rcode == MINUS_EXPR)
6210 rcode = PLUS_EXPR;
6211 else if (rcode == TRUTH_ANDIF_EXPR)
6212 rcode = BIT_AND_EXPR;
6213 else if (rcode == TRUTH_ORIF_EXPR)
6214 rcode = BIT_IOR_EXPR;
6215 tree op = build_int_cst (unsigned_type_node, rcode);
6217 if (!var)
6218 var = orig;
6220 incoming = outgoing = var;
6222 if (!inner)
6224 /* See if an outer construct also reduces this variable. */
6225 omp_context *outer = ctx;
6227 while (omp_context *probe = outer->outer)
6229 enum gimple_code type = gimple_code (probe->stmt);
6230 tree cls;
6232 switch (type)
6234 case GIMPLE_OMP_FOR:
6235 cls = gimple_omp_for_clauses (probe->stmt);
6236 break;
6238 case GIMPLE_OMP_TARGET:
6239 if (gimple_omp_target_kind (probe->stmt)
6240 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6241 goto do_lookup;
6243 cls = gimple_omp_target_clauses (probe->stmt);
6244 break;
6246 default:
6247 goto do_lookup;
6250 outer = probe;
6251 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6252 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6253 && orig == OMP_CLAUSE_DECL (cls))
6255 incoming = outgoing = lookup_decl (orig, probe);
6256 goto has_outer_reduction;
6258 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6259 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6260 && orig == OMP_CLAUSE_DECL (cls))
6262 is_private = true;
6263 goto do_lookup;
6267 do_lookup:
6268 /* This is the outermost construct with this reduction;
6269 see if there's a mapping for it. */
6270 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6271 && maybe_lookup_field (orig, outer) && !is_private)
6273 ref_to_res = build_receiver_ref (orig, false, outer);
6274 if (omp_is_reference (orig))
6275 ref_to_res = build_simple_mem_ref (ref_to_res);
6277 tree type = TREE_TYPE (var);
6278 if (POINTER_TYPE_P (type))
6279 type = TREE_TYPE (type);
6281 outgoing = var;
6282 incoming = omp_reduction_init_op (loc, rcode, type);
6284 else
6286 /* Try to look at enclosing contexts for the reduction var;
6287 use the original if no mapping is found. */
6288 tree t = NULL_TREE;
6289 omp_context *c = ctx->outer;
6290 while (c && !t)
6292 t = maybe_lookup_decl (orig, c);
6293 c = c->outer;
6295 incoming = outgoing = (t ? t : orig);
6298 has_outer_reduction:;
6301 if (!ref_to_res)
6302 ref_to_res = integer_zero_node;
6304 if (omp_is_reference (orig))
6306 tree type = TREE_TYPE (var);
6307 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6309 if (!inner)
6311 tree x = create_tmp_var (TREE_TYPE (type), id);
6312 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6315 v1 = create_tmp_var (type, id);
6316 v2 = create_tmp_var (type, id);
6317 v3 = create_tmp_var (type, id);
6319 gimplify_assign (v1, var, fork_seq);
6320 gimplify_assign (v2, var, fork_seq);
6321 gimplify_assign (v3, var, fork_seq);
6323 var = build_simple_mem_ref (var);
6324 v1 = build_simple_mem_ref (v1);
6325 v2 = build_simple_mem_ref (v2);
6326 v3 = build_simple_mem_ref (v3);
6327 outgoing = build_simple_mem_ref (outgoing);
6329 if (!TREE_CONSTANT (incoming))
6330 incoming = build_simple_mem_ref (incoming);
6332 else
6333 v1 = v2 = v3 = var;
6335 /* Determine position in reduction buffer, which may be used
6336 by the target. The parser has ensured that this is not a
6337 variable-sized type. */
6338 fixed_size_mode mode
6339 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6340 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6341 offset = (offset + align - 1) & ~(align - 1);
6342 tree off = build_int_cst (sizetype, offset);
6343 offset += GET_MODE_SIZE (mode);
6345 if (!init_code)
6347 init_code = build_int_cst (integer_type_node,
6348 IFN_GOACC_REDUCTION_INIT);
6349 fini_code = build_int_cst (integer_type_node,
6350 IFN_GOACC_REDUCTION_FINI);
6351 setup_code = build_int_cst (integer_type_node,
6352 IFN_GOACC_REDUCTION_SETUP);
6353 teardown_code = build_int_cst (integer_type_node,
6354 IFN_GOACC_REDUCTION_TEARDOWN);
6357 tree setup_call
6358 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6359 TREE_TYPE (var), 6, setup_code,
6360 unshare_expr (ref_to_res),
6361 incoming, level, op, off);
6362 tree init_call
6363 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6364 TREE_TYPE (var), 6, init_code,
6365 unshare_expr (ref_to_res),
6366 v1, level, op, off);
6367 tree fini_call
6368 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6369 TREE_TYPE (var), 6, fini_code,
6370 unshare_expr (ref_to_res),
6371 v2, level, op, off);
6372 tree teardown_call
6373 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6374 TREE_TYPE (var), 6, teardown_code,
6375 ref_to_res, v3, level, op, off);
6377 gimplify_assign (v1, setup_call, &before_fork);
6378 gimplify_assign (v2, init_call, &after_fork);
6379 gimplify_assign (v3, fini_call, &before_join);
6380 gimplify_assign (outgoing, teardown_call, &after_join);
6383 /* Now stitch things together. */
6384 gimple_seq_add_seq (fork_seq, before_fork);
6385 if (fork)
6386 gimple_seq_add_stmt (fork_seq, fork);
6387 gimple_seq_add_seq (fork_seq, after_fork);
6389 gimple_seq_add_seq (join_seq, before_join);
6390 if (join)
6391 gimple_seq_add_stmt (join_seq, join);
6392 gimple_seq_add_seq (join_seq, after_join);
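/* The calls built above, written out (for reference reductions V1, V2
   and V3 are distinct temporaries, otherwise all three are VAR):

	V1 = GOACC_REDUCTION (SETUP, REF_TO_RES, INCOMING, LEVEL, OP, OFF);
	GOACC_FORK
	V2 = GOACC_REDUCTION (INIT, REF_TO_RES, V1, LEVEL, OP, OFF);
	  ... loop body ...
	V3 = GOACC_REDUCTION (FINI, REF_TO_RES, V2, LEVEL, OP, OFF);
	GOACC_JOIN
	OUTGOING = GOACC_REDUCTION (TEARDOWN, REF_TO_RES, V3, LEVEL, OP, OFF);  */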
6395 /* Generate code to implement the REDUCTION clauses and append it
6396 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
6397 that should also be emitted inside the critical section;
6398 in that case clear *CLIST afterwards, otherwise leave it as is
6399 and let the caller emit it itself. */
6401 static void
6402 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6403 gimple_seq *clist, omp_context *ctx)
6405 gimple_seq sub_seq = NULL;
6406 gimple *stmt;
6407 tree x, c;
6408 int count = 0;
6410 /* OpenACC loop reductions are handled elsewhere. */
6411 if (is_gimple_omp_oacc (ctx->stmt))
6412 return;
6414 /* SIMD reductions are handled in lower_rec_input_clauses. */
6415 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6416 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
6417 return;
6419 /* Inscan reductions are handled elsewhere. */
6420 if (ctx->scan_inclusive || ctx->scan_exclusive)
6421 return;
6423 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6424 update in that case, otherwise use a lock. */
6425 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6426 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6427 && !OMP_CLAUSE_REDUCTION_TASK (c))
6429 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6430 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6432 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6433 count = -1;
6434 break;
6436 count++;
6439 if (count == 0)
6440 return;
6442 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6444 tree var, ref, new_var, orig_var;
6445 enum tree_code code;
6446 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6448 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6449 || OMP_CLAUSE_REDUCTION_TASK (c))
6450 continue;
6452 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6453 orig_var = var = OMP_CLAUSE_DECL (c);
6454 if (TREE_CODE (var) == MEM_REF)
6456 var = TREE_OPERAND (var, 0);
6457 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6458 var = TREE_OPERAND (var, 0);
6459 if (TREE_CODE (var) == ADDR_EXPR)
6460 var = TREE_OPERAND (var, 0);
6461 else
6463 /* If this is a pointer- or reference-based array
6464 section, the var could be private in the outer
6465 context, e.g. on an orphaned loop construct. Pretend this
6466 is the private variable's outer reference. */
6467 ccode = OMP_CLAUSE_PRIVATE;
6468 if (TREE_CODE (var) == INDIRECT_REF)
6469 var = TREE_OPERAND (var, 0);
6471 orig_var = var;
6472 if (is_variable_sized (var))
6474 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6475 var = DECL_VALUE_EXPR (var);
6476 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6477 var = TREE_OPERAND (var, 0);
6478 gcc_assert (DECL_P (var));
6481 new_var = lookup_decl (var, ctx);
6482 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6483 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6484 ref = build_outer_var_ref (var, ctx, ccode);
6485 code = OMP_CLAUSE_REDUCTION_CODE (c);
6487 /* reduction(-:var) sums up the partial results, so it acts
6488 identically to reduction(+:var). */
6489 if (code == MINUS_EXPR)
6490 code = PLUS_EXPR;
6492 if (count == 1)
6494 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6496 addr = save_expr (addr);
6497 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6498 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6499 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6500 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6501 gimplify_and_add (x, stmt_seqp);
6502 return;
6504 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6506 tree d = OMP_CLAUSE_DECL (c);
6507 tree type = TREE_TYPE (d);
6508 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6509 tree i = create_tmp_var (TREE_TYPE (v));
6510 tree ptype = build_pointer_type (TREE_TYPE (type));
6511 tree bias = TREE_OPERAND (d, 1);
6512 d = TREE_OPERAND (d, 0);
6513 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6515 tree b = TREE_OPERAND (d, 1);
6516 b = maybe_lookup_decl (b, ctx);
6517 if (b == NULL)
6519 b = TREE_OPERAND (d, 1);
6520 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6522 if (integer_zerop (bias))
6523 bias = b;
6524 else
6526 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6527 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6528 TREE_TYPE (b), b, bias);
6530 d = TREE_OPERAND (d, 0);
6532 /* For ref, build_outer_var_ref already performs this, so
6533 only new_var needs a dereference. */
6534 if (TREE_CODE (d) == INDIRECT_REF)
6536 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6537 gcc_assert (omp_is_reference (var) && var == orig_var);
6539 else if (TREE_CODE (d) == ADDR_EXPR)
6541 if (orig_var == var)
6543 new_var = build_fold_addr_expr (new_var);
6544 ref = build_fold_addr_expr (ref);
6547 else
6549 gcc_assert (orig_var == var);
6550 if (omp_is_reference (var))
6551 ref = build_fold_addr_expr (ref);
6553 if (DECL_P (v))
6555 tree t = maybe_lookup_decl (v, ctx);
6556 if (t)
6557 v = t;
6558 else
6559 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6560 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6562 if (!integer_zerop (bias))
6564 bias = fold_convert_loc (clause_loc, sizetype, bias);
6565 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6566 TREE_TYPE (new_var), new_var,
6567 unshare_expr (bias));
6568 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6569 TREE_TYPE (ref), ref, bias);
6571 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6572 ref = fold_convert_loc (clause_loc, ptype, ref);
6573 tree m = create_tmp_var (ptype);
6574 gimplify_assign (m, new_var, stmt_seqp);
6575 new_var = m;
6576 m = create_tmp_var (ptype);
6577 gimplify_assign (m, ref, stmt_seqp);
6578 ref = m;
6579 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6580 tree body = create_artificial_label (UNKNOWN_LOCATION);
6581 tree end = create_artificial_label (UNKNOWN_LOCATION);
6582 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6583 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6584 tree out = build_simple_mem_ref_loc (clause_loc, ref);
6585 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6587 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6588 tree decl_placeholder
6589 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6590 SET_DECL_VALUE_EXPR (placeholder, out);
6591 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6592 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6593 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6594 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6595 gimple_seq_add_seq (&sub_seq,
6596 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6597 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6598 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6599 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6601 else
6603 x = build2 (code, TREE_TYPE (out), out, priv);
6604 out = unshare_expr (out);
6605 gimplify_assign (out, x, &sub_seq);
6607 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6608 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6609 gimple_seq_add_stmt (&sub_seq, g);
6610 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6611 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6612 gimple_seq_add_stmt (&sub_seq, g);
6613 g = gimple_build_assign (i, PLUS_EXPR, i,
6614 build_int_cst (TREE_TYPE (i), 1));
6615 gimple_seq_add_stmt (&sub_seq, g);
6616 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6617 gimple_seq_add_stmt (&sub_seq, g);
6618 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6620 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6622 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6624 if (omp_is_reference (var)
6625 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6626 TREE_TYPE (ref)))
6627 ref = build_fold_addr_expr_loc (clause_loc, ref);
6628 SET_DECL_VALUE_EXPR (placeholder, ref);
6629 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6630 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6631 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6632 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6633 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6635 else
6637 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6638 ref = build_outer_var_ref (var, ctx);
6639 gimplify_assign (ref, x, &sub_seq);
6643 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6645 gimple_seq_add_stmt (stmt_seqp, stmt);
6647 gimple_seq_add_seq (stmt_seqp, sub_seq);
6649 if (clist)
6651 gimple_seq_add_seq (stmt_seqp, *clist);
6652 *clist = NULL;
6655 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
6657 gimple_seq_add_stmt (stmt_seqp, stmt);
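/* For illustration: with exactly one scalar reduction(+:x) clause the
   merge degenerates to a single relaxed atomic update,

	#pragma omp atomic update
	  x_outer = x_outer + x_private;

   whereas with several clauses, an array section or a UDR the whole
   SUB_SEQ runs between GOMP_atomic_start () and GOMP_atomic_end ().  */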
6661 /* Generate code to implement the COPYPRIVATE clauses. */
6663 static void
6664 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6665 omp_context *ctx)
6667 tree c;
6669 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6671 tree var, new_var, ref, x;
6672 bool by_ref;
6673 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6675 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6676 continue;
6678 var = OMP_CLAUSE_DECL (c);
6679 by_ref = use_pointer_for_field (var, NULL);
6681 ref = build_sender_ref (var, ctx);
6682 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6683 if (by_ref)
6685 x = build_fold_addr_expr_loc (clause_loc, new_var);
6686 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6688 gimplify_assign (ref, x, slist);
6690 ref = build_receiver_ref (var, false, ctx);
6691 if (by_ref)
6693 ref = fold_convert_loc (clause_loc,
6694 build_pointer_type (TREE_TYPE (new_var)),
6695 ref);
6696 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6698 if (omp_is_reference (var))
6700 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6701 ref = build_simple_mem_ref_loc (clause_loc, ref);
6702 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6704 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6705 gimplify_and_add (x, rlist);
6710 /* Generate code to implement the clauses FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6711 and REDUCTION from the sender (aka parent) side. */
6713 static void
6714 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
6715 omp_context *ctx)
6717 tree c, t;
6718 int ignored_looptemp = 0;
6719 bool is_taskloop = false;
6721 /* For taskloop, ignore the first two _looptemp_ clauses; those are
6722 initialized by GOMP_taskloop. */
6723 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
6725 ignored_looptemp = 2;
6726 is_taskloop = true;
6729 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6731 tree val, ref, x, var;
6732 bool by_ref, do_in = false, do_out = false;
6733 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6735 switch (OMP_CLAUSE_CODE (c))
6737 case OMP_CLAUSE_PRIVATE:
6738 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6739 break;
6740 continue;
6741 case OMP_CLAUSE_FIRSTPRIVATE:
6742 case OMP_CLAUSE_COPYIN:
6743 case OMP_CLAUSE_LASTPRIVATE:
6744 case OMP_CLAUSE_IN_REDUCTION:
6745 case OMP_CLAUSE__REDUCTEMP_:
6746 break;
6747 case OMP_CLAUSE_REDUCTION:
6748 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
6749 continue;
6750 break;
6751 case OMP_CLAUSE_SHARED:
6752 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6753 break;
6754 continue;
6755 case OMP_CLAUSE__LOOPTEMP_:
6756 if (ignored_looptemp)
6758 ignored_looptemp--;
6759 continue;
6761 break;
6762 default:
6763 continue;
6766 val = OMP_CLAUSE_DECL (c);
6767 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6768 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
6769 && TREE_CODE (val) == MEM_REF)
6771 val = TREE_OPERAND (val, 0);
6772 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
6773 val = TREE_OPERAND (val, 0);
6774 if (TREE_CODE (val) == INDIRECT_REF
6775 || TREE_CODE (val) == ADDR_EXPR)
6776 val = TREE_OPERAND (val, 0);
6777 if (is_variable_sized (val))
6778 continue;
6781 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6782 outer taskloop region. */
6783 omp_context *ctx_for_o = ctx;
6784 if (is_taskloop
6785 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
6786 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6787 ctx_for_o = ctx->outer;
6789 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
6791 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
6792 && is_global_var (var)
6793 && (val == OMP_CLAUSE_DECL (c)
6794 || !is_task_ctx (ctx)
6795 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
6796 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
6797 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
6798 != POINTER_TYPE)))))
6799 continue;
6801 t = omp_member_access_dummy_var (var);
6802 if (t)
6804 var = DECL_VALUE_EXPR (var);
6805 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
6806 if (o != t)
6807 var = unshare_and_remap (var, t, o);
6808 else
6809 var = unshare_expr (var);
6812 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
6814 /* Handle taskloop firstprivate/lastprivate, where the
6815 lastprivate on GIMPLE_OMP_TASK is represented as
6816 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6817 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
6818 x = omp_build_component_ref (ctx->sender_decl, f);
6819 if (use_pointer_for_field (val, ctx))
6820 var = build_fold_addr_expr (var);
6821 gimplify_assign (x, var, ilist);
6822 DECL_ABSTRACT_ORIGIN (f) = NULL;
6823 continue;
6826 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6827 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
6828 || val == OMP_CLAUSE_DECL (c))
6829 && is_variable_sized (val))
6830 continue;
6831 by_ref = use_pointer_for_field (val, NULL);
6833 switch (OMP_CLAUSE_CODE (c))
6835 case OMP_CLAUSE_FIRSTPRIVATE:
6836 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
6837 && !by_ref
6838 && is_task_ctx (ctx))
6839 TREE_NO_WARNING (var) = 1;
6840 do_in = true;
6841 break;
6843 case OMP_CLAUSE_PRIVATE:
6844 case OMP_CLAUSE_COPYIN:
6845 case OMP_CLAUSE__LOOPTEMP_:
6846 case OMP_CLAUSE__REDUCTEMP_:
6847 do_in = true;
6848 break;
6850 case OMP_CLAUSE_LASTPRIVATE:
6851 if (by_ref || omp_is_reference (val))
6853 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6854 continue;
6855 do_in = true;
6857 else
6859 do_out = true;
6860 if (lang_hooks.decls.omp_private_outer_ref (val))
6861 do_in = true;
6863 break;
6865 case OMP_CLAUSE_REDUCTION:
6866 case OMP_CLAUSE_IN_REDUCTION:
6867 do_in = true;
6868 if (val == OMP_CLAUSE_DECL (c))
6870 if (is_task_ctx (ctx))
6871 by_ref = use_pointer_for_field (val, ctx);
6872 else
6873 do_out = !(by_ref || omp_is_reference (val));
6875 else
6876 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
6877 break;
6879 default:
6880 gcc_unreachable ();
6883 if (do_in)
6885 ref = build_sender_ref (val, ctx);
6886 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
6887 gimplify_assign (ref, x, ilist);
6888 if (is_task_ctx (ctx))
6889 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
6892 if (do_out)
6894 ref = build_sender_ref (val, ctx);
6895 gimplify_assign (var, ref, olist);
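/* A rough picture of the result for "#pragma omp parallel
   firstprivate(x) lastprivate(y)", with the sender record spelled
   .omp_data_o as it appears in GCC dumps:

	.omp_data_o.x = x;	<- ilist, before the region (do_in)
	...children run...
	y = .omp_data_o.y;	<- olist, after the region (do_out)  */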
6900 /* Generate code to implement SHARED from the sender (aka parent)
6901 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6902 list things that got automatically shared. */
6904 static void
6905 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
6907 tree var, ovar, nvar, t, f, x, record_type;
6909 if (ctx->record_type == NULL)
6910 return;
6912 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
6913 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
6915 ovar = DECL_ABSTRACT_ORIGIN (f);
6916 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
6917 continue;
6919 nvar = maybe_lookup_decl (ovar, ctx);
6920 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
6921 continue;
6923 /* If CTX is a nested parallel directive, find the immediately
6924 enclosing parallel or workshare construct that contains a
6925 mapping for OVAR. */
6926 var = lookup_decl_in_outer_ctx (ovar, ctx);
6928 t = omp_member_access_dummy_var (var);
6929 if (t)
6931 var = DECL_VALUE_EXPR (var);
6932 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
6933 if (o != t)
6934 var = unshare_and_remap (var, t, o);
6935 else
6936 var = unshare_expr (var);
6939 if (use_pointer_for_field (ovar, ctx))
6941 x = build_sender_ref (ovar, ctx);
6942 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
6943 && TREE_TYPE (f) == TREE_TYPE (ovar))
6945 gcc_assert (is_parallel_ctx (ctx)
6946 && DECL_ARTIFICIAL (ovar));
6947 /* _condtemp_ clause. */
6948 var = build_constructor (TREE_TYPE (x), NULL);
6950 else
6951 var = build_fold_addr_expr (var);
6952 gimplify_assign (x, var, ilist);
6954 else
6956 x = build_sender_ref (ovar, ctx);
6957 gimplify_assign (x, var, ilist);
6959 if (!TREE_READONLY (var)
6960 /* We don't need to receive a new reference to a result
6961 or parm decl. In fact we may not store to it, as that would
6962 invalidate any pending RSO and generate wrong gimple
6963 during inlining. */
6964 && !((TREE_CODE (var) == RESULT_DECL
6965 || TREE_CODE (var) == PARM_DECL)
6966 && DECL_BY_REFERENCE (var)))
6968 x = build_sender_ref (ovar, ctx);
6969 gimplify_assign (var, x, olist);
6975 /* Emit an OpenACC head marker call, encapsulating the partitioning and
6976 other information that must be processed by the target compiler.
6977 Return the maximum number of dimensions the associated loop might
6978 be partitioned over. */
6980 static unsigned
6981 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
6982 gimple_seq *seq, omp_context *ctx)
6984 unsigned levels = 0;
6985 unsigned tag = 0;
6986 tree gang_static = NULL_TREE;
6987 auto_vec<tree, 5> args;
6989 args.quick_push (build_int_cst
6990 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
6991 args.quick_push (ddvar);
6992 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6994 switch (OMP_CLAUSE_CODE (c))
6996 case OMP_CLAUSE_GANG:
6997 tag |= OLF_DIM_GANG;
6998 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
6999 /* static:* is represented by -1, and we can ignore it, as
7000 scheduling is always static. */
7001 if (gang_static && integer_minus_onep (gang_static))
7002 gang_static = NULL_TREE;
7003 levels++;
7004 break;
7006 case OMP_CLAUSE_WORKER:
7007 tag |= OLF_DIM_WORKER;
7008 levels++;
7009 break;
7011 case OMP_CLAUSE_VECTOR:
7012 tag |= OLF_DIM_VECTOR;
7013 levels++;
7014 break;
7016 case OMP_CLAUSE_SEQ:
7017 tag |= OLF_SEQ;
7018 break;
7020 case OMP_CLAUSE_AUTO:
7021 tag |= OLF_AUTO;
7022 break;
7024 case OMP_CLAUSE_INDEPENDENT:
7025 tag |= OLF_INDEPENDENT;
7026 break;
7028 case OMP_CLAUSE_TILE:
7029 tag |= OLF_TILE;
7030 break;
7032 default:
7033 continue;
7037 if (gang_static)
7039 if (DECL_P (gang_static))
7040 gang_static = build_outer_var_ref (gang_static, ctx);
7041 tag |= OLF_GANG_STATIC;
7044 /* In a parallel region, loops are implicitly INDEPENDENT. */
7045 omp_context *tgt = enclosing_target_ctx (ctx);
7046 if (!tgt || is_oacc_parallel (tgt))
7047 tag |= OLF_INDEPENDENT;
7049 if (tag & OLF_TILE)
7050 /* Tiling could use all 3 levels. */
7051 levels = 3;
7052 else
7054 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7055 Ensure at least one level, or 2 for possible auto
7056 partitioning. */
7057 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7058 << OLF_DIM_BASE) | OLF_SEQ));
7060 if (levels < 1u + maybe_auto)
7061 levels = 1u + maybe_auto;
7064 args.quick_push (build_int_cst (integer_type_node, levels));
7065 args.quick_push (build_int_cst (integer_type_node, tag));
7066 if (gang_static)
7067 args.quick_push (gang_static);
7069 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7070 gimple_set_location (call, loc);
7071 gimple_set_lhs (call, ddvar);
7072 gimple_seq_add_stmt (seq, call);
7074 return levels;
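/* For instance, "#pragma acc loop gang vector" produces roughly

	.data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 2, TAG);

   with 2 partitioning levels and TAG encoding
   OLF_DIM_GANG | OLF_DIM_VECTOR (plus OLF_INDEPENDENT when inside an
   OpenACC parallel region).  */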
7077 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
7078 non-NULL, is the partitioning level of the enclosed region. */
7080 static void
7081 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7082 tree tofollow, gimple_seq *seq)
7084 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7085 : IFN_UNIQUE_OACC_TAIL_MARK);
7086 tree marker = build_int_cst (integer_type_node, marker_kind);
7087 int nargs = 2 + (tofollow != NULL_TREE);
7088 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7089 marker, ddvar, tofollow);
7090 gimple_set_location (call, loc);
7091 gimple_set_lhs (call, ddvar);
7092 gimple_seq_add_stmt (seq, call);
7095 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7096 the loop clauses, from which we extract reductions. Initialize
7097 HEAD and TAIL. */
7099 static void
7100 lower_oacc_head_tail (location_t loc, tree clauses,
7101 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7103 bool inner = false;
7104 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7105 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7107 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7108 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7109 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7111 gcc_assert (count);
7112 for (unsigned done = 1; count; count--, done++)
7114 gimple_seq fork_seq = NULL;
7115 gimple_seq join_seq = NULL;
7117 tree place = build_int_cst (integer_type_node, -1);
7118 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7119 fork_kind, ddvar, place);
7120 gimple_set_location (fork, loc);
7121 gimple_set_lhs (fork, ddvar);
7123 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7124 join_kind, ddvar, place);
7125 gimple_set_location (join, loc);
7126 gimple_set_lhs (join, ddvar);
7128 /* Mark the beginning of this level sequence. */
7129 if (inner)
7130 lower_oacc_loop_marker (loc, ddvar, true,
7131 build_int_cst (integer_type_node, count),
7132 &fork_seq);
7133 lower_oacc_loop_marker (loc, ddvar, false,
7134 build_int_cst (integer_type_node, done),
7135 &join_seq);
7137 lower_oacc_reductions (loc, clauses, place, inner,
7138 fork, join, &fork_seq, &join_seq, ctx);
7140 /* Append this level to head. */
7141 gimple_seq_add_seq (head, fork_seq);
7142 /* Prepend it to tail. */
7143 gimple_seq_add_seq (&join_seq, *tail);
7144 *tail = join_seq;
7146 inner = true;
7149 /* Mark the end of the sequence. */
7150 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7151 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
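/* The net effect, sketched for COUNT == 2: HEAD collects the fork
   sequences outermost first and TAIL the join sequences innermost
   first, each bracketed by head/tail markers, so the two lists nest
   like parentheses around the loop body:

	HEAD: mark fork(outer) mark fork(inner) mark
	BODY
	TAIL: mark join(inner) mark join(outer) mark

   with each level's reduction setup/init and fini/teardown code placed
   around its fork and join by lower_oacc_reductions.  */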
7154 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7155 catch handler and return it. This prevents programs from violating the
7156 structured block semantics with throws. */
7158 static gimple_seq
7159 maybe_catch_exception (gimple_seq body)
7161 gimple *g;
7162 tree decl;
7164 if (!flag_exceptions)
7165 return body;
7167 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7168 decl = lang_hooks.eh_protect_cleanup_actions ();
7169 else
7170 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7172 g = gimple_build_eh_must_not_throw (decl);
7173 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7174 GIMPLE_TRY_CATCH);
7176 return gimple_seq_alloc_with_stmt (g);
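/* In GIMPLE terms the wrapping is simply

	try { BODY } catch { <eh_must_not_throw (DECL)> }

   where DECL is the language's cleanup action (std::terminate for C++)
   or __builtin_trap as the fallback, so an exception escaping the
   structured block terminates the program instead of unwinding past
   it.  */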
7180 /* Routines to lower OMP directives into OMP-GIMPLE. */
7182 /* If ctx is a worksharing context inside of a cancellable parallel
7183 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7184 and conditional branch to parallel's cancel_label to handle
7185 cancellation in the implicit barrier. */
7187 static void
7188 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7189 gimple_seq *body)
7191 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7192 if (gimple_omp_return_nowait_p (omp_return))
7193 return;
7194 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7195 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7196 && outer->cancellable)
7198 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7199 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7200 tree lhs = create_tmp_var (c_bool_type);
7201 gimple_omp_return_set_lhs (omp_return, lhs);
7202 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7203 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7204 fold_convert (c_bool_type,
7205 boolean_false_node),
7206 outer->cancel_label, fallthru_label);
7207 gimple_seq_add_stmt (body, g);
7208 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7210 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7211 return;
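/* Sketch of the result, assuming the region sits in a cancellable
   parallel: the implicit barrier's GIMPLE_OMP_RETURN gains a boolean
   LHS reporting whether cancellation was observed, followed by

	if (lhs != 0) goto <parallel's cancel_label>;
	else goto fallthru_label;
     fallthru_label:
*/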
7214 /* Find the first task_reduction or reduction clause or return NULL
7215 if there are none. */
7217 static inline tree
7218 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7219 enum omp_clause_code ccode)
7221 while (1)
7223 clauses = omp_find_clause (clauses, ccode);
7224 if (clauses == NULL_TREE)
7225 return NULL_TREE;
7226 if (ccode != OMP_CLAUSE_REDUCTION
7227 || code == OMP_TASKLOOP
7228 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7229 return clauses;
7230 clauses = OMP_CLAUSE_CHAIN (clauses);
7234 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7235 gimple_seq *, gimple_seq *);
7237 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7238 CTX is the enclosing OMP context for the current statement. */
7240 static void
7241 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7243 tree block, control;
7244 gimple_stmt_iterator tgsi;
7245 gomp_sections *stmt;
7246 gimple *t;
7247 gbind *new_stmt, *bind;
7248 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7250 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7252 push_gimplify_context ();
7254 dlist = NULL;
7255 ilist = NULL;
7257 tree rclauses
7258 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7259 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7260 tree rtmp = NULL_TREE;
7261 if (rclauses)
7263 tree type = build_pointer_type (pointer_sized_int_node);
7264 tree temp = create_tmp_var (type);
7265 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7266 OMP_CLAUSE_DECL (c) = temp;
7267 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7268 gimple_omp_sections_set_clauses (stmt, c);
7269 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7270 gimple_omp_sections_clauses (stmt),
7271 &ilist, &tred_dlist);
7272 rclauses = c;
7273 rtmp = make_ssa_name (type);
7274 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7277 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7278 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7280 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7281 &ilist, &dlist, ctx, NULL);
7283 control = create_tmp_var (unsigned_type_node, ".section");
7284 gimple_omp_sections_set_control (stmt, control);
7286 new_body = gimple_omp_body (stmt);
7287 gimple_omp_set_body (stmt, NULL);
7288 tgsi = gsi_start (new_body);
7289 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7291 omp_context *sctx;
7292 gimple *sec_start;
7294 sec_start = gsi_stmt (tgsi);
7295 sctx = maybe_lookup_ctx (sec_start);
7296 gcc_assert (sctx);
7298 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7299 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7300 GSI_CONTINUE_LINKING);
7301 gimple_omp_set_body (sec_start, NULL);
7303 if (gsi_one_before_end_p (tgsi))
7305 gimple_seq l = NULL;
7306 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7307 &ilist, &l, &clist, ctx);
7308 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7309 gimple_omp_section_set_last (sec_start);
7312 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7313 GSI_CONTINUE_LINKING);
7316 block = make_node (BLOCK);
7317 bind = gimple_build_bind (NULL, new_body, block);
7319 olist = NULL;
7320 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7321 &clist, ctx);
7322 if (clist)
7324 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7325 gcall *g = gimple_build_call (fndecl, 0);
7326 gimple_seq_add_stmt (&olist, g);
7327 gimple_seq_add_seq (&olist, clist);
7328 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7329 g = gimple_build_call (fndecl, 0);
7330 gimple_seq_add_stmt (&olist, g);
7333 block = make_node (BLOCK);
7334 new_stmt = gimple_build_bind (NULL, NULL, block);
7335 gsi_replace (gsi_p, new_stmt, true);
7337 pop_gimplify_context (new_stmt);
7338 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7339 BLOCK_VARS (block) = gimple_bind_vars (bind);
7340 if (BLOCK_VARS (block))
7341 TREE_USED (block) = 1;
7343 new_body = NULL;
7344 gimple_seq_add_seq (&new_body, ilist);
7345 gimple_seq_add_stmt (&new_body, stmt);
7346 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7347 gimple_seq_add_stmt (&new_body, bind);
7349 t = gimple_build_omp_continue (control, control);
7350 gimple_seq_add_stmt (&new_body, t);
7352 gimple_seq_add_seq (&new_body, olist);
7353 if (ctx->cancellable)
7354 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7355 gimple_seq_add_seq (&new_body, dlist);
7357 new_body = maybe_catch_exception (new_body);
7359 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7360 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7361 t = gimple_build_omp_return (nowait);
7362 gimple_seq_add_stmt (&new_body, t);
7363 gimple_seq_add_seq (&new_body, tred_dlist);
7364 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7366 if (rclauses)
7367 OMP_CLAUSE_DECL (rclauses) = rtmp;
7369 gimple_bind_set_body (new_stmt, new_body);
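/* Overall shape of the replacement bind assembled above:

	<ilist: reduction / privatization setup>
	GIMPLE_OMP_SECTIONS <control = .section>
	GIMPLE_OMP_SECTIONS_SWITCH
	bind { GIMPLE_OMP_SECTION ... GIMPLE_OMP_SECTION [last] ... }
	GIMPLE_OMP_CONTINUE <.section, .section>
	<olist: reductions>
	<dlist: destructors / lastprivate copy-back>
	GIMPLE_OMP_RETURN [nowait]

   The dispatch loop that actually calls GOMP_sections_start /
   GOMP_sections_next is only built later, by pass_expand_omp.  */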
7373 /* A subroutine of lower_omp_single. Expand the simple form of
7374 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7376 if (GOMP_single_start ())
7377 BODY;
7378 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7380 FIXME. It may be better to delay expanding the logic of this until
7381 pass_expand_omp. The expanded logic may make the job more difficult
7382 for a synchronization analysis pass. */
7384 static void
7385 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7387 location_t loc = gimple_location (single_stmt);
7388 tree tlabel = create_artificial_label (loc);
7389 tree flabel = create_artificial_label (loc);
7390 gimple *call, *cond;
7391 tree lhs, decl;
7393 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7394 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7395 call = gimple_build_call (decl, 0);
7396 gimple_call_set_lhs (call, lhs);
7397 gimple_seq_add_stmt (pre_p, call);
7399 cond = gimple_build_cond (EQ_EXPR, lhs,
7400 fold_convert_loc (loc, TREE_TYPE (lhs),
7401 boolean_true_node),
7402 tlabel, flabel);
7403 gimple_seq_add_stmt (pre_p, cond);
7404 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7405 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7406 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7410 /* A subroutine of lower_omp_single. Expand the simple form of
7411 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7413 #pragma omp single copyprivate (a, b, c)
7415 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7418 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7420 BODY;
7421 copyout.a = a;
7422 copyout.b = b;
7423 copyout.c = c;
7424 GOMP_single_copy_end (&copyout);
7426 else
7428 a = copyout_p->a;
7429 b = copyout_p->b;
7430 c = copyout_p->c;
7432 GOMP_barrier ();
7435 FIXME. It may be better to delay expanding the logic of this until
7436 pass_expand_omp. The expanded logic may make the job more difficult
7437 for a synchronization analysis pass. */
7439 static void
7440 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7441 omp_context *ctx)
7443 tree ptr_type, t, l0, l1, l2, bfn_decl;
7444 gimple_seq copyin_seq;
7445 location_t loc = gimple_location (single_stmt);
7447 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7449 ptr_type = build_pointer_type (ctx->record_type);
7450 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7452 l0 = create_artificial_label (loc);
7453 l1 = create_artificial_label (loc);
7454 l2 = create_artificial_label (loc);
7456 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7457 t = build_call_expr_loc (loc, bfn_decl, 0);
7458 t = fold_convert_loc (loc, ptr_type, t);
7459 gimplify_assign (ctx->receiver_decl, t, pre_p);
7461 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7462 build_int_cst (ptr_type, 0));
7463 t = build3 (COND_EXPR, void_type_node, t,
7464 build_and_jump (&l0), build_and_jump (&l1));
7465 gimplify_and_add (t, pre_p);
7467 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7469 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7471 copyin_seq = NULL;
7472 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7473 &copyin_seq, ctx);
7475 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7476 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7477 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7478 gimplify_and_add (t, pre_p);
7480 t = build_and_jump (&l2);
7481 gimplify_and_add (t, pre_p);
7483 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7485 gimple_seq_add_seq (pre_p, copyin_seq);
7487 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7491 /* Expand code for an OpenMP single directive. */
7493 static void
7494 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7496 tree block;
7497 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7498 gbind *bind;
7499 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7501 push_gimplify_context ();
7503 block = make_node (BLOCK);
7504 bind = gimple_build_bind (NULL, NULL, block);
7505 gsi_replace (gsi_p, bind, true);
7506 bind_body = NULL;
7507 dlist = NULL;
7508 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7509 &bind_body, &dlist, ctx, NULL);
7510 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7512 gimple_seq_add_stmt (&bind_body, single_stmt);
7514 if (ctx->record_type)
7515 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7516 else
7517 lower_omp_single_simple (single_stmt, &bind_body);
7519 gimple_omp_set_body (single_stmt, NULL);
7521 gimple_seq_add_seq (&bind_body, dlist);
7523 bind_body = maybe_catch_exception (bind_body);
7525 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7526 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7527 gimple *g = gimple_build_omp_return (nowait);
7528 gimple_seq_add_stmt (&bind_body_tail, g);
7529 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
7530 if (ctx->record_type)
7532 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7533 tree clobber = build_constructor (ctx->record_type, NULL);
7534 TREE_THIS_VOLATILE (clobber) = 1;
7535 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7536 clobber), GSI_SAME_STMT);
7538 gimple_seq_add_seq (&bind_body, bind_body_tail);
7539 gimple_bind_set_body (bind, bind_body);
7541 pop_gimplify_context (bind);
7543 gimple_bind_append_vars (bind, ctx->block_vars);
7544 BLOCK_VARS (block) = ctx->block_vars;
7545 if (BLOCK_VARS (block))
7546 TREE_USED (block) = 1;
7550 /* Expand code for an OpenMP master directive. */
7552 static void
7553 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7555 tree block, lab = NULL, x, bfn_decl;
7556 gimple *stmt = gsi_stmt (*gsi_p);
7557 gbind *bind;
7558 location_t loc = gimple_location (stmt);
7559 gimple_seq tseq;
7561 push_gimplify_context ();
7563 block = make_node (BLOCK);
7564 bind = gimple_build_bind (NULL, NULL, block);
7565 gsi_replace (gsi_p, bind, true);
7566 gimple_bind_add_stmt (bind, stmt);
7568 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7569 x = build_call_expr_loc (loc, bfn_decl, 0);
7570 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7571 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7572 tseq = NULL;
7573 gimplify_and_add (x, &tseq);
7574 gimple_bind_add_seq (bind, tseq);
7576 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7577 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7578 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7579 gimple_omp_set_body (stmt, NULL);
7581 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7583 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7585 pop_gimplify_context (bind);
7587 gimple_bind_append_vars (bind, ctx->block_vars);
7588 BLOCK_VARS (block) = ctx->block_vars;
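/* Editor's sketch (illustration only): the lowering above turns

	#pragma omp master
	  stmt;

   into approximately

	if (omp_get_thread_num () == 0)
	  stmt;

   with no implied barrier; gimple_build_omp_return (true) marks the
   region as nowait.  */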
7591 /* Helper function for lower_omp_task_reductions. For a given PASS,
7592 find the next clause that should be processed, or return false
7593 if all of them have been processed already. */
7595 static inline bool
7596 omp_task_reduction_iterate (int pass, enum tree_code code,
7597 enum omp_clause_code ccode, tree *c, tree *decl,
7598 tree *type, tree *next)
7600 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7602 if (ccode == OMP_CLAUSE_REDUCTION
7603 && code != OMP_TASKLOOP
7604 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7605 continue;
7606 *decl = OMP_CLAUSE_DECL (*c);
7607 *type = TREE_TYPE (*decl);
7608 if (TREE_CODE (*decl) == MEM_REF)
7610 if (pass != 1)
7611 continue;
7613 else
7615 if (omp_is_reference (*decl))
7616 *type = TREE_TYPE (*type);
7617 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7618 continue;
7620 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7621 return true;
7623 *decl = NULL_TREE;
7624 *type = NULL_TREE;
7625 *next = NULL_TREE;
7626 return false;
7629 /* Lower task_reduction and reduction clauses (the latter, unless CODE is
7630 OMP_TASKGROUP, only when they have the task modifier). Register the mapping of
7631 those in the START sequence; reduce and unregister them in the END sequence. */
7633 static void
7634 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7635 gimple_seq *start, gimple_seq *end)
7637 enum omp_clause_code ccode
7638 = (code == OMP_TASKGROUP
7639 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
7640 tree cancellable = NULL_TREE;
7641 clauses = omp_task_reductions_find_first (clauses, code, ccode);
7642 if (clauses == NULL_TREE)
7643 return;
7644 if (code == OMP_FOR || code == OMP_SECTIONS)
7646 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7647 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7648 && outer->cancellable)
7650 cancellable = error_mark_node;
7651 break;
7653 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7654 break;
7656 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7657 tree *last = &TYPE_FIELDS (record_type);
7658 unsigned cnt = 0;
7659 if (cancellable)
7661 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7662 ptr_type_node);
7663 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7664 integer_type_node);
7665 *last = field;
7666 DECL_CHAIN (field) = ifield;
7667 last = &DECL_CHAIN (ifield);
7668 DECL_CONTEXT (field) = record_type;
7669 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7670 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7671 DECL_CONTEXT (ifield) = record_type;
7672 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7673 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7675 for (int pass = 0; pass < 2; pass++)
7677 tree decl, type, next;
7678 for (tree c = clauses;
7679 omp_task_reduction_iterate (pass, code, ccode,
7680 &c, &decl, &type, &next); c = next)
7682 ++cnt;
7683 tree new_type = type;
7684 if (ctx->outer)
7685 new_type = remap_type (type, &ctx->outer->cb);
7686 tree field
7687 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7688 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7689 new_type);
7690 if (DECL_P (decl) && type == TREE_TYPE (decl))
7692 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7693 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7694 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7696 else
7697 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7698 DECL_CONTEXT (field) = record_type;
7699 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7700 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7701 *last = field;
7702 last = &DECL_CHAIN (field);
7703 tree bfield
7704 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7705 boolean_type_node);
7706 DECL_CONTEXT (bfield) = record_type;
7707 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7708 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7709 *last = bfield;
7710 last = &DECL_CHAIN (bfield);
7713 *last = NULL_TREE;
7714 layout_type (record_type);
7716 /* Build up an array used to register all the reductions with the runtime
7717 and to deregister them at the end. Format documented in libgomp/task.c. */
7718 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7719 tree avar = create_tmp_var_raw (atype);
7720 gimple_add_tmp_var (avar);
7721 TREE_ADDRESSABLE (avar) = 1;
7722 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7723 NULL_TREE, NULL_TREE);
7724 tree t = build_int_cst (pointer_sized_int_node, cnt);
7725 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7726 gimple_seq seq = NULL;
7727 tree sz = fold_convert (pointer_sized_int_node,
7728 TYPE_SIZE_UNIT (record_type));
7729 int cachesz = 64;
7730 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7731 build_int_cst (pointer_sized_int_node, cachesz - 1));
7732 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7733 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7734 ctx->task_reductions.create (1 + cnt);
7735 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7736 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7737 ? sz : NULL_TREE);
7738 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7739 gimple_seq_add_seq (start, seq);
7740 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7741 NULL_TREE, NULL_TREE);
7742 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7743 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7744 NULL_TREE, NULL_TREE);
7745 t = build_int_cst (pointer_sized_int_node,
7746 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7747 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7748 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7749 NULL_TREE, NULL_TREE);
7750 t = build_int_cst (pointer_sized_int_node, -1);
7751 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7752 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7753 NULL_TREE, NULL_TREE);
7754 t = build_int_cst (pointer_sized_int_node, 0);
7755 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
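  /* Editor's note (derived from the assignments above; the authoritative
     format is documented in libgomp/task.c):
	avar[0] = cnt   number of reductions
	avar[1] = sz    size of record_type, rounded up to whole 64-byte
			cache lines
	avar[2] = max (alignment of record_type, cache line size)
	avar[3] = -1 and avar[4] = 0, initial values consumed by the runtime
     Entries 7 + 3*i and 7 + 3*i + 1 hold the address and field offset of
     the i-th reduction and are filled in by the second pass below.  */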
7757 /* In END, build a loop that iterates from 0 to < omp_get_num_threads ()
7758 and for each task reduction checks a bool right after the private variable
7759 within that thread's chunk; if the bool is clear, it hasn't been
7760 initialized and thus isn't going to be reduced or destructed; otherwise,
7761 reduce and destruct it. */
7762 tree idx = create_tmp_var (size_type_node);
7763 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7764 tree num_thr_sz = create_tmp_var (size_type_node);
7765 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7766 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7767 tree lab3 = NULL_TREE;
7768 gimple *g;
7769 if (code == OMP_FOR || code == OMP_SECTIONS)
7771 /* For worksharing constructs, only perform it in the master thread,
7772 with the exception of cancelled implicit barriers - in that case, only
7773 handle the current thread. */
7774 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7775 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7776 tree thr_num = create_tmp_var (integer_type_node);
7777 g = gimple_build_call (t, 0);
7778 gimple_call_set_lhs (g, thr_num);
7779 gimple_seq_add_stmt (end, g);
7780 if (cancellable)
7782 tree c;
7783 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7784 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7785 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7786 if (code == OMP_FOR)
7787 c = gimple_omp_for_clauses (ctx->stmt);
7788 else /* if (code == OMP_SECTIONS) */
7789 c = gimple_omp_sections_clauses (ctx->stmt);
7790 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7791 cancellable = c;
7792 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7793 lab5, lab6);
7794 gimple_seq_add_stmt (end, g);
7795 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7796 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7797 gimple_seq_add_stmt (end, g);
7798 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7799 build_one_cst (TREE_TYPE (idx)));
7800 gimple_seq_add_stmt (end, g);
7801 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7802 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7804 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7805 gimple_seq_add_stmt (end, g);
7806 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7808 if (code != OMP_PARALLEL)
7810 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7811 tree num_thr = create_tmp_var (integer_type_node);
7812 g = gimple_build_call (t, 0);
7813 gimple_call_set_lhs (g, num_thr);
7814 gimple_seq_add_stmt (end, g);
7815 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7816 gimple_seq_add_stmt (end, g);
7817 if (cancellable)
7818 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7820 else
7822 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7823 OMP_CLAUSE__REDUCTEMP_);
7824 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7825 t = fold_convert (size_type_node, t);
7826 gimplify_assign (num_thr_sz, t, end);
7828 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7829 NULL_TREE, NULL_TREE);
7830 tree data = create_tmp_var (pointer_sized_int_node);
7831 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7832 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7833 tree ptr;
7834 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7835 ptr = create_tmp_var (build_pointer_type (record_type));
7836 else
7837 ptr = create_tmp_var (ptr_type_node);
7838 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7840 tree field = TYPE_FIELDS (record_type);
7841 cnt = 0;
7842 if (cancellable)
7843 field = DECL_CHAIN (DECL_CHAIN (field));
7844 for (int pass = 0; pass < 2; pass++)
7846 tree decl, type, next;
7847 for (tree c = clauses;
7848 omp_task_reduction_iterate (pass, code, ccode,
7849 &c, &decl, &type, &next); c = next)
7851 tree var = decl, ref;
7852 if (TREE_CODE (decl) == MEM_REF)
7854 var = TREE_OPERAND (var, 0);
7855 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7856 var = TREE_OPERAND (var, 0);
7857 tree v = var;
7858 if (TREE_CODE (var) == ADDR_EXPR)
7859 var = TREE_OPERAND (var, 0);
7860 else if (TREE_CODE (var) == INDIRECT_REF)
7861 var = TREE_OPERAND (var, 0);
7862 tree orig_var = var;
7863 if (is_variable_sized (var))
7865 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7866 var = DECL_VALUE_EXPR (var);
7867 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7868 var = TREE_OPERAND (var, 0);
7869 gcc_assert (DECL_P (var));
7871 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7872 if (orig_var != var)
7873 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7874 else if (TREE_CODE (v) == ADDR_EXPR)
7875 t = build_fold_addr_expr (t);
7876 else if (TREE_CODE (v) == INDIRECT_REF)
7877 t = build_fold_indirect_ref (t);
7878 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7880 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7881 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7882 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7884 if (!integer_zerop (TREE_OPERAND (decl, 1)))
7885 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7886 fold_convert (size_type_node,
7887 TREE_OPERAND (decl, 1)));
7889 else
7891 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7892 if (!omp_is_reference (decl))
7893 t = build_fold_addr_expr (t);
7895 t = fold_convert (pointer_sized_int_node, t);
7896 seq = NULL;
7897 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7898 gimple_seq_add_seq (start, seq);
7899 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7900 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7901 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7902 t = unshare_expr (byte_position (field));
7903 t = fold_convert (pointer_sized_int_node, t);
7904 ctx->task_reduction_map->put (c, cnt);
7905 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7906 ? t : NULL_TREE);
7907 seq = NULL;
7908 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7909 gimple_seq_add_seq (start, seq);
7910 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7911 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7912 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7914 tree bfield = DECL_CHAIN (field);
7915 tree cond;
7916 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7917 /* In parallel or worksharing all threads unconditionally
7918 initialize all their task reduction private variables. */
7919 cond = boolean_true_node;
7920 else if (TREE_TYPE (ptr) == ptr_type_node)
7922 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7923 unshare_expr (byte_position (bfield)));
7924 seq = NULL;
7925 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7926 gimple_seq_add_seq (end, seq);
7927 tree pbool = build_pointer_type (TREE_TYPE (bfield));
7928 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7929 build_int_cst (pbool, 0));
7931 else
7932 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7933 build_simple_mem_ref (ptr), bfield, NULL_TREE);
7934 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7935 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7936 tree condv = create_tmp_var (boolean_type_node);
7937 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7938 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7939 lab3, lab4);
7940 gimple_seq_add_stmt (end, g);
7941 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7942 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7944 /* If this reduction doesn't need destruction and parallel
7945 has been cancelled, there is nothing to do for this
7946 reduction, so jump around the merge operation. */
7947 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7948 g = gimple_build_cond (NE_EXPR, cancellable,
7949 build_zero_cst (TREE_TYPE (cancellable)),
7950 lab4, lab5);
7951 gimple_seq_add_stmt (end, g);
7952 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7955 tree new_var;
7956 if (TREE_TYPE (ptr) == ptr_type_node)
7958 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7959 unshare_expr (byte_position (field)));
7960 seq = NULL;
7961 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7962 gimple_seq_add_seq (end, seq);
7963 tree pbool = build_pointer_type (TREE_TYPE (field));
7964 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7965 build_int_cst (pbool, 0));
7967 else
7968 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7969 build_simple_mem_ref (ptr), field, NULL_TREE);
7971 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7972 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7973 ref = build_simple_mem_ref (ref);
7974 /* reduction(-:var) sums up the partial results, so it acts
7975 identically to reduction(+:var). */
7976 if (rcode == MINUS_EXPR)
7977 rcode = PLUS_EXPR;
7978 if (TREE_CODE (decl) == MEM_REF)
7980 tree type = TREE_TYPE (new_var);
7981 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7982 tree i = create_tmp_var (TREE_TYPE (v));
7983 tree ptype = build_pointer_type (TREE_TYPE (type));
7984 if (DECL_P (v))
7986 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7987 tree vv = create_tmp_var (TREE_TYPE (v));
7988 gimplify_assign (vv, v, start);
7989 v = vv;
7991 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7992 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7993 new_var = build_fold_addr_expr (new_var);
7994 new_var = fold_convert (ptype, new_var);
7995 ref = fold_convert (ptype, ref);
7996 tree m = create_tmp_var (ptype);
7997 gimplify_assign (m, new_var, end);
7998 new_var = m;
7999 m = create_tmp_var (ptype);
8000 gimplify_assign (m, ref, end);
8001 ref = m;
8002 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8003 tree body = create_artificial_label (UNKNOWN_LOCATION);
8004 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8005 gimple_seq_add_stmt (end, gimple_build_label (body));
8006 tree priv = build_simple_mem_ref (new_var);
8007 tree out = build_simple_mem_ref (ref);
8008 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8010 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8011 tree decl_placeholder
8012 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8013 tree lab6 = NULL_TREE;
8014 if (cancellable)
8016 /* If this reduction needs destruction and parallel
8017 has been cancelled, jump around the merge operation
8018 to the destruction. */
8019 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8020 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8021 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8022 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8023 lab6, lab5);
8024 gimple_seq_add_stmt (end, g);
8025 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8027 SET_DECL_VALUE_EXPR (placeholder, out);
8028 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8029 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8030 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8031 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8032 gimple_seq_add_seq (end,
8033 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8034 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8035 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8037 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8038 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8040 if (cancellable)
8041 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8042 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8043 if (x)
8045 gimple_seq tseq = NULL;
8046 gimplify_stmt (&x, &tseq);
8047 gimple_seq_add_seq (end, tseq);
8050 else
8052 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8053 out = unshare_expr (out);
8054 gimplify_assign (out, x, end);
8056 gimple *g
8057 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8058 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8059 gimple_seq_add_stmt (end, g);
8060 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8061 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8062 gimple_seq_add_stmt (end, g);
8063 g = gimple_build_assign (i, PLUS_EXPR, i,
8064 build_int_cst (TREE_TYPE (i), 1));
8065 gimple_seq_add_stmt (end, g);
8066 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8067 gimple_seq_add_stmt (end, g);
8068 gimple_seq_add_stmt (end, gimple_build_label (endl));
8070 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8072 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8073 tree oldv = NULL_TREE;
8074 tree lab6 = NULL_TREE;
8075 if (cancellable)
8077 /* If this reduction needs destruction and parallel
8078 has been cancelled, jump around the merge operation
8079 to the destruction. */
8080 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8081 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8082 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8083 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8084 lab6, lab5);
8085 gimple_seq_add_stmt (end, g);
8086 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8088 if (omp_is_reference (decl)
8089 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8090 TREE_TYPE (ref)))
8091 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8092 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8093 tree refv = create_tmp_var (TREE_TYPE (ref));
8094 gimplify_assign (refv, ref, end);
8095 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8096 SET_DECL_VALUE_EXPR (placeholder, ref);
8097 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8098 tree d = maybe_lookup_decl (decl, ctx);
8099 gcc_assert (d);
8100 if (DECL_HAS_VALUE_EXPR_P (d))
8101 oldv = DECL_VALUE_EXPR (d);
8102 if (omp_is_reference (var))
8104 tree v = fold_convert (TREE_TYPE (d),
8105 build_fold_addr_expr (new_var));
8106 SET_DECL_VALUE_EXPR (d, v);
8108 else
8109 SET_DECL_VALUE_EXPR (d, new_var);
8110 DECL_HAS_VALUE_EXPR_P (d) = 1;
8111 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8112 if (oldv)
8113 SET_DECL_VALUE_EXPR (d, oldv);
8114 else
8116 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8117 DECL_HAS_VALUE_EXPR_P (d) = 0;
8119 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8120 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8121 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8122 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8123 if (cancellable)
8124 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8125 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8126 if (x)
8128 gimple_seq tseq = NULL;
8129 gimplify_stmt (&x, &tseq);
8130 gimple_seq_add_seq (end, tseq);
8133 else
8135 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8136 ref = unshare_expr (ref);
8137 gimplify_assign (ref, x, end);
8139 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8140 ++cnt;
8141 field = DECL_CHAIN (bfield);
8145 if (code == OMP_TASKGROUP)
8147 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8148 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8149 gimple_seq_add_stmt (start, g);
8151 else
8153 tree c;
8154 if (code == OMP_FOR)
8155 c = gimple_omp_for_clauses (ctx->stmt);
8156 else if (code == OMP_SECTIONS)
8157 c = gimple_omp_sections_clauses (ctx->stmt);
8158 else
8159 c = gimple_omp_taskreg_clauses (ctx->stmt);
8160 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8161 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8162 build_fold_addr_expr (avar));
8163 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8166 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8167 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8168 size_one_node));
8169 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8170 gimple_seq_add_stmt (end, g);
8171 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8172 if (code == OMP_FOR || code == OMP_SECTIONS)
8174 enum built_in_function bfn
8175 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8176 t = builtin_decl_explicit (bfn);
8177 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8178 tree arg;
8179 if (cancellable)
8181 arg = create_tmp_var (c_bool_type);
8182 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8183 cancellable));
8185 else
8186 arg = build_int_cst (c_bool_type, 0);
8187 g = gimple_build_call (t, 1, arg);
8189 else
8191 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8192 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8194 gimple_seq_add_stmt (end, g);
8195 t = build_constructor (atype, NULL);
8196 TREE_THIS_VOLATILE (t) = 1;
8197 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
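/* Editor's sketch (illustration only): a construct whose clauses reach
   lower_omp_task_reductions above.  compute_item is a hypothetical helper.

	long sum = 0;
	#pragma omp taskgroup task_reduction (+: sum)
	{
	  for (int i = 0; i < n; i++)
	    {
	      #pragma omp task in_reduction (+: sum)
	      sum += compute_item (i);
	    }
	}

   The START sequence registers the reduction array before the body runs;
   the END sequence walks each thread's chunk, merges every initialized
   private copy into sum and unregisters the array.  */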
8200 /* Expand code for an OpenMP taskgroup directive. */
8202 static void
8203 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8205 gimple *stmt = gsi_stmt (*gsi_p);
8206 gcall *x;
8207 gbind *bind;
8208 gimple_seq dseq = NULL;
8209 tree block = make_node (BLOCK);
8211 bind = gimple_build_bind (NULL, NULL, block);
8212 gsi_replace (gsi_p, bind, true);
8213 gimple_bind_add_stmt (bind, stmt);
8215 push_gimplify_context ();
8217 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8218 0);
8219 gimple_bind_add_stmt (bind, x);
8221 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8222 gimple_omp_taskgroup_clauses (stmt),
8223 gimple_bind_body_ptr (bind), &dseq);
8225 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8226 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8227 gimple_omp_set_body (stmt, NULL);
8229 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8230 gimple_bind_add_seq (bind, dseq);
8232 pop_gimplify_context (bind);
8234 gimple_bind_append_vars (bind, ctx->block_vars);
8235 BLOCK_VARS (block) = ctx->block_vars;
8239 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8241 static void
8242 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8243 omp_context *ctx)
8245 struct omp_for_data fd;
8246 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8247 return;
8249 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8250 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8251 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8252 if (!fd.ordered)
8253 return;
8255 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8256 tree c = gimple_omp_ordered_clauses (ord_stmt);
8257 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8258 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8260 /* Merge depend clauses from multiple adjacent
8261 #pragma omp ordered depend(sink:...) constructs
8262 into one #pragma omp ordered depend(sink:...), so that
8263 we can optimize them together. */
8264 gimple_stmt_iterator gsi = *gsi_p;
8265 gsi_next (&gsi);
8266 while (!gsi_end_p (gsi))
8268 gimple *stmt = gsi_stmt (gsi);
8269 if (is_gimple_debug (stmt)
8270 || gimple_code (stmt) == GIMPLE_NOP)
8272 gsi_next (&gsi);
8273 continue;
8275 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8276 break;
8277 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8278 c = gimple_omp_ordered_clauses (ord_stmt2);
8279 if (c == NULL_TREE
8280 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8281 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8282 break;
8283 while (*list_p)
8284 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8285 *list_p = c;
8286 gsi_remove (&gsi, true);
8290 /* Canonicalize sink dependence clauses into one folded clause if
8291 possible.
8293 The basic algorithm is to create a sink vector whose first
8294 element is the GCD of all the first elements, and whose remaining
8295 elements are the minimum of the subsequent columns.
8297 We ignore dependence vectors whose first element is zero because
8298 such dependencies are known to be executed by the same thread.
8300 We take into account the direction of the loop, so a minimum
8301 becomes a maximum if the loop is iterating forwards. We also
8302 ignore sink clauses where the loop direction is unknown, or where
8303 the offsets are clearly invalid because they are not a multiple
8304 of the loop increment.
8306 For example:
8308 #pragma omp for ordered(2)
8309 for (i=0; i < N; ++i)
8310 for (j=0; j < M; ++j)
8312 #pragma omp ordered \
8313 depend(sink:i-8,j-2) \
8314 depend(sink:i,j-1) \ // Completely ignored because i+0.
8315 depend(sink:i-4,j-3) \
8316 depend(sink:i-6,j-4)
8317 #pragma omp ordered depend(source)
8320 Folded clause is:
8322 depend(sink:-gcd(8,4,6),-min(2,3,4))
8323 -or-
8324 depend(sink:-2,-2)
8327 /* FIXME: Computing GCDs where the first element is zero is
8328 non-trivial in the presence of collapsed loops. Do this later. */
8329 if (fd.collapse > 1)
8330 return;
8332 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8334 /* wide_int is not a POD so it must be default-constructed. */
8335 for (unsigned i = 0; i != 2 * len - 1; ++i)
8336 new (static_cast<void*>(folded_deps + i)) wide_int ();
8338 tree folded_dep = NULL_TREE;
8339 /* TRUE if the first dimension's offset is negative. */
8340 bool neg_offset_p = false;
8342 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8343 unsigned int i;
8344 while ((c = *list_p) != NULL)
8346 bool remove = false;
8348 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8349 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8350 goto next_ordered_clause;
8352 tree vec;
8353 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8354 vec && TREE_CODE (vec) == TREE_LIST;
8355 vec = TREE_CHAIN (vec), ++i)
8357 gcc_assert (i < len);
8359 /* omp_extract_for_data has canonicalized the condition. */
8360 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8361 || fd.loops[i].cond_code == GT_EXPR);
8362 bool forward = fd.loops[i].cond_code == LT_EXPR;
8363 bool maybe_lexically_later = true;
8365 /* While the committee makes up its mind, bail if we have any
8366 non-constant steps. */
8367 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8368 goto lower_omp_ordered_ret;
8370 tree itype = TREE_TYPE (TREE_VALUE (vec));
8371 if (POINTER_TYPE_P (itype))
8372 itype = sizetype;
8373 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8374 TYPE_PRECISION (itype),
8375 TYPE_SIGN (itype));
8377 /* Ignore invalid offsets that are not multiples of the step. */
8378 if (!wi::multiple_of_p (wi::abs (offset),
8379 wi::abs (wi::to_wide (fd.loops[i].step)),
8380 UNSIGNED))
8382 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8383 "ignoring sink clause with offset that is not "
8384 "a multiple of the loop step");
8385 remove = true;
8386 goto next_ordered_clause;
8389 /* Calculate the first dimension. The first dimension of
8390 the folded dependency vector is the GCD of the first
8391 elements, while ignoring any first elements whose offset
8392 is 0. */
8393 if (i == 0)
8395 /* Ignore dependence vectors whose first dimension is 0. */
8396 if (offset == 0)
8398 remove = true;
8399 goto next_ordered_clause;
8401 else
8403 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8405 error_at (OMP_CLAUSE_LOCATION (c),
8406 "first offset must be in opposite direction "
8407 "of loop iterations");
8408 goto lower_omp_ordered_ret;
8410 if (forward)
8411 offset = -offset;
8412 neg_offset_p = forward;
8413 /* Initialize the first time around. */
8414 if (folded_dep == NULL_TREE)
8416 folded_dep = c;
8417 folded_deps[0] = offset;
8419 else
8420 folded_deps[0] = wi::gcd (folded_deps[0],
8421 offset, UNSIGNED);
8424 /* Calculate minimum for the remaining dimensions. */
8425 else
8427 folded_deps[len + i - 1] = offset;
8428 if (folded_dep == c)
8429 folded_deps[i] = offset;
8430 else if (maybe_lexically_later
8431 && !wi::eq_p (folded_deps[i], offset))
8433 if (forward ^ wi::gts_p (folded_deps[i], offset))
8435 unsigned int j;
8436 folded_dep = c;
8437 for (j = 1; j <= i; j++)
8438 folded_deps[j] = folded_deps[len + j - 1];
8440 else
8441 maybe_lexically_later = false;
8445 gcc_assert (i == len);
8447 remove = true;
8449 next_ordered_clause:
8450 if (remove)
8451 *list_p = OMP_CLAUSE_CHAIN (c);
8452 else
8453 list_p = &OMP_CLAUSE_CHAIN (c);
8456 if (folded_dep)
8458 if (neg_offset_p)
8459 folded_deps[0] = -folded_deps[0];
8461 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8462 if (POINTER_TYPE_P (itype))
8463 itype = sizetype;
8465 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8466 = wide_int_to_tree (itype, folded_deps[0]);
8467 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8468 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8471 lower_omp_ordered_ret:
8473 /* Ordered without clauses is #pragma omp ordered threads, while we want
8474 a nop instead if we remove all the clauses. */
8475 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8476 gsi_replace (gsi_p, gimple_build_nop (), true);
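/* Editor's sketch: the folding arithmetic from the comment above as a
   stand-alone C program.  This illustrates only the math (GCD of the
   first column, minimum of the rest); the real implementation works on
   wide_ints and accounts for loop direction.

	#include <stdio.h>

	static unsigned gcd (unsigned a, unsigned b)
	{
	  while (b) { unsigned t = a % b; a = b; b = t; }
	  return a;
	}

	int main (void)
	{
	  // Offsets from the example: (8,2), (4,3), (6,4); (0,1) ignored.
	  unsigned off[3][2] = { { 8, 2 }, { 4, 3 }, { 6, 4 } };
	  unsigned g = off[0][0], m = off[0][1];
	  for (int i = 1; i < 3; i++)
	    {
	      g = gcd (g, off[i][0]);
	      if (off[i][1] < m)
		m = off[i][1];
	    }
	  printf ("depend(sink:-%u,-%u)\n", g, m);  // prints depend(sink:-2,-2)
	  return 0;
	}
*/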
8480 /* Expand code for an OpenMP ordered directive. */
8482 static void
8483 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8485 tree block;
8486 gimple *stmt = gsi_stmt (*gsi_p), *g;
8487 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8488 gcall *x;
8489 gbind *bind;
8490 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8491 OMP_CLAUSE_SIMD);
8492 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8493 loop. */
8494 bool maybe_simt
8495 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8496 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8497 OMP_CLAUSE_THREADS);
8499 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8500 OMP_CLAUSE_DEPEND))
8502 /* FIXME: This needs to be moved to the expansion, to verify various
8503 conditions only testable on a cfg with dominators computed; also,
8504 all the depend clauses to be merged might still need to be available
8505 for the runtime checks. */
8506 if (0)
8507 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8508 return;
8511 push_gimplify_context ();
8513 block = make_node (BLOCK);
8514 bind = gimple_build_bind (NULL, NULL, block);
8515 gsi_replace (gsi_p, bind, true);
8516 gimple_bind_add_stmt (bind, stmt);
8518 if (simd)
8520 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8521 build_int_cst (NULL_TREE, threads));
8522 cfun->has_simduid_loops = true;
8524 else
8525 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8526 0);
8527 gimple_bind_add_stmt (bind, x);
8529 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8530 if (maybe_simt)
8532 counter = create_tmp_var (integer_type_node);
8533 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8534 gimple_call_set_lhs (g, counter);
8535 gimple_bind_add_stmt (bind, g);
8537 body = create_artificial_label (UNKNOWN_LOCATION);
8538 test = create_artificial_label (UNKNOWN_LOCATION);
8539 gimple_bind_add_stmt (bind, gimple_build_label (body));
8541 tree simt_pred = create_tmp_var (integer_type_node);
8542 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8543 gimple_call_set_lhs (g, simt_pred);
8544 gimple_bind_add_stmt (bind, g);
8546 tree t = create_artificial_label (UNKNOWN_LOCATION);
8547 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8548 gimple_bind_add_stmt (bind, g);
8550 gimple_bind_add_stmt (bind, gimple_build_label (t));
8552 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8553 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8554 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8555 gimple_omp_set_body (stmt, NULL);
8557 if (maybe_simt)
8559 gimple_bind_add_stmt (bind, gimple_build_label (test));
8560 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8561 gimple_bind_add_stmt (bind, g);
8563 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8564 tree nonneg = create_tmp_var (integer_type_node);
8565 gimple_seq tseq = NULL;
8566 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8567 gimple_bind_add_seq (bind, tseq);
8569 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8570 gimple_call_set_lhs (g, nonneg);
8571 gimple_bind_add_stmt (bind, g);
8573 tree end = create_artificial_label (UNKNOWN_LOCATION);
8574 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8575 gimple_bind_add_stmt (bind, g);
8577 gimple_bind_add_stmt (bind, gimple_build_label (end));
8579 if (simd)
8580 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8581 build_int_cst (NULL_TREE, threads));
8582 else
8583 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8584 0);
8585 gimple_bind_add_stmt (bind, x);
8587 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8589 pop_gimplify_context (bind);
8591 gimple_bind_append_vars (bind, ctx->block_vars);
8592 BLOCK_VARS (block) = gimple_bind_vars (bind);
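/* Editor's sketch (illustration only): a loop whose ordered regions are
   serialized in iteration order by the lowering above.  compute and emit
   are hypothetical helpers.

	#pragma omp for ordered
	for (i = 0; i < n; i++)
	  {
	    compute (i);   // runs in parallel
	    #pragma omp ordered
	    emit (i);      // bracketed by GOMP_ordered_start/end
	  }
*/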
8596 /* Expand code for an OpenMP scan directive and the structured block
8597 before the scan directive. */
8599 static void
8600 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8602 gimple *stmt = gsi_stmt (*gsi_p);
8603 bool has_clauses
8604 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8605 tree lane = NULL_TREE;
8606 gimple_seq before = NULL;
8607 omp_context *octx = ctx->outer;
8608 gcc_assert (octx);
8609 if (octx->scan_exclusive && !has_clauses)
8611 gimple_stmt_iterator gsi2 = *gsi_p;
8612 gsi_next (&gsi2);
8613 gimple *stmt2 = gsi_stmt (gsi2);
8614 /* For exclusive scan, swap the GIMPLE_OMP_SCAN without clauses
8615 with the following GIMPLE_OMP_SCAN with clauses, so that the input
8616 phase, the one with the exclusive clause(s), comes first. */
8617 if (stmt2
8618 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
8619 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
8621 gsi_remove (gsi_p, false);
8622 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
8623 ctx = maybe_lookup_ctx (stmt2);
8624 gcc_assert (ctx);
8625 lower_omp_scan (gsi_p, ctx);
8626 return;
8630 bool input_phase = has_clauses ^ octx->scan_inclusive;
8631 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8632 && (gimple_omp_for_kind (octx->stmt) & GF_OMP_FOR_SIMD)
8633 && !gimple_omp_for_combined_into_p (octx->stmt));
8634 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8635 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
8636 && !gimple_omp_for_combined_p (octx->stmt));
8637 if (is_simd)
8638 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
8639 OMP_CLAUSE__SIMDUID_))
8641 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
8642 lane = create_tmp_var (unsigned_type_node);
8643 tree t = build_int_cst (integer_type_node,
8644 input_phase ? 1
8645 : octx->scan_inclusive ? 2 : 3);
8646 gimple *g
8647 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
8648 gimple_call_set_lhs (g, lane);
8649 gimple_seq_add_stmt (&before, g);
8652 if (is_simd || is_for)
8654 for (tree c = gimple_omp_for_clauses (octx->stmt);
8655 c; c = OMP_CLAUSE_CHAIN (c))
8656 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8657 && OMP_CLAUSE_REDUCTION_INSCAN (c))
8659 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8660 tree var = OMP_CLAUSE_DECL (c);
8661 tree new_var = lookup_decl (var, octx);
8662 tree val = new_var;
8663 tree var2 = NULL_TREE;
8664 tree var3 = NULL_TREE;
8665 tree var4 = NULL_TREE;
8666 tree lane0 = NULL_TREE;
8667 tree new_vard = new_var;
8668 if (omp_is_reference (var))
8670 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
8671 val = new_var;
8673 if (DECL_HAS_VALUE_EXPR_P (new_vard))
8675 val = DECL_VALUE_EXPR (new_vard);
8676 if (new_vard != new_var)
8678 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
8679 val = TREE_OPERAND (val, 0);
8681 if (TREE_CODE (val) == ARRAY_REF
8682 && VAR_P (TREE_OPERAND (val, 0)))
8684 tree v = TREE_OPERAND (val, 0);
8685 if (lookup_attribute ("omp simd array",
8686 DECL_ATTRIBUTES (v)))
8688 val = unshare_expr (val);
8689 lane0 = TREE_OPERAND (val, 1);
8690 TREE_OPERAND (val, 1) = lane;
8691 var2 = lookup_decl (v, octx);
8692 if (octx->scan_exclusive)
8693 var4 = lookup_decl (var2, octx);
8694 if (input_phase
8695 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8696 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
8697 if (!input_phase)
8699 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
8700 var2, lane, NULL_TREE, NULL_TREE);
8701 TREE_THIS_NOTRAP (var2) = 1;
8702 if (octx->scan_exclusive)
8704 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
8705 var4, lane, NULL_TREE,
8706 NULL_TREE);
8707 TREE_THIS_NOTRAP (var4) = 1;
8710 else
8711 var2 = val;
8714 gcc_assert (var2);
8716 else
8718 var2 = build_outer_var_ref (var, octx);
8719 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8721 var3 = maybe_lookup_decl (new_vard, octx);
8722 if (var3 == new_vard || var3 == NULL_TREE)
8723 var3 = NULL_TREE;
8724 else if (is_simd && octx->scan_exclusive && !input_phase)
8726 var4 = maybe_lookup_decl (var3, octx);
8727 if (var4 == var3 || var4 == NULL_TREE)
8729 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
8731 var4 = var3;
8732 var3 = NULL_TREE;
8734 else
8735 var4 = NULL_TREE;
8739 if (is_simd
8740 && octx->scan_exclusive
8741 && !input_phase
8742 && var4 == NULL_TREE)
8743 var4 = create_tmp_var (TREE_TYPE (val));
8745 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8747 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8748 if (input_phase)
8750 if (var3)
8752 /* If we've added a separate identity element
8753 variable, copy it over into val. */
8754 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
8755 var3);
8756 gimplify_and_add (x, &before);
8758 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
8760 /* Otherwise, assign to it the identity element. */
8761 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
8762 if (is_for)
8763 tseq = copy_gimple_seq_and_replace_locals (tseq);
8764 tree ref = build_outer_var_ref (var, octx);
8765 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
8766 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
8767 if (x)
8769 if (new_vard != new_var)
8770 val = build_fold_addr_expr_loc (clause_loc, val);
8771 SET_DECL_VALUE_EXPR (new_vard, val);
8773 SET_DECL_VALUE_EXPR (placeholder, ref);
8774 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8775 lower_omp (&tseq, octx);
8776 if (x)
8777 SET_DECL_VALUE_EXPR (new_vard, x);
8778 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8779 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8780 gimple_seq_add_seq (&before, tseq);
8781 if (is_simd)
8782 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8785 else if (is_simd)
8787 tree x;
8788 if (octx->scan_exclusive)
8790 tree v4 = unshare_expr (var4);
8791 tree v2 = unshare_expr (var2);
8792 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
8793 gimplify_and_add (x, &before);
8795 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
8796 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
8797 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
8798 tree vexpr = val;
8799 if (x && new_vard != new_var)
8800 vexpr = build_fold_addr_expr_loc (clause_loc, val);
8801 if (x)
8802 SET_DECL_VALUE_EXPR (new_vard, vexpr);
8803 SET_DECL_VALUE_EXPR (placeholder, var2);
8804 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8805 lower_omp (&tseq, octx);
8806 gimple_seq_add_seq (&before, tseq);
8807 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8808 if (x)
8809 SET_DECL_VALUE_EXPR (new_vard, x);
8810 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8811 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8812 if (octx->scan_inclusive)
8814 x = lang_hooks.decls.omp_clause_assign_op (c, val,
8815 var2);
8816 gimplify_and_add (x, &before);
8818 else if (lane0 == NULL_TREE)
8820 x = lang_hooks.decls.omp_clause_assign_op (c, val,
8821 var4);
8822 gimplify_and_add (x, &before);
8826 else
8828 if (input_phase)
8830 /* input phase. Set val to initializer before
8831 the body. */
8832 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
8833 gimplify_assign (val, x, &before);
8835 else if (is_simd)
8837 /* scan phase. */
8838 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
8839 if (code == MINUS_EXPR)
8840 code = PLUS_EXPR;
8842 tree x = build2 (code, TREE_TYPE (var2),
8843 unshare_expr (var2), unshare_expr (val));
8844 if (octx->scan_inclusive)
8846 gimplify_assign (unshare_expr (var2), x, &before);
8847 gimplify_assign (val, var2, &before);
8849 else
8851 gimplify_assign (unshare_expr (var4),
8852 unshare_expr (var2), &before);
8853 gimplify_assign (var2, x, &before);
8854 if (lane0 == NULL_TREE)
8855 gimplify_assign (val, var4, &before);
8859 if (octx->scan_exclusive && !input_phase && lane0)
8861 tree vexpr = unshare_expr (var4);
8862 TREE_OPERAND (vexpr, 1) = lane0;
8863 if (new_vard != new_var)
8864 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
8865 SET_DECL_VALUE_EXPR (new_vard, vexpr);
8869 else if (has_clauses)
8870 sorry_at (gimple_location (stmt),
8871 "%<#pragma omp scan%> not supported yet");
8872 if (!is_for)
8874 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
8875 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
8876 gsi_replace (gsi_p, gimple_build_nop (), true);
8878 else if (before)
8880 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
8881 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
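/* Editor's sketch (illustration only): an inscan reduction whose two
   half-bodies are rearranged by lower_omp_scan above.

	int r = 0;
	#pragma omp simd reduction (inscan, +: r)
	for (i = 0; i < n; i++)
	  {
	    r += a[i];                    // input phase
	    #pragma omp scan inclusive (r)
	    b[i] = r;                     // scan phase: running prefix sum
	  }
*/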
8886 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
8887 substitution of a couple of function calls. But the NAMED case
8888 requires that languages coordinate a symbol name. It is therefore
8889 best put here in common code. */
8891 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
8893 static void
8894 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8896 tree block;
8897 tree name, lock, unlock;
8898 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
8899 gbind *bind;
8900 location_t loc = gimple_location (stmt);
8901 gimple_seq tbody;
8903 name = gimple_omp_critical_name (stmt);
8904 if (name)
8906 tree decl;
8908 if (!critical_name_mutexes)
8909 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
8911 tree *n = critical_name_mutexes->get (name);
8912 if (n == NULL)
8914 char *new_str;
8916 decl = create_tmp_var_raw (ptr_type_node);
8918 new_str = ACONCAT ((".gomp_critical_user_",
8919 IDENTIFIER_POINTER (name), NULL));
8920 DECL_NAME (decl) = get_identifier (new_str);
8921 TREE_PUBLIC (decl) = 1;
8922 TREE_STATIC (decl) = 1;
8923 DECL_COMMON (decl) = 1;
8924 DECL_ARTIFICIAL (decl) = 1;
8925 DECL_IGNORED_P (decl) = 1;
8927 varpool_node::finalize_decl (decl);
8929 critical_name_mutexes->put (name, decl);
8931 else
8932 decl = *n;
8934 /* If '#pragma omp critical' is inside offloaded region or
8935 inside function marked as offloadable, the symbol must be
8936 marked as offloadable too. */
8937 omp_context *octx;
8938 if (cgraph_node::get (current_function_decl)->offloadable)
8939 varpool_node::get_create (decl)->offloadable = 1;
8940 else
8941 for (octx = ctx->outer; octx; octx = octx->outer)
8942 if (is_gimple_omp_offloaded (octx->stmt))
8944 varpool_node::get_create (decl)->offloadable = 1;
8945 break;
8948 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
8949 lock = build_call_expr_loc (loc, lock, 1,
8950 build_fold_addr_expr_loc (loc, decl));
8952 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
8953 unlock = build_call_expr_loc (loc, unlock, 1,
8954 build_fold_addr_expr_loc (loc, decl));
8956 else
8958 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
8959 lock = build_call_expr_loc (loc, lock, 0);
8961 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
8962 unlock = build_call_expr_loc (loc, unlock, 0);
8965 push_gimplify_context ();
8967 block = make_node (BLOCK);
8968 bind = gimple_build_bind (NULL, NULL, block);
8969 gsi_replace (gsi_p, bind, true);
8970 gimple_bind_add_stmt (bind, stmt);
8972 tbody = gimple_bind_body (bind);
8973 gimplify_and_add (lock, &tbody);
8974 gimple_bind_set_body (bind, tbody);
8976 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8977 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8978 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8979 gimple_omp_set_body (stmt, NULL);
8981 tbody = gimple_bind_body (bind);
8982 gimplify_and_add (unlock, &tbody);
8983 gimple_bind_set_body (bind, tbody);
8985 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8987 pop_gimplify_context (bind);
8988 gimple_bind_append_vars (bind, ctx->block_vars);
8989 BLOCK_VARS (block) = gimple_bind_vars (bind);
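/* Editor's sketch (illustration only): for a named critical region

	#pragma omp critical (io)
	  log_line ();   // log_line is a hypothetical helper

   the code above emits calls on a shared mutex symbol named
   ".gomp_critical_user_io", roughly

	GOMP_critical_name_start (&.gomp_critical_user_io);
	log_line ();
	GOMP_critical_name_end (&.gomp_critical_user_io);

   while the unnamed form uses GOMP_critical_start/end without an
   argument.  */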
8992 /* A subroutine of lower_omp_for. Generate code to emit the predicate
8993 for a lastprivate clause. Given a loop control predicate of (V
8994 cond N2), we gate the clause on (!(V cond N2)). The lowered form
8995 is appended to *DLIST, iterator initialization is appended to
8996 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
8997 to be emitted in a critical section. */
8999 static void
9000 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9001 gimple_seq *dlist, gimple_seq *clist,
9002 struct omp_context *ctx)
9004 tree clauses, cond, vinit;
9005 enum tree_code cond_code;
9006 gimple_seq stmts;
9008 cond_code = fd->loop.cond_code;
9009 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9011 /* When possible, use a strict equality expression. This can let
9012 VRP-style optimizations deduce the value and remove a copy. */
9013 if (tree_fits_shwi_p (fd->loop.step))
9015 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9016 if (step == 1 || step == -1)
9017 cond_code = EQ_EXPR;
9020 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9021 || gimple_omp_for_grid_phony (fd->for_stmt))
9022 cond = omp_grid_lastprivate_predicate (fd);
9023 else
9025 tree n2 = fd->loop.n2;
9026 if (fd->collapse > 1
9027 && TREE_CODE (n2) != INTEGER_CST
9028 && gimple_omp_for_combined_into_p (fd->for_stmt))
9030 struct omp_context *taskreg_ctx = NULL;
9031 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9033 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9034 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9035 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9037 if (gimple_omp_for_combined_into_p (gfor))
9039 gcc_assert (ctx->outer->outer
9040 && is_parallel_ctx (ctx->outer->outer));
9041 taskreg_ctx = ctx->outer->outer;
9043 else
9045 struct omp_for_data outer_fd;
9046 omp_extract_for_data (gfor, &outer_fd, NULL);
9047 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9050 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9051 taskreg_ctx = ctx->outer->outer;
9053 else if (is_taskreg_ctx (ctx->outer))
9054 taskreg_ctx = ctx->outer;
9055 if (taskreg_ctx)
9057 int i;
9058 tree taskreg_clauses
9059 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9060 tree innerc = omp_find_clause (taskreg_clauses,
9061 OMP_CLAUSE__LOOPTEMP_);
9062 gcc_assert (innerc);
9063 for (i = 0; i < fd->collapse; i++)
9065 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9066 OMP_CLAUSE__LOOPTEMP_);
9067 gcc_assert (innerc);
9069 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9070 OMP_CLAUSE__LOOPTEMP_);
9071 if (innerc)
9072 n2 = fold_convert (TREE_TYPE (n2),
9073 lookup_decl (OMP_CLAUSE_DECL (innerc),
9074 taskreg_ctx));
9077 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9080 clauses = gimple_omp_for_clauses (fd->for_stmt);
9081 stmts = NULL;
9082 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9083 if (!gimple_seq_empty_p (stmts))
9085 gimple_seq_add_seq (&stmts, *dlist);
9086 *dlist = stmts;
9088 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9089 vinit = fd->loop.n1;
9090 if (cond_code == EQ_EXPR
9091 && tree_fits_shwi_p (fd->loop.n2)
9092 && ! integer_zerop (fd->loop.n2))
9093 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9094 else
9095 vinit = unshare_expr (vinit);
9097 /* Initialize the iterator variable, so that threads that don't execute
9098 any iterations don't execute the lastprivate clauses by accident. */
9099 gimplify_assign (fd->loop.v, vinit, body_p);
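/* Editor's sketch (illustration only): for

	#pragma omp for lastprivate (x)
	for (i = 0; i < n; i++)
	  x = f (i);   // f is a hypothetical helper

   the predicate built above gates the copy-out on the thread whose
   iterator passed the end of the range, roughly

	if (i >= n)   // !(i < n); EQ_EXPR is used when the step is +/-1
	  x_orig = x_priv;

   and vinit pre-initializes the iterator so that threads which execute
   no iterations cannot satisfy the predicate by accident.  */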
9103 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9105 tree
9106 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9107 struct walk_stmt_info *wi)
9109 gimple *stmt = gsi_stmt (*gsi_p);
9111 *handled_ops_p = true;
9112 switch (gimple_code (stmt))
9114 WALK_SUBSTMTS;
9116 case GIMPLE_OMP_SCAN:
9117 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9118 return integer_zero_node;
9119 default:
9120 break;
9122 return NULL;
9125 /* Helper function for lower_omp_for: add transformations for a worksharing
9126 loop with scan directives inside of it.
9127 For a worksharing loop not combined with simd, transform:
9128 #pragma omp for reduction(inscan,+:r) private(i)
9129 for (i = 0; i < n; i = i + 1)
9132 update (r);
9134 #pragma omp scan inclusive(r)
9136 use (r);
9140 into two worksharing loops + code to merge results:
9142 num_threads = omp_get_num_threads ();
9143 thread_num = omp_get_thread_num ();
9144 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9145 <D.2099>:
9146 var2 = r;
9147 goto <D.2101>;
9148 <D.2100>:
9149 // For UDRs this is UDR init, or if ctors are needed, copy from
9150 // var3 that has been constructed to contain the neutral element.
9151 var2 = 0;
9152 <D.2101>:
9153 ivar = 0;
9154 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9155 // a shared array with num_threads elements and rprivb to a local array
9156 // number of elements equal to the number of (contiguous) iterations the
9157 // current thread will perform. controlb and controlp variables are
9158 // temporaries to handle deallocation of rprivb at the end of second
9159 // GOMP_FOR.
9160 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9161 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9162 for (i = 0; i < n; i = i + 1)
9165 // For UDRs this is UDR init or copy from var3.
9166 r = 0;
9167 // This is the input phase from user code.
9168 update (r);
9171 // For UDRs this is UDR merge.
9172 var2 = var2 + r;
9173 // Rather than handing it over to the user, save to local thread's
9174 // array.
9175 rprivb[ivar] = var2;
9176 // For exclusive scan, the above two statements are swapped.
9177 ivar = ivar + 1;
9180 // And remember the final value from this thread into the shared
9181 // rpriva array.
9182 rpriva[(sizetype) thread_num] = var2;
9183 // If more than one thread, compute the inclusive parallel scan of
9184 // the rpriva array using a work-efficient prefix sum.
9185 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9186 <D.2102>:
9187 GOMP_barrier ();
9188 down = 0;
9189 k = 1;
9190 num_threadsu = (unsigned int) num_threads;
9191 thread_numup1 = (unsigned int) thread_num + 1;
9192 <D.2108>:
9193 twok = k << 1;
9194 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9195 <D.2110>:
9196 down = 4294967295;
9197 k = k >> 1;
9198 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9199 <D.2112>:
9200 k = k >> 1;
9201 <D.2111>:
9202 twok = k << 1;
9203 cplx = .MUL_OVERFLOW (thread_numup1, twok);
9204 mul = REALPART_EXPR <cplx>;
9205 ovf = IMAGPART_EXPR <cplx>;
9206 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9207 <D.2116>:
9208 andv = k & down;
9209 andvm1 = andv + 4294967295;
9210 l = mul + andvm1;
9211 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9212 <D.2120>:
9213 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9214 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9215 rpriva[l] = rpriva[l - k] + rpriva[l];
9216 <D.2117>:
9217 if (down == 0) goto <D.2121>; else goto <D.2122>;
9218 <D.2121>:
9219 k = k << 1;
9220 goto <D.2123>;
9221 <D.2122>:
9222 k = k >> 1;
9223 <D.2123>:
9224 GOMP_barrier ();
9225 if (k != 0) goto <D.2108>; else goto <D.2103>;
9226 <D.2103>:
9227 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9228 <D.2124>:
9229 // For UDRs this is UDR init or copy from var3.
9230 var2 = 0;
9231 goto <D.2126>;
9232 <D.2125>:
9233 var2 = rpriva[thread_num - 1];
9234 <D.2126>:
9235 ivar = 0;
9236 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9237 reduction(inscan,+:r) private(i)
9238 for (i = 0; i < n; i = i + 1)
9241 // For UDRs, this is UDR merge (rprivb[ivar], var2); r = rprivb[ivar];
9242 r = rprivb[ivar] + var2;
9245 // This is the scan phase from user code.
9246 use (r);
9247 // Plus a bump of the iterator.
9248 ivar = ivar + 1;
9250 } */
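/* For illustration only (an example added in editing, not from the GCC
   sources): a minimal self-contained testcase that exercises the
   transformation described above, using plain OpenMP 5.0 inscan
   reduction syntax.

   #include <assert.h>

   int
   main (void)
   {
     int r = 0, a[8], i;
     for (i = 0; i < 8; i++)
       a[i] = i + 1;
   #pragma omp parallel
   #pragma omp for reduction (inscan, +:r)
     for (i = 0; i < 8; i++)
       {
         r += a[i];
   #pragma omp scan inclusive (r)
         a[i] = r;
       }
     // a[] now holds the inclusive prefix sums 1 3 6 10 15 21 28 36.
     assert (a[7] == 36);
     return 0;
   }  */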
9252 static void
9253 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9254 struct omp_for_data *fd, omp_context *ctx)
9256 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9258 gimple_seq body = gimple_omp_body (stmt);
9259 gimple_stmt_iterator input1_gsi = gsi_none ();
9260 struct walk_stmt_info wi;
9261 memset (&wi, 0, sizeof (wi));
9262 wi.val_only = true;
9263 wi.info = (void *) &input1_gsi;
9264 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9265 gcc_assert (!gsi_end_p (input1_gsi));
9267 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9268 gimple_stmt_iterator gsi = input1_gsi;
9269 gsi_next (&gsi);
9270 gimple_stmt_iterator scan1_gsi = gsi;
9271 gimple *scan_stmt1 = gsi_stmt (gsi);
9272 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9274 gimple_seq input_body = gimple_omp_body (input_stmt1);
9275 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9276 gimple_omp_set_body (input_stmt1, NULL);
9277 gimple_omp_set_body (scan_stmt1, NULL);
9278 gimple_omp_set_body (stmt, NULL);
9280 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9281 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9282 gimple_omp_set_body (stmt, body);
9283 gimple_omp_set_body (input_stmt1, input_body);
9285 gimple_stmt_iterator input2_gsi = gsi_none ();
9286 memset (&wi, 0, sizeof (wi));
9287 wi.val_only = true;
9288 wi.info = (void *) &input2_gsi;
9289 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9290 gcc_assert (!gsi_end_p (input2_gsi));
9292 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9293 gsi = input2_gsi;
9294 gsi_next (&gsi);
9295 gimple_stmt_iterator scan2_gsi = gsi;
9296 gimple *scan_stmt2 = gsi_stmt (gsi);
9297 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9298 gimple_omp_set_body (scan_stmt2, scan_body);
9300 tree num_threads = create_tmp_var (integer_type_node);
9301 tree thread_num = create_tmp_var (integer_type_node);
9302 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9303 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9304 gimple *g = gimple_build_call (nthreads_decl, 0);
9305 gimple_call_set_lhs (g, num_threads);
9306 gimple_seq_add_stmt (body_p, g);
9307 g = gimple_build_call (threadnum_decl, 0);
9308 gimple_call_set_lhs (g, thread_num);
9309 gimple_seq_add_stmt (body_p, g);
9311 tree ivar = create_tmp_var (sizetype);
9312 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9313 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9314 tree k = create_tmp_var (unsigned_type_node);
9315 tree l = create_tmp_var (unsigned_type_node);
9317 gimple_seq clist = NULL, mdlist = NULL;
9318 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9319 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9320 gimple_seq scan1_list = NULL, input2_list = NULL;
9321 gimple_seq last_list = NULL, reduc_list = NULL;
9322 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9323 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9324 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9326 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9327 tree var = OMP_CLAUSE_DECL (c);
9328 tree new_var = lookup_decl (var, ctx);
9329 tree var3 = NULL_TREE;
9330 tree new_vard = new_var;
9331 if (omp_is_reference (var))
9332 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9333 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9335 var3 = maybe_lookup_decl (new_vard, ctx);
9336 if (var3 == new_vard)
9337 var3 = NULL_TREE;
9340 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9341 tree rpriva = create_tmp_var (ptype);
9342 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9343 OMP_CLAUSE_DECL (nc) = rpriva;
9344 *cp1 = nc;
9345 cp1 = &OMP_CLAUSE_CHAIN (nc);
9347 tree rprivb = create_tmp_var (ptype);
9348 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9349 OMP_CLAUSE_DECL (nc) = rprivb;
9350 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9351 *cp1 = nc;
9352 cp1 = &OMP_CLAUSE_CHAIN (nc);
9354 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9355 if (new_vard != new_var)
9356 TREE_ADDRESSABLE (var2) = 1;
9357 gimple_add_tmp_var (var2);
9359 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9360 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9361 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9362 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9363 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9365 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9366 thread_num, integer_minus_one_node);
9367 x = fold_convert_loc (clause_loc, sizetype, x);
9368 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9369 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9370 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9371 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9373 x = fold_convert_loc (clause_loc, sizetype, l);
9374 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9375 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9376 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9377 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9379 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9380 x = fold_convert_loc (clause_loc, sizetype, x);
9381 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9382 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9383 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9384 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9386 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9387 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9388 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9389 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9391 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9393 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9394 tree val = var2;
9395 if (new_vard != new_var)
9396 val = build_fold_addr_expr_loc (clause_loc, val);
9398 x = lang_hooks.decls.omp_clause_default_ctor
9399 (c, var2, build_outer_var_ref (var, ctx));
9400 if (x)
9401 gimplify_and_add (x, &clist);
9403 x = build_outer_var_ref (var, ctx);
9404 x = lang_hooks.decls.omp_clause_assign_op (c, var2, x);
9405 gimplify_and_add (x, &thr01_list);
9407 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9408 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9409 if (var3)
9411 x = lang_hooks.decls.omp_clause_assign_op (c, var2, var3);
9412 gimplify_and_add (x, &thrn1_list);
9413 x = lang_hooks.decls.omp_clause_assign_op (c, var2, var3);
9414 gimplify_and_add (x, &thr02_list);
9416 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9418 /* Otherwise, assign to it the identity element. */
9419 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9420 tseq = copy_gimple_seq_and_replace_locals (tseq);
9421 SET_DECL_VALUE_EXPR (new_vard, val);
9422 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9423 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9424 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9425 lower_omp (&tseq, ctx);
9426 gimple_seq_add_seq (&thrn1_list, tseq);
9427 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9428 lower_omp (&tseq, ctx);
9429 gimple_seq_add_seq (&thr02_list, tseq);
9430 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9431 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9432 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9433 if (y)
9434 SET_DECL_VALUE_EXPR (new_vard, y);
9435 else
9437 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9438 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9442 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivam1_ref);
9443 gimplify_and_add (x, &thrn2_list);
9445 if (ctx->scan_exclusive)
9447 x = unshare_expr (rprivb_ref);
9448 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9449 gimplify_and_add (x, &scan1_list);
9452 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9453 tseq = copy_gimple_seq_and_replace_locals (tseq);
9454 SET_DECL_VALUE_EXPR (placeholder, var2);
9455 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9456 lower_omp (&tseq, ctx);
9457 gimple_seq_add_seq (&scan1_list, tseq);
9459 if (ctx->scan_inclusive)
9461 x = unshare_expr (rprivb_ref);
9462 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9463 gimplify_and_add (x, &scan1_list);
9466 x = unshare_expr (rpriva_ref);
9467 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9468 gimplify_and_add (x, &mdlist);
9470 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9471 tseq = copy_gimple_seq_and_replace_locals (tseq);
9472 SET_DECL_VALUE_EXPR (new_vard, val);
9473 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9474 SET_DECL_VALUE_EXPR (placeholder, rprivb_ref);
9475 lower_omp (&tseq, ctx);
9476 if (y)
9477 SET_DECL_VALUE_EXPR (new_vard, y);
9478 else
9480 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9481 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9483 gimple_seq_add_seq (&input2_list, tseq);
9485 x = unshare_expr (new_var);
9486 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivb_ref);
9487 gimplify_and_add (x, &input2_list);
9489 x = build_outer_var_ref (var, ctx);
9490 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
9491 gimplify_and_add (x, &last_list);
9493 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
9494 gimplify_and_add (x, &reduc_list);
9495 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9496 tseq = copy_gimple_seq_and_replace_locals (tseq);
9497 val = rprival_ref;
9498 if (new_vard != new_var)
9499 val = build_fold_addr_expr_loc (clause_loc, val);
9500 SET_DECL_VALUE_EXPR (new_vard, val);
9501 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9502 SET_DECL_VALUE_EXPR (placeholder, var2);
9503 lower_omp (&tseq, ctx);
9504 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9505 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9506 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9507 if (y)
9508 SET_DECL_VALUE_EXPR (new_vard, y);
9509 else
9511 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9512 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9514 gimple_seq_add_seq (&reduc_list, tseq);
9515 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
9516 gimplify_and_add (x, &reduc_list);
9518 x = lang_hooks.decls.omp_clause_dtor (c, var2);
9519 if (x)
9520 gimplify_and_add (x, dlist);
9522 else
9524 x = build_outer_var_ref (var, ctx);
9525 gimplify_assign (var2, x, &thr01_list);
9527 x = omp_reduction_init (c, TREE_TYPE (new_var));
9528 gimplify_assign (var2, unshare_expr (x), &thrn1_list);
9529 gimplify_assign (var2, x, &thr02_list);
9531 gimplify_assign (var2, rprivam1_ref, &thrn2_list);
9533 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9534 if (code == MINUS_EXPR)
9535 code = PLUS_EXPR;
9537 if (ctx->scan_exclusive)
9538 gimplify_assign (unshare_expr (rprivb_ref), var2, &scan1_list);
9539 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
9540 gimplify_assign (var2, x, &scan1_list);
9541 if (ctx->scan_inclusive)
9542 gimplify_assign (unshare_expr (rprivb_ref), var2, &scan1_list);
9544 gimplify_assign (unshare_expr (rpriva_ref), var2, &mdlist);
9546 x = build2 (code, TREE_TYPE (new_var), rprivb_ref, var2);
9547 gimplify_assign (new_var, x, &input2_list);
9549 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
9550 &last_list);
9552 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
9553 unshare_expr (rprival_ref));
9554 gimplify_assign (rprival_ref, x, &reduc_list);
9558 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
9559 gimple_seq_add_stmt (&scan1_list, g);
9560 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
9561 gimple_seq_add_stmt (gimple_omp_body_ptr (scan_stmt2), g);
9563 tree controlb = create_tmp_var (boolean_type_node);
9564 tree controlp = create_tmp_var (ptr_type_node);
9565 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9566 OMP_CLAUSE_DECL (nc) = controlb;
9567 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9568 *cp1 = nc;
9569 cp1 = &OMP_CLAUSE_CHAIN (nc);
9570 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9571 OMP_CLAUSE_DECL (nc) = controlp;
9572 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9573 *cp1 = nc;
9574 cp1 = &OMP_CLAUSE_CHAIN (nc);
9575 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9576 OMP_CLAUSE_DECL (nc) = controlb;
9577 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9578 *cp2 = nc;
9579 cp2 = &OMP_CLAUSE_CHAIN (nc);
9580 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9581 OMP_CLAUSE_DECL (nc) = controlp;
9582 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9583 *cp2 = nc;
9584 cp2 = &OMP_CLAUSE_CHAIN (nc);
9586 *cp1 = gimple_omp_for_clauses (stmt);
9587 gimple_omp_for_set_clauses (stmt, new_clauses1);
9588 *cp2 = gimple_omp_for_clauses (new_stmt);
9589 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
9591 gimple_omp_set_body (scan_stmt1, scan1_list);
9592 gimple_omp_set_body (input_stmt2, input2_list);
9594 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
9595 GSI_SAME_STMT);
9596 gsi_remove (&input1_gsi, true);
9597 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
9598 GSI_SAME_STMT);
9599 gsi_remove (&scan1_gsi, true);
9600 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
9601 GSI_SAME_STMT);
9602 gsi_remove (&input2_gsi, true);
9603 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
9604 GSI_SAME_STMT);
9605 gsi_remove (&scan2_gsi, true);
9607 gimple_seq_add_seq (body_p, clist);
9609 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9610 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9611 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9612 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
9613 gimple_seq_add_stmt (body_p, g);
9614 g = gimple_build_label (lab1);
9615 gimple_seq_add_stmt (body_p, g);
9616 gimple_seq_add_seq (body_p, thr01_list);
9617 g = gimple_build_goto (lab3);
9618 gimple_seq_add_stmt (body_p, g);
9619 g = gimple_build_label (lab2);
9620 gimple_seq_add_stmt (body_p, g);
9621 gimple_seq_add_seq (body_p, thrn1_list);
9622 g = gimple_build_label (lab3);
9623 gimple_seq_add_stmt (body_p, g);
9625 g = gimple_build_assign (ivar, size_zero_node);
9626 gimple_seq_add_stmt (body_p, g);
9628 gimple_seq_add_stmt (body_p, stmt);
9629 gimple_seq_add_seq (body_p, body);
9630 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
9631 fd->loop.v));
9633 g = gimple_build_omp_return (true);
9634 gimple_seq_add_stmt (body_p, g);
9635 gimple_seq_add_seq (body_p, mdlist);
9637 lab1 = create_artificial_label (UNKNOWN_LOCATION);
9638 lab2 = create_artificial_label (UNKNOWN_LOCATION);
9639 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
9640 gimple_seq_add_stmt (body_p, g);
9641 g = gimple_build_label (lab1);
9642 gimple_seq_add_stmt (body_p, g);
9644 g = omp_build_barrier (NULL);
9645 gimple_seq_add_stmt (body_p, g);
9647 tree down = create_tmp_var (unsigned_type_node);
9648 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
9649 gimple_seq_add_stmt (body_p, g);
9651 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
9652 gimple_seq_add_stmt (body_p, g);
9654 tree num_threadsu = create_tmp_var (unsigned_type_node);
9655 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
9656 gimple_seq_add_stmt (body_p, g);
9658 tree thread_numu = create_tmp_var (unsigned_type_node);
9659 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
9660 gimple_seq_add_stmt (body_p, g);
9662 tree thread_nump1 = create_tmp_var (unsigned_type_node);
9663 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
9664 build_int_cst (unsigned_type_node, 1));
9665 gimple_seq_add_stmt (body_p, g);
9667 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9668 g = gimple_build_label (lab3);
9669 gimple_seq_add_stmt (body_p, g);
9671 tree twok = create_tmp_var (unsigned_type_node);
9672 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
9673 gimple_seq_add_stmt (body_p, g);
9675 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9676 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9677 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9678 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
9679 gimple_seq_add_stmt (body_p, g);
9680 g = gimple_build_label (lab4);
9681 gimple_seq_add_stmt (body_p, g);
9682 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
9683 gimple_seq_add_stmt (body_p, g);
9684 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
9685 gimple_seq_add_stmt (body_p, g);
9687 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
9688 gimple_seq_add_stmt (body_p, g);
9689 g = gimple_build_label (lab6);
9690 gimple_seq_add_stmt (body_p, g);
9692 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
9693 gimple_seq_add_stmt (body_p, g);
9695 g = gimple_build_label (lab5);
9696 gimple_seq_add_stmt (body_p, g);
9698 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
9699 gimple_seq_add_stmt (body_p, g);
9701 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
9702 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
9703 gimple_call_set_lhs (g, cplx);
9704 gimple_seq_add_stmt (body_p, g);
9705 tree mul = create_tmp_var (unsigned_type_node);
9706 g = gimple_build_assign (mul, REALPART_EXPR,
9707 build1 (REALPART_EXPR, unsigned_type_node, cplx));
9708 gimple_seq_add_stmt (body_p, g);
9709 tree ovf = create_tmp_var (unsigned_type_node);
9710 g = gimple_build_assign (ovf, IMAGPART_EXPR,
9711 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
9712 gimple_seq_add_stmt (body_p, g);
9714 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
9715 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
9716 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
9717 lab7, lab8);
9718 gimple_seq_add_stmt (body_p, g);
9719 g = gimple_build_label (lab7);
9720 gimple_seq_add_stmt (body_p, g);
9722 tree andv = create_tmp_var (unsigned_type_node);
9723 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
9724 gimple_seq_add_stmt (body_p, g);
9725 tree andvm1 = create_tmp_var (unsigned_type_node);
9726 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
9727 build_minus_one_cst (unsigned_type_node));
9728 gimple_seq_add_stmt (body_p, g);
9730 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
9731 gimple_seq_add_stmt (body_p, g);
9733 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
9734 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
9735 gimple_seq_add_stmt (body_p, g);
9736 g = gimple_build_label (lab9);
9737 gimple_seq_add_stmt (body_p, g);
9738 gimple_seq_add_seq (body_p, reduc_list);
9739 g = gimple_build_label (lab8);
9740 gimple_seq_add_stmt (body_p, g);
9742 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
9743 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
9744 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
9745 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
9746 lab10, lab11);
9747 gimple_seq_add_stmt (body_p, g);
9748 g = gimple_build_label (lab10);
9749 gimple_seq_add_stmt (body_p, g);
9750 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
9751 gimple_seq_add_stmt (body_p, g);
9752 g = gimple_build_goto (lab12);
9753 gimple_seq_add_stmt (body_p, g);
9754 g = gimple_build_label (lab11);
9755 gimple_seq_add_stmt (body_p, g);
9756 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
9757 gimple_seq_add_stmt (body_p, g);
9758 g = gimple_build_label (lab12);
9759 gimple_seq_add_stmt (body_p, g);
9761 g = omp_build_barrier (NULL);
9762 gimple_seq_add_stmt (body_p, g);
9764 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
9765 lab3, lab2);
9766 gimple_seq_add_stmt (body_p, g);
9768 g = gimple_build_label (lab2);
9769 gimple_seq_add_stmt (body_p, g);
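/* A structured-C model of the prefix-sum sequence emitted above
   (illustrative sketch only, not compiler output; every thread executes
   it with its own thread_num, rpriva has num_threads elements, and
   GOMP_barrier stands for the emitted barrier):

   if (num_threads > 1)
     {
       GOMP_barrier ();
       unsigned down = 0, k = 1;
       unsigned nthr = (unsigned) num_threads;
       unsigned tid1 = (unsigned) thread_num + 1;
       do
         {
           unsigned twok = k << 1;
           if (twok > nthr)       // switch from up-sweep to down-sweep
             {
               down = ~0U;
               k >>= 1;
               if (k == nthr)
                 k >>= 1;
             }
           twok = k << 1;
           unsigned mul, l;
           if (!__builtin_mul_overflow (tid1, twok, &mul)
               && (l = mul + ((k & down) - 1)) < nthr)
             rpriva[l] = rpriva[l - k] + rpriva[l];  // the reduc_list merge
           k = down == 0 ? k << 1 : k >> 1;
           GOMP_barrier ();
         }
       while (k != 0);
     }  */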
9771 lab1 = create_artificial_label (UNKNOWN_LOCATION);
9772 lab2 = create_artificial_label (UNKNOWN_LOCATION);
9773 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9774 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
9775 gimple_seq_add_stmt (body_p, g);
9776 g = gimple_build_label (lab1);
9777 gimple_seq_add_stmt (body_p, g);
9778 gimple_seq_add_seq (body_p, thr02_list);
9779 g = gimple_build_goto (lab3);
9780 gimple_seq_add_stmt (body_p, g);
9781 g = gimple_build_label (lab2);
9782 gimple_seq_add_stmt (body_p, g);
9783 gimple_seq_add_seq (body_p, thrn2_list);
9784 g = gimple_build_label (lab3);
9785 gimple_seq_add_stmt (body_p, g);
9787 g = gimple_build_assign (ivar, size_zero_node);
9788 gimple_seq_add_stmt (body_p, g);
9789 gimple_seq_add_stmt (body_p, new_stmt);
9790 gimple_seq_add_seq (body_p, new_body);
9792 gimple_seq new_dlist = NULL;
9793 lab1 = create_artificial_label (UNKNOWN_LOCATION);
9794 lab2 = create_artificial_label (UNKNOWN_LOCATION);
9795 tree num_threadsm1 = create_tmp_var (integer_type_node);
9796 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
9797 integer_minus_one_node);
9798 gimple_seq_add_stmt (&new_dlist, g);
9799 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
9800 gimple_seq_add_stmt (&new_dlist, g);
9801 g = gimple_build_label (lab1);
9802 gimple_seq_add_stmt (&new_dlist, g);
9803 gimple_seq_add_seq (&new_dlist, last_list);
9804 g = gimple_build_label (lab2);
9805 gimple_seq_add_stmt (&new_dlist, g);
9806 gimple_seq_add_seq (&new_dlist, *dlist);
9807 *dlist = new_dlist;
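/* Note on the destructor sequence built above: after the parallel prefix
   scan, rpriva[num_threads - 1] holds the reduction over all iterations,
   so the thread with thread_num == num_threads - 1 copies its rpriva slot
   back into the user's reduction variable before the previously queued
   destructors run.  */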
9810 /* Lower code for an OMP loop directive. */
9812 static void
9813 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9815 tree *rhs_p, block;
9816 struct omp_for_data fd, *fdp = NULL;
9817 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
9818 gbind *new_stmt;
9819 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
9820 gimple_seq cnt_list = NULL, clist = NULL;
9821 gimple_seq oacc_head = NULL, oacc_tail = NULL;
9822 size_t i;
9824 push_gimplify_context ();
9826 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
9828 block = make_node (BLOCK);
9829 new_stmt = gimple_build_bind (NULL, NULL, block);
9830 /* Replace at gsi right away, so that 'stmt' is no longer a member
9831 of a sequence, as we're going to add it to a different
9832 one below. */
9833 gsi_replace (gsi_p, new_stmt, true);
9835 /* Move the declarations of temporaries in the loop body to the new
9836 bind before we make the body go away. */
9837 omp_for_body = gimple_omp_body (stmt);
9838 if (!gimple_seq_empty_p (omp_for_body)
9839 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
9841 gbind *inner_bind
9842 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
9843 tree vars = gimple_bind_vars (inner_bind);
9844 gimple_bind_append_vars (new_stmt, vars);
9845 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
9846 keep them on the inner_bind and its block. */
9847 gimple_bind_set_vars (inner_bind, NULL_TREE);
9848 if (gimple_bind_block (inner_bind))
9849 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
9852 if (gimple_omp_for_combined_into_p (stmt))
9854 omp_extract_for_data (stmt, &fd, NULL);
9855 fdp = &fd;
9857 /* We need two temporaries with fd.loop.v type (istart/iend)
9858 and then (fd.collapse - 1) temporaries with the same
9859 type for count2 ... countN-1 vars if not constant. */
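/* Worked instance (illustration): a taskloop with collapse(2) whose total
   iteration count is not a compile-time constant needs
   count = 2 + (2 - 1) = 3 _looptemp_ temporaries: istart, iend and the
   count2 var.  */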
9860 size_t count = 2;
9861 tree type = fd.iter_type;
9862 if (fd.collapse > 1
9863 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
9864 count += fd.collapse - 1;
9865 bool taskreg_for
9866 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
9867 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
9868 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
9869 tree simtc = NULL;
9870 tree clauses = *pc;
9871 if (taskreg_for)
9872 outerc
9873 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
9874 OMP_CLAUSE__LOOPTEMP_);
9875 if (ctx->simt_stmt)
9876 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
9877 OMP_CLAUSE__LOOPTEMP_);
9878 for (i = 0; i < count; i++)
9880 tree temp;
9881 if (taskreg_for)
9883 gcc_assert (outerc);
9884 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
9885 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
9886 OMP_CLAUSE__LOOPTEMP_);
9888 else
9890 /* If there are 2 adjacent SIMD stmts, one with _simt_
9891 clause, another without, make sure they have the same
9892 decls in _looptemp_ clauses, because the outer stmt
9893 they are combined into will look up just one inner_stmt. */
9894 if (ctx->simt_stmt)
9895 temp = OMP_CLAUSE_DECL (simtc);
9896 else
9897 temp = create_tmp_var (type);
9898 insert_decl_map (&ctx->outer->cb, temp, temp);
9900 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
9901 OMP_CLAUSE_DECL (*pc) = temp;
9902 pc = &OMP_CLAUSE_CHAIN (*pc);
9903 if (ctx->simt_stmt)
9904 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
9905 OMP_CLAUSE__LOOPTEMP_);
9907 *pc = clauses;
9910 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
9911 dlist = NULL;
9912 body = NULL;
9913 tree rclauses
9914 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
9915 OMP_CLAUSE_REDUCTION);
9916 tree rtmp = NULL_TREE;
9917 if (rclauses)
9919 tree type = build_pointer_type (pointer_sized_int_node);
9920 tree temp = create_tmp_var (type);
9921 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
9922 OMP_CLAUSE_DECL (c) = temp;
9923 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
9924 gimple_omp_for_set_clauses (stmt, c);
9925 lower_omp_task_reductions (ctx, OMP_FOR,
9926 gimple_omp_for_clauses (stmt),
9927 &tred_ilist, &tred_dlist);
9928 rclauses = c;
9929 rtmp = make_ssa_name (type);
9930 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
9933 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
9934 ctx);
9936 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
9937 fdp);
9938 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
9939 gimple_omp_for_pre_body (stmt));
9941 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9943 /* Lower the header expressions. At this point, we can assume that
9944 the header is of the form:
9946 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
9948 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
9949 using the .omp_data_s mapping, if needed. */
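/* For example (temporary name hypothetical), given
     #pragma omp for
     for (i = a; i < b + c; i += s)
   the non-invariant bound b + c is evaluated into a fresh temporary that
   is appended to CNT_LIST, roughly
     D.1234 = b + c;
   and the GIMPLE_OMP_FOR header then uses D.1234 as VAL2.  */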
9950 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
9952 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
9953 if (!is_gimple_min_invariant (*rhs_p))
9954 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
9955 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
9956 recompute_tree_invariant_for_addr_expr (*rhs_p);
9958 rhs_p = gimple_omp_for_final_ptr (stmt, i);
9959 if (!is_gimple_min_invariant (*rhs_p))
9960 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
9961 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
9962 recompute_tree_invariant_for_addr_expr (*rhs_p);
9964 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
9965 if (!is_gimple_min_invariant (*rhs_p))
9966 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
9968 if (rclauses)
9969 gimple_seq_add_seq (&tred_ilist, cnt_list);
9970 else
9971 gimple_seq_add_seq (&body, cnt_list);
9973 /* Once lowered, extract the bounds and clauses. */
9974 omp_extract_for_data (stmt, &fd, NULL);
9976 if (is_gimple_omp_oacc (ctx->stmt)
9977 && !ctx_in_oacc_kernels_region (ctx))
9978 lower_oacc_head_tail (gimple_location (stmt),
9979 gimple_omp_for_clauses (stmt),
9980 &oacc_head, &oacc_tail, ctx);
9982 /* Add OpenACC partitioning and reduction markers just before the loop. */
9983 if (oacc_head)
9984 gimple_seq_add_seq (&body, oacc_head);
9986 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
9988 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
9989 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9990 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9991 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
9993 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
9994 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
9995 OMP_CLAUSE_LINEAR_STEP (c)
9996 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
9997 ctx);
10000 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10001 && gimple_omp_for_grid_phony (stmt));
10002 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10003 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10005 gcc_assert (!phony_loop);
10006 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10008 else
10010 if (!phony_loop)
10011 gimple_seq_add_stmt (&body, stmt);
10012 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10015 if (!phony_loop)
10016 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10017 fd.loop.v));
10019 /* After the loop, add exit clauses. */
10020 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10022 if (clist)
10024 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10025 gcall *g = gimple_build_call (fndecl, 0);
10026 gimple_seq_add_stmt (&body, g);
10027 gimple_seq_add_seq (&body, clist);
10028 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10029 g = gimple_build_call (fndecl, 0);
10030 gimple_seq_add_stmt (&body, g);
10033 if (ctx->cancellable)
10034 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10036 gimple_seq_add_seq (&body, dlist);
10038 if (rclauses)
10040 gimple_seq_add_seq (&tred_ilist, body);
10041 body = tred_ilist;
10044 body = maybe_catch_exception (body);
10046 if (!phony_loop)
10048 /* Region exit marker goes at the end of the loop body. */
10049 gimple *g = gimple_build_omp_return (fd.have_nowait);
10050 gimple_seq_add_stmt (&body, g);
10052 gimple_seq_add_seq (&body, tred_dlist);
10054 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10056 if (rclauses)
10057 OMP_CLAUSE_DECL (rclauses) = rtmp;
10060 /* Add OpenACC joining and reduction markers just after the loop. */
10061 if (oacc_tail)
10062 gimple_seq_add_seq (&body, oacc_tail);
10064 pop_gimplify_context (new_stmt);
10066 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10067 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10068 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10069 if (BLOCK_VARS (block))
10070 TREE_USED (block) = 1;
10072 gimple_bind_set_body (new_stmt, body);
10073 gimple_omp_set_body (stmt, NULL);
10074 gimple_omp_for_set_pre_body (stmt, NULL);
10077 /* Callback for walk_stmts. Via *INFO, check that the walked body contains
10078 exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS and nothing else. */
10080 static tree
10081 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10082 bool *handled_ops_p,
10083 struct walk_stmt_info *wi)
10085 int *info = (int *) wi->info;
10086 gimple *stmt = gsi_stmt (*gsi_p);
10088 *handled_ops_p = true;
10089 switch (gimple_code (stmt))
10091 WALK_SUBSTMTS;
10093 case GIMPLE_DEBUG:
10094 break;
10095 case GIMPLE_OMP_FOR:
10096 case GIMPLE_OMP_SECTIONS:
10097 *info = *info == 0 ? 1 : -1;
10098 break;
10099 default:
10100 *info = -1;
10101 break;
10103 return NULL;
10106 struct omp_taskcopy_context
10108 /* This field must be at the beginning, as we do "inheritance": Some
10109 callback functions for tree-inline.c (e.g., omp_copy_decl)
10110 receive a copy_body_data pointer that is up-casted to an
10111 omp_context pointer. */
10112 copy_body_data cb;
10113 omp_context *ctx;
10116 static tree
10117 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10119 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10121 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10122 return create_tmp_var (TREE_TYPE (var));
10124 return var;
10127 static tree
10128 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10130 tree name, new_fields = NULL, type, f;
10132 type = lang_hooks.types.make_type (RECORD_TYPE);
10133 name = DECL_NAME (TYPE_NAME (orig_type));
10134 name = build_decl (gimple_location (tcctx->ctx->stmt),
10135 TYPE_DECL, name, type);
10136 TYPE_NAME (type) = name;
10138 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10140 tree new_f = copy_node (f);
10141 DECL_CONTEXT (new_f) = type;
10142 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10143 TREE_CHAIN (new_f) = new_fields;
10144 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10145 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10146 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10147 &tcctx->cb, NULL);
10148 new_fields = new_f;
10149 tcctx->cb.decl_map->put (f, new_f);
10151 TYPE_FIELDS (type) = nreverse (new_fields);
10152 layout_type (type);
10153 return type;
10156 /* Create task copyfn. */
10158 static void
10159 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10161 struct function *child_cfun;
10162 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10163 tree record_type, srecord_type, bind, list;
10164 bool record_needs_remap = false, srecord_needs_remap = false;
10165 splay_tree_node n;
10166 struct omp_taskcopy_context tcctx;
10167 location_t loc = gimple_location (task_stmt);
10168 size_t looptempno = 0;
10170 child_fn = gimple_omp_task_copy_fn (task_stmt);
10171 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10172 gcc_assert (child_cfun->cfg == NULL);
10173 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10175 /* Reset DECL_CONTEXT on function arguments. */
10176 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10177 DECL_CONTEXT (t) = child_fn;
10179 /* Populate the function. */
10180 push_gimplify_context ();
10181 push_cfun (child_cfun);
10183 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10184 TREE_SIDE_EFFECTS (bind) = 1;
10185 list = NULL;
10186 DECL_SAVED_TREE (child_fn) = bind;
10187 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10189 /* Remap src and dst argument types if needed. */
10190 record_type = ctx->record_type;
10191 srecord_type = ctx->srecord_type;
10192 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10193 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10195 record_needs_remap = true;
10196 break;
10198 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10199 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10201 srecord_needs_remap = true;
10202 break;
10205 if (record_needs_remap || srecord_needs_remap)
10207 memset (&tcctx, '\0', sizeof (tcctx));
10208 tcctx.cb.src_fn = ctx->cb.src_fn;
10209 tcctx.cb.dst_fn = child_fn;
10210 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10211 gcc_checking_assert (tcctx.cb.src_node);
10212 tcctx.cb.dst_node = tcctx.cb.src_node;
10213 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10214 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10215 tcctx.cb.eh_lp_nr = 0;
10216 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10217 tcctx.cb.decl_map = new hash_map<tree, tree>;
10218 tcctx.ctx = ctx;
10220 if (record_needs_remap)
10221 record_type = task_copyfn_remap_type (&tcctx, record_type);
10222 if (srecord_needs_remap)
10223 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10225 else
10226 tcctx.cb.decl_map = NULL;
10228 arg = DECL_ARGUMENTS (child_fn);
10229 TREE_TYPE (arg) = build_pointer_type (record_type);
10230 sarg = DECL_CHAIN (arg);
10231 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10233 /* First pass: initialize temporaries used in record_type and srecord_type
10234 sizes and field offsets. */
10235 if (tcctx.cb.decl_map)
10236 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10237 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10239 tree *p;
10241 decl = OMP_CLAUSE_DECL (c);
10242 p = tcctx.cb.decl_map->get (decl);
10243 if (p == NULL)
10244 continue;
10245 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10246 sf = (tree) n->value;
10247 sf = *tcctx.cb.decl_map->get (sf);
10248 src = build_simple_mem_ref_loc (loc, sarg);
10249 src = omp_build_component_ref (src, sf);
10250 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10251 append_to_statement_list (t, &list);
10254 /* Second pass: copy shared var pointers and copy-construct non-VLA
10255 firstprivate vars. */
10256 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10257 switch (OMP_CLAUSE_CODE (c))
10259 splay_tree_key key;
10260 case OMP_CLAUSE_SHARED:
10261 decl = OMP_CLAUSE_DECL (c);
10262 key = (splay_tree_key) decl;
10263 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10264 key = (splay_tree_key) &DECL_UID (decl);
10265 n = splay_tree_lookup (ctx->field_map, key);
10266 if (n == NULL)
10267 break;
10268 f = (tree) n->value;
10269 if (tcctx.cb.decl_map)
10270 f = *tcctx.cb.decl_map->get (f);
10271 n = splay_tree_lookup (ctx->sfield_map, key);
10272 sf = (tree) n->value;
10273 if (tcctx.cb.decl_map)
10274 sf = *tcctx.cb.decl_map->get (sf);
10275 src = build_simple_mem_ref_loc (loc, sarg);
10276 src = omp_build_component_ref (src, sf);
10277 dst = build_simple_mem_ref_loc (loc, arg);
10278 dst = omp_build_component_ref (dst, f);
10279 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10280 append_to_statement_list (t, &list);
10281 break;
10282 case OMP_CLAUSE_REDUCTION:
10283 case OMP_CLAUSE_IN_REDUCTION:
10284 decl = OMP_CLAUSE_DECL (c);
10285 if (TREE_CODE (decl) == MEM_REF)
10287 decl = TREE_OPERAND (decl, 0);
10288 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10289 decl = TREE_OPERAND (decl, 0);
10290 if (TREE_CODE (decl) == INDIRECT_REF
10291 || TREE_CODE (decl) == ADDR_EXPR)
10292 decl = TREE_OPERAND (decl, 0);
10294 key = (splay_tree_key) decl;
10295 n = splay_tree_lookup (ctx->field_map, key);
10296 if (n == NULL)
10297 break;
10298 f = (tree) n->value;
10299 if (tcctx.cb.decl_map)
10300 f = *tcctx.cb.decl_map->get (f);
10301 n = splay_tree_lookup (ctx->sfield_map, key);
10302 sf = (tree) n->value;
10303 if (tcctx.cb.decl_map)
10304 sf = *tcctx.cb.decl_map->get (sf);
10305 src = build_simple_mem_ref_loc (loc, sarg);
10306 src = omp_build_component_ref (src, sf);
10307 if (decl != OMP_CLAUSE_DECL (c)
10308 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10309 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10310 src = build_simple_mem_ref_loc (loc, src);
10311 dst = build_simple_mem_ref_loc (loc, arg);
10312 dst = omp_build_component_ref (dst, f);
10313 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10314 append_to_statement_list (t, &list);
10315 break;
10316 case OMP_CLAUSE__LOOPTEMP_:
10317 /* Fields for the first two _looptemp_ clauses are initialized by
10318 GOMP_taskloop*, the rest are handled like firstprivate. */
10319 if (looptempno < 2)
10321 looptempno++;
10322 break;
10324 /* FALLTHRU */
10325 case OMP_CLAUSE__REDUCTEMP_:
10326 case OMP_CLAUSE_FIRSTPRIVATE:
10327 decl = OMP_CLAUSE_DECL (c);
10328 if (is_variable_sized (decl))
10329 break;
10330 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10331 if (n == NULL)
10332 break;
10333 f = (tree) n->value;
10334 if (tcctx.cb.decl_map)
10335 f = *tcctx.cb.decl_map->get (f);
10336 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10337 if (n != NULL)
10339 sf = (tree) n->value;
10340 if (tcctx.cb.decl_map)
10341 sf = *tcctx.cb.decl_map->get (sf);
10342 src = build_simple_mem_ref_loc (loc, sarg);
10343 src = omp_build_component_ref (src, sf);
10344 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10345 src = build_simple_mem_ref_loc (loc, src);
10347 else
10348 src = decl;
10349 dst = build_simple_mem_ref_loc (loc, arg);
10350 dst = omp_build_component_ref (dst, f);
10351 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10352 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10353 else
10354 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10355 append_to_statement_list (t, &list);
10356 break;
10357 case OMP_CLAUSE_PRIVATE:
10358 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10359 break;
10360 decl = OMP_CLAUSE_DECL (c);
10361 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10362 f = (tree) n->value;
10363 if (tcctx.cb.decl_map)
10364 f = *tcctx.cb.decl_map->get (f);
10365 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10366 if (n != NULL)
10368 sf = (tree) n->value;
10369 if (tcctx.cb.decl_map)
10370 sf = *tcctx.cb.decl_map->get (sf);
10371 src = build_simple_mem_ref_loc (loc, sarg);
10372 src = omp_build_component_ref (src, sf);
10373 if (use_pointer_for_field (decl, NULL))
10374 src = build_simple_mem_ref_loc (loc, src);
10376 else
10377 src = decl;
10378 dst = build_simple_mem_ref_loc (loc, arg);
10379 dst = omp_build_component_ref (dst, f);
10380 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10381 append_to_statement_list (t, &list);
10382 break;
10383 default:
10384 break;
10387 /* Last pass: handle VLA firstprivates. */
10388 if (tcctx.cb.decl_map)
10389 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10390 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10392 tree ind, ptr, df;
10394 decl = OMP_CLAUSE_DECL (c);
10395 if (!is_variable_sized (decl))
10396 continue;
10397 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10398 if (n == NULL)
10399 continue;
10400 f = (tree) n->value;
10401 f = *tcctx.cb.decl_map->get (f);
10402 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
10403 ind = DECL_VALUE_EXPR (decl);
10404 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
10405 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
10406 n = splay_tree_lookup (ctx->sfield_map,
10407 (splay_tree_key) TREE_OPERAND (ind, 0));
10408 sf = (tree) n->value;
10409 sf = *tcctx.cb.decl_map->get (sf);
10410 src = build_simple_mem_ref_loc (loc, sarg);
10411 src = omp_build_component_ref (src, sf);
10412 src = build_simple_mem_ref_loc (loc, src);
10413 dst = build_simple_mem_ref_loc (loc, arg);
10414 dst = omp_build_component_ref (dst, f);
10415 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10416 append_to_statement_list (t, &list);
10417 n = splay_tree_lookup (ctx->field_map,
10418 (splay_tree_key) TREE_OPERAND (ind, 0));
10419 df = (tree) n->value;
10420 df = *tcctx.cb.decl_map->get (df);
10421 ptr = build_simple_mem_ref_loc (loc, arg);
10422 ptr = omp_build_component_ref (ptr, df);
10423 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
10424 build_fold_addr_expr_loc (loc, dst));
10425 append_to_statement_list (t, &list);
10428 t = build1 (RETURN_EXPR, void_type_node, NULL);
10429 append_to_statement_list (t, &list);
10431 if (tcctx.cb.decl_map)
10432 delete tcctx.cb.decl_map;
10433 pop_gimplify_context (NULL);
10434 BIND_EXPR_BODY (bind) = list;
10435 pop_cfun ();
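/* Shape of the generated copy function (a sketch added in editing;
   struct and field names are hypothetical and depend on the clauses):

   void task_copyfn (struct .omp_data_t *dst, struct .omp_data_s *src)
   {
     dst->s = src->s;   // shared: copy the pointer field
     dst->x = src->x;   // firstprivate: copy or copy-construct the value
     // VLA firstprivates are handled last: the payload is copied and the
     // companion pointer field is redirected to the fresh copy.
   }

   libgomp calls this function (the cpyfn argument of GOMP_task) to fill
   in the data block of a deferred task.  */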
10438 static void
10439 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
10441 tree c, clauses;
10442 gimple *g;
10443 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
10445 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
10446 gcc_assert (clauses);
10447 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10448 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
10449 switch (OMP_CLAUSE_DEPEND_KIND (c))
10451 case OMP_CLAUSE_DEPEND_LAST:
10452 /* Lowering already done at gimplification. */
10453 return;
10454 case OMP_CLAUSE_DEPEND_IN:
10455 cnt[2]++;
10456 break;
10457 case OMP_CLAUSE_DEPEND_OUT:
10458 case OMP_CLAUSE_DEPEND_INOUT:
10459 cnt[0]++;
10460 break;
10461 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10462 cnt[1]++;
10463 break;
10464 case OMP_CLAUSE_DEPEND_DEPOBJ:
10465 cnt[3]++;
10466 break;
10467 case OMP_CLAUSE_DEPEND_SOURCE:
10468 case OMP_CLAUSE_DEPEND_SINK:
10469 /* FALLTHRU */
10470 default:
10471 gcc_unreachable ();
10473 if (cnt[1] || cnt[3])
10474 idx = 5;
10475 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
10476 tree type = build_array_type_nelts (ptr_type_node, total + idx);
10477 tree array = create_tmp_var (type);
10478 TREE_ADDRESSABLE (array) = 1;
10479 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
10480 NULL_TREE);
10481 if (idx == 5)
10483 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
10484 gimple_seq_add_stmt (iseq, g);
10485 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
10486 NULL_TREE);
10488 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
10489 gimple_seq_add_stmt (iseq, g);
10490 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
10492 r = build4 (ARRAY_REF, ptr_type_node, array,
10493 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
10494 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
10495 gimple_seq_add_stmt (iseq, g);
10497 for (i = 0; i < 4; i++)
10499 if (cnt[i] == 0)
10500 continue;
10501 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10502 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
10503 continue;
10504 else
10506 switch (OMP_CLAUSE_DEPEND_KIND (c))
10508 case OMP_CLAUSE_DEPEND_IN:
10509 if (i != 2)
10510 continue;
10511 break;
10512 case OMP_CLAUSE_DEPEND_OUT:
10513 case OMP_CLAUSE_DEPEND_INOUT:
10514 if (i != 0)
10515 continue;
10516 break;
10517 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10518 if (i != 1)
10519 continue;
10520 break;
10521 case OMP_CLAUSE_DEPEND_DEPOBJ:
10522 if (i != 3)
10523 continue;
10524 break;
10525 default:
10526 gcc_unreachable ();
10528 tree t = OMP_CLAUSE_DECL (c);
10529 t = fold_convert (ptr_type_node, t);
10530 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
10531 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
10532 NULL_TREE, NULL_TREE);
10533 g = gimple_build_assign (r, t);
10534 gimple_seq_add_stmt (iseq, g);
10537 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
10538 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
10539 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
10540 OMP_CLAUSE_CHAIN (c) = *pclauses;
10541 *pclauses = c;
10542 tree clobber = build_constructor (type, NULL);
10543 TREE_THIS_VOLATILE (clobber) = 1;
10544 g = gimple_build_assign (array, clobber);
10545 gimple_seq_add_stmt (oseq, g);
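/* Resulting layout of the depend array (derived from the code above).
   Legacy format, when no mutexinoutset/depobj clauses appear (idx == 2):
     array[] = { total, cnt-of-out/inout, out/inout addrs..., in addrs... }
   New format, when they do (idx == 5):
     array[] = { 0, total, cnt-of-out/inout, cnt-of-mutexinoutset,
                 cnt-of-in, out/inout..., mutexinoutset..., in...,
                 depobj... }
   E.g. "depend(in: a) depend(out: b) depend(mutexinoutset: c)" yields
     { 0, 3, 1, 1, 1, &b, &c, &a }.  */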
10548 /* Lower the OpenMP parallel or task directive in the current statement
10549 in GSI_P. CTX holds context information for the directive. */
10551 static void
10552 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10554 tree clauses;
10555 tree child_fn, t;
10556 gimple *stmt = gsi_stmt (*gsi_p);
10557 gbind *par_bind, *bind, *dep_bind = NULL;
10558 gimple_seq par_body;
10559 location_t loc = gimple_location (stmt);
10561 clauses = gimple_omp_taskreg_clauses (stmt);
10562 if (gimple_code (stmt) == GIMPLE_OMP_TASK
10563 && gimple_omp_task_taskwait_p (stmt))
10565 par_bind = NULL;
10566 par_body = NULL;
10568 else
10570 par_bind
10571 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
10572 par_body = gimple_bind_body (par_bind);
10574 child_fn = ctx->cb.dst_fn;
10575 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
10576 && !gimple_omp_parallel_combined_p (stmt))
10578 struct walk_stmt_info wi;
10579 int ws_num = 0;
10581 memset (&wi, 0, sizeof (wi));
10582 wi.info = &ws_num;
10583 wi.val_only = true;
10584 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
10585 if (ws_num == 1)
10586 gimple_omp_parallel_set_combined_p (stmt, true);
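/* E.g. for
     #pragma omp parallel
     #pragma omp for
     for (...) ...
   the walk finds exactly one workshare, ws_num stays 1, and the parallel
   is marked combined so that pass_expand_omp can use the combined
   parallel + workshare expansion.  */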
10588 gimple_seq dep_ilist = NULL;
10589 gimple_seq dep_olist = NULL;
10590 if (gimple_code (stmt) == GIMPLE_OMP_TASK
10591 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
10593 push_gimplify_context ();
10594 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10595 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
10596 &dep_ilist, &dep_olist);
10599 if (gimple_code (stmt) == GIMPLE_OMP_TASK
10600 && gimple_omp_task_taskwait_p (stmt))
10602 if (dep_bind)
10604 gsi_replace (gsi_p, dep_bind, true);
10605 gimple_bind_add_seq (dep_bind, dep_ilist);
10606 gimple_bind_add_stmt (dep_bind, stmt);
10607 gimple_bind_add_seq (dep_bind, dep_olist);
10608 pop_gimplify_context (dep_bind);
10610 return;
10613 if (ctx->srecord_type)
10614 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
10616 gimple_seq tskred_ilist = NULL;
10617 gimple_seq tskred_olist = NULL;
10618 if ((is_task_ctx (ctx)
10619 && gimple_omp_task_taskloop_p (ctx->stmt)
10620 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
10621 OMP_CLAUSE_REDUCTION))
10622 || (is_parallel_ctx (ctx)
10623 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
10624 OMP_CLAUSE__REDUCTEMP_)))
10626 if (dep_bind == NULL)
10628 push_gimplify_context ();
10629 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10631 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
10632 : OMP_PARALLEL,
10633 gimple_omp_taskreg_clauses (ctx->stmt),
10634 &tskred_ilist, &tskred_olist);
10637 push_gimplify_context ();
10639 gimple_seq par_olist = NULL;
10640 gimple_seq par_ilist = NULL;
10641 gimple_seq par_rlist = NULL;
10642 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
10643 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
10644 if (phony_construct && ctx->record_type)
10646 gcc_checking_assert (!ctx->receiver_decl);
10647 ctx->receiver_decl = create_tmp_var
10648 (build_reference_type (ctx->record_type), ".omp_rec");
10650 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
10651 lower_omp (&par_body, ctx);
10652 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
10653 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
10655 /* Declare all the variables created by mapping and the variables
10656 declared in the scope of the parallel body. */
10657 record_vars_into (ctx->block_vars, child_fn);
10658 maybe_remove_omp_member_access_dummy_vars (par_bind);
10659 record_vars_into (gimple_bind_vars (par_bind), child_fn);
10661 if (ctx->record_type)
10663 ctx->sender_decl
10664 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
10665 : ctx->record_type, ".omp_data_o");
10666 DECL_NAMELESS (ctx->sender_decl) = 1;
10667 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
10668 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
10671 gimple_seq olist = NULL;
10672 gimple_seq ilist = NULL;
10673 lower_send_clauses (clauses, &ilist, &olist, ctx);
10674 lower_send_shared_vars (&ilist, &olist, ctx);
10676 if (ctx->record_type)
10678 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
10679 TREE_THIS_VOLATILE (clobber) = 1;
10680 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
10681 clobber));
10684 /* Once all the expansions are done, sequence all the different
10685 fragments inside gimple_omp_body. */
10687 gimple_seq new_body = NULL;
10689 if (ctx->record_type)
10691 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
10692 /* fixup_child_record_type might have changed receiver_decl's type. */
10693 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
10694 gimple_seq_add_stmt (&new_body,
10695 gimple_build_assign (ctx->receiver_decl, t));
10698 gimple_seq_add_seq (&new_body, par_ilist);
10699 gimple_seq_add_seq (&new_body, par_body);
10700 gimple_seq_add_seq (&new_body, par_rlist);
10701 if (ctx->cancellable)
10702 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
10703 gimple_seq_add_seq (&new_body, par_olist);
10704 new_body = maybe_catch_exception (new_body);
10705 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
10706 gimple_seq_add_stmt (&new_body,
10707 gimple_build_omp_continue (integer_zero_node,
10708 integer_zero_node));
10709 if (!phony_construct)
10711 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10712 gimple_omp_set_body (stmt, new_body);
10715 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
10716 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10717 else
10718 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
10719 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10720 gimple_bind_add_seq (bind, ilist);
10721 if (!phony_construct)
10722 gimple_bind_add_stmt (bind, stmt);
10723 else
10724 gimple_bind_add_seq (bind, new_body);
10725 gimple_bind_add_seq (bind, olist);
10727 pop_gimplify_context (NULL);
10729 if (dep_bind)
10731 gimple_bind_add_seq (dep_bind, dep_ilist);
10732 gimple_bind_add_seq (dep_bind, tskred_ilist);
10733 gimple_bind_add_stmt (dep_bind, bind);
10734 gimple_bind_add_seq (dep_bind, tskred_olist);
10735 gimple_bind_add_seq (dep_bind, dep_olist);
10736 pop_gimplify_context (dep_bind);
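/* Illustrative sketch of the marshalling this produces (the .omp_data_*
   names follow the usual convention; the exact fields depend on the
   clauses):

     int a, b;
     #pragma omp parallel shared (a) firstprivate (b)

   lowers to roughly

     .omp_data_o.a = &a;           // ilist, lower_send_shared_vars
     .omp_data_o.b = b;            // ilist, lower_send_clauses
     #pragma omp parallel [data arg .omp_data_o]
       .omp_data_i = &.omp_data_o; // receiver_decl setup in new_body
       ... the body uses *.omp_data_i->a and a private copy initialized
       from .omp_data_i->b ...
     .omp_data_o = {CLOBBER};      // olist  */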
10740 /* Lower the GIMPLE_OMP_TARGET in the current statement
10741 in GSI_P. CTX holds context information for the directive. */
10743 static void
10744 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10746 tree clauses;
10747 tree child_fn, t, c;
10748 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
10749 gbind *tgt_bind, *bind, *dep_bind = NULL;
10750 gimple_seq tgt_body, olist, ilist, fplist, new_body;
10751 location_t loc = gimple_location (stmt);
10752 bool offloaded, data_region;
10753 unsigned int map_cnt = 0;
10755 offloaded = is_gimple_omp_offloaded (stmt);
10756 switch (gimple_omp_target_kind (stmt))
10758 case GF_OMP_TARGET_KIND_REGION:
10759 case GF_OMP_TARGET_KIND_UPDATE:
10760 case GF_OMP_TARGET_KIND_ENTER_DATA:
10761 case GF_OMP_TARGET_KIND_EXIT_DATA:
10762 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
10763 case GF_OMP_TARGET_KIND_OACC_KERNELS:
10764 case GF_OMP_TARGET_KIND_OACC_UPDATE:
10765 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
10766 case GF_OMP_TARGET_KIND_OACC_DECLARE:
10767 data_region = false;
10768 break;
10769 case GF_OMP_TARGET_KIND_DATA:
10770 case GF_OMP_TARGET_KIND_OACC_DATA:
10771 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
10772 data_region = true;
10773 break;
10774 default:
10775 gcc_unreachable ();
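/* E.g. "#pragma omp target data map(tofrom: a)" is a data region
   (GF_OMP_TARGET_KIND_DATA) and keeps its body inline, whereas
   "#pragma omp target map(tofrom: a)" is offloaded
   (GF_OMP_TARGET_KIND_REGION) and its body becomes a separate child
   function for the accelerator.  */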
10778 clauses = gimple_omp_target_clauses (stmt);
10780 gimple_seq dep_ilist = NULL;
10781 gimple_seq dep_olist = NULL;
10782 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
10784 push_gimplify_context ();
10785 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10786 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
10787 &dep_ilist, &dep_olist);
10790 tgt_bind = NULL;
10791 tgt_body = NULL;
10792 if (offloaded)
10794 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
10795 tgt_body = gimple_bind_body (tgt_bind);
10797 else if (data_region)
10798 tgt_body = gimple_omp_body (stmt);
10799 child_fn = ctx->cb.dst_fn;
10801 push_gimplify_context ();
10802 fplist = NULL;
10804 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10805 switch (OMP_CLAUSE_CODE (c))
10807 tree var, x;
10809 default:
10810 break;
10811 case OMP_CLAUSE_MAP:
10812 #if CHECKING_P
10813 /* First check what we're prepared to handle in the following. */
10814 switch (OMP_CLAUSE_MAP_KIND (c))
10816 case GOMP_MAP_ALLOC:
10817 case GOMP_MAP_TO:
10818 case GOMP_MAP_FROM:
10819 case GOMP_MAP_TOFROM:
10820 case GOMP_MAP_POINTER:
10821 case GOMP_MAP_TO_PSET:
10822 case GOMP_MAP_DELETE:
10823 case GOMP_MAP_RELEASE:
10824 case GOMP_MAP_ALWAYS_TO:
10825 case GOMP_MAP_ALWAYS_FROM:
10826 case GOMP_MAP_ALWAYS_TOFROM:
10827 case GOMP_MAP_FIRSTPRIVATE_POINTER:
10828 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
10829 case GOMP_MAP_STRUCT:
10830 case GOMP_MAP_ALWAYS_POINTER:
10831 break;
10832 case GOMP_MAP_FORCE_ALLOC:
10833 case GOMP_MAP_FORCE_TO:
10834 case GOMP_MAP_FORCE_FROM:
10835 case GOMP_MAP_FORCE_TOFROM:
10836 case GOMP_MAP_FORCE_PRESENT:
10837 case GOMP_MAP_FORCE_DEVICEPTR:
10838 case GOMP_MAP_DEVICE_RESIDENT:
10839 case GOMP_MAP_LINK:
10840 gcc_assert (is_gimple_omp_oacc (stmt));
10841 break;
10842 default:
10843 gcc_unreachable ();
10845 #endif
10846 /* FALLTHRU */
10847 case OMP_CLAUSE_TO:
10848 case OMP_CLAUSE_FROM:
10849 oacc_firstprivate:
10850 var = OMP_CLAUSE_DECL (c);
10851 if (!DECL_P (var))
10853 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
10854 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10855 && (OMP_CLAUSE_MAP_KIND (c)
10856 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
10857 map_cnt++;
10858 continue;
10861 if (DECL_SIZE (var)
10862 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
10864 tree var2 = DECL_VALUE_EXPR (var);
10865 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
10866 var2 = TREE_OPERAND (var2, 0);
10867 gcc_assert (DECL_P (var2));
10868 var = var2;
10871 if (offloaded
10872 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10873 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10874 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10876 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10878 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
10879 && varpool_node::get_create (var)->offloadable)
10880 continue;
10882 tree type = build_pointer_type (TREE_TYPE (var));
10883 tree new_var = lookup_decl (var, ctx);
10884 x = create_tmp_var_raw (type, get_name (new_var));
10885 gimple_add_tmp_var (x);
10886 x = build_simple_mem_ref (x);
10887 SET_DECL_VALUE_EXPR (new_var, x);
10888 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10890 continue;
10893 if (!maybe_lookup_field (var, ctx))
10894 continue;
10896 /* Don't remap oacc parallel reduction variables, because the
10897 intermediate result must be local to each gang. */
10898 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10899 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
10901 x = build_receiver_ref (var, true, ctx);
10902 tree new_var = lookup_decl (var, ctx);
10904 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10905 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
10906 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10907 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10908 x = build_simple_mem_ref (x);
10909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10911 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
10912 if (omp_is_reference (new_var)
10913 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
10915 /* Create a local object to hold the instance
10916 value. */
10917 tree type = TREE_TYPE (TREE_TYPE (new_var));
10918 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
10919 tree inst = create_tmp_var (type, id);
10920 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
10921 x = build_fold_addr_expr (inst);
10923 gimplify_assign (new_var, x, &fplist);
10925 else if (DECL_P (new_var))
10927 SET_DECL_VALUE_EXPR (new_var, x);
10928 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10930 else
10931 gcc_unreachable ();
10933 map_cnt++;
10934 break;
10936 case OMP_CLAUSE_FIRSTPRIVATE:
10937 if (is_oacc_parallel (ctx))
10938 goto oacc_firstprivate;
10939 map_cnt++;
10940 var = OMP_CLAUSE_DECL (c);
10941 if (!omp_is_reference (var)
10942 && !is_gimple_reg_type (TREE_TYPE (var)))
10944 tree new_var = lookup_decl (var, ctx);
10945 if (is_variable_sized (var))
10947 tree pvar = DECL_VALUE_EXPR (var);
10948 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10949 pvar = TREE_OPERAND (pvar, 0);
10950 gcc_assert (DECL_P (pvar));
10951 tree new_pvar = lookup_decl (pvar, ctx);
10952 x = build_fold_indirect_ref (new_pvar);
10953 TREE_THIS_NOTRAP (x) = 1;
10955 else
10956 x = build_receiver_ref (var, true, ctx);
10957 SET_DECL_VALUE_EXPR (new_var, x);
10958 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10960 break;
10962 case OMP_CLAUSE_PRIVATE:
10963 if (is_gimple_omp_oacc (ctx->stmt))
10964 break;
10965 var = OMP_CLAUSE_DECL (c);
10966 if (is_variable_sized (var))
10968 tree new_var = lookup_decl (var, ctx);
10969 tree pvar = DECL_VALUE_EXPR (var);
10970 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10971 pvar = TREE_OPERAND (pvar, 0);
10972 gcc_assert (DECL_P (pvar));
10973 tree new_pvar = lookup_decl (pvar, ctx);
10974 x = build_fold_indirect_ref (new_pvar);
10975 TREE_THIS_NOTRAP (x) = 1;
10976 SET_DECL_VALUE_EXPR (new_var, x);
10977 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10979 break;
10981 case OMP_CLAUSE_USE_DEVICE_PTR:
10982 case OMP_CLAUSE_IS_DEVICE_PTR:
10983 var = OMP_CLAUSE_DECL (c);
10984 map_cnt++;
10985 if (is_variable_sized (var))
10987 tree new_var = lookup_decl (var, ctx);
10988 tree pvar = DECL_VALUE_EXPR (var);
10989 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10990 pvar = TREE_OPERAND (pvar, 0);
10991 gcc_assert (DECL_P (pvar));
10992 tree new_pvar = lookup_decl (pvar, ctx);
10993 x = build_fold_indirect_ref (new_pvar);
10994 TREE_THIS_NOTRAP (x) = 1;
10995 SET_DECL_VALUE_EXPR (new_var, x);
10996 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10998 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11000 tree new_var = lookup_decl (var, ctx);
11001 tree type = build_pointer_type (TREE_TYPE (var));
11002 x = create_tmp_var_raw (type, get_name (new_var));
11003 gimple_add_tmp_var (x);
11004 x = build_simple_mem_ref (x);
11005 SET_DECL_VALUE_EXPR (new_var, x);
11006 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11008 else
11010 tree new_var = lookup_decl (var, ctx);
11011 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11012 gimple_add_tmp_var (x);
11013 SET_DECL_VALUE_EXPR (new_var, x);
11014 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11016 break;
11019 if (offloaded)
11021 target_nesting_level++;
11022 lower_omp (&tgt_body, ctx);
11023 target_nesting_level--;
11025 else if (data_region)
11026 lower_omp (&tgt_body, ctx);
11028 if (offloaded)
11030 /* Declare all the variables created by mapping and the variables
11031 declared in the scope of the target body. */
11032 record_vars_into (ctx->block_vars, child_fn);
11033 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11034 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11037 olist = NULL;
11038 ilist = NULL;
11039 if (ctx->record_type)
11041 ctx->sender_decl
11042 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11043 DECL_NAMELESS (ctx->sender_decl) = 1;
11044 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11045 t = make_tree_vec (3);
11046 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11047 TREE_VEC_ELT (t, 1)
11048 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11049 ".omp_data_sizes");
11050 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11051 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11052 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11053 tree tkind_type = short_unsigned_type_node;
11054 int talign_shift = 8;
11055 TREE_VEC_ELT (t, 2)
11056 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11057 ".omp_data_kinds");
11058 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11059 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11060 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11061 gimple_omp_target_set_data_arg (stmt, t);
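/* Editorial sketch (assumed values, for illustration only): for
   "int a[10];" mapped with "map(tofrom: a)", the runtime receives
   three parallel arrays along the lines of

     .omp_data_arr   = { &a }
     .omp_data_sizes = { 40 }
     .omp_data_kinds = { GOMP_MAP_TOFROM | (log2(align) << 8) }

   The size/kind arrays stay TREE_STATIC unless a size turns out not
   to be an INTEGER_CST, or a kind must be chosen at run time (see the
   COND_EXPR built for maybe-zero-length array sections below). */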
11063 vec<constructor_elt, va_gc> *vsize;
11064 vec<constructor_elt, va_gc> *vkind;
11065 vec_alloc (vsize, map_cnt);
11066 vec_alloc (vkind, map_cnt);
11067 unsigned int map_idx = 0;
11069 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11070 switch (OMP_CLAUSE_CODE (c))
11072 tree ovar, nc, s, purpose, var, x, type;
11073 unsigned int talign;
11075 default:
11076 break;
11078 case OMP_CLAUSE_MAP:
11079 case OMP_CLAUSE_TO:
11080 case OMP_CLAUSE_FROM:
11081 oacc_firstprivate_map:
11082 nc = c;
11083 ovar = OMP_CLAUSE_DECL (c);
11084 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11085 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11086 || (OMP_CLAUSE_MAP_KIND (c)
11087 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11088 break;
11089 if (!DECL_P (ovar))
11091 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11092 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11094 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11095 == get_base_address (ovar));
11096 nc = OMP_CLAUSE_CHAIN (c);
11097 ovar = OMP_CLAUSE_DECL (nc);
11099 else
11101 tree x = build_sender_ref (ovar, ctx);
11102 tree v
11103 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11104 gimplify_assign (x, v, &ilist);
11105 nc = NULL_TREE;
11108 else
11110 if (DECL_SIZE (ovar)
11111 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11113 tree ovar2 = DECL_VALUE_EXPR (ovar);
11114 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11115 ovar2 = TREE_OPERAND (ovar2, 0);
11116 gcc_assert (DECL_P (ovar2));
11117 ovar = ovar2;
11119 if (!maybe_lookup_field (ovar, ctx))
11120 continue;
11123 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11124 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11125 talign = DECL_ALIGN_UNIT (ovar);
11126 if (nc)
11128 var = lookup_decl_in_outer_ctx (ovar, ctx);
11129 x = build_sender_ref (ovar, ctx);
11131 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11132 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11133 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11134 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11136 gcc_assert (offloaded);
11137 tree avar
11138 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11139 mark_addressable (avar);
11140 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11141 talign = DECL_ALIGN_UNIT (avar);
11142 avar = build_fold_addr_expr (avar);
11143 gimplify_assign (x, avar, &ilist);
11145 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11147 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11148 if (!omp_is_reference (var))
11150 if (is_gimple_reg (var)
11151 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11152 TREE_NO_WARNING (var) = 1;
11153 var = build_fold_addr_expr (var);
11155 else
11156 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11157 gimplify_assign (x, var, &ilist);
11159 else if (is_gimple_reg (var))
11161 gcc_assert (offloaded);
11162 tree avar = create_tmp_var (TREE_TYPE (var));
11163 mark_addressable (avar);
11164 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11165 if (GOMP_MAP_COPY_TO_P (map_kind)
11166 || map_kind == GOMP_MAP_POINTER
11167 || map_kind == GOMP_MAP_TO_PSET
11168 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11170 /* If we need to initialize a temporary
11171 with VAR because it is not addressable, and
11172 the variable hasn't been initialized yet, then
11173 we'll get a warning for the store to avar.
11174 Don't warn in that case; the mapping might
11175 be implicit. */
11176 TREE_NO_WARNING (var) = 1;
11177 gimplify_assign (avar, var, &ilist);
11179 avar = build_fold_addr_expr (avar);
11180 gimplify_assign (x, avar, &ilist);
11181 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11182 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11183 && !TYPE_READONLY (TREE_TYPE (var)))
11185 x = unshare_expr (x);
11186 x = build_simple_mem_ref (x);
11187 gimplify_assign (var, x, &olist);
11190 else
11192 var = build_fold_addr_expr (var);
11193 gimplify_assign (x, var, &ilist);
11196 s = NULL_TREE;
11197 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11199 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11200 s = TREE_TYPE (ovar);
11201 if (TREE_CODE (s) == REFERENCE_TYPE)
11202 s = TREE_TYPE (s);
11203 s = TYPE_SIZE_UNIT (s);
11205 else
11206 s = OMP_CLAUSE_SIZE (c);
11207 if (s == NULL_TREE)
11208 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11209 s = fold_convert (size_type_node, s);
11210 purpose = size_int (map_idx++);
11211 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11212 if (TREE_CODE (s) != INTEGER_CST)
11213 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11215 unsigned HOST_WIDE_INT tkind, tkind_zero;
11216 switch (OMP_CLAUSE_CODE (c))
11218 case OMP_CLAUSE_MAP:
11219 tkind = OMP_CLAUSE_MAP_KIND (c);
11220 tkind_zero = tkind;
11221 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11222 switch (tkind)
11224 case GOMP_MAP_ALLOC:
11225 case GOMP_MAP_TO:
11226 case GOMP_MAP_FROM:
11227 case GOMP_MAP_TOFROM:
11228 case GOMP_MAP_ALWAYS_TO:
11229 case GOMP_MAP_ALWAYS_FROM:
11230 case GOMP_MAP_ALWAYS_TOFROM:
11231 case GOMP_MAP_RELEASE:
11232 case GOMP_MAP_FORCE_TO:
11233 case GOMP_MAP_FORCE_FROM:
11234 case GOMP_MAP_FORCE_TOFROM:
11235 case GOMP_MAP_FORCE_PRESENT:
11236 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11237 break;
11238 case GOMP_MAP_DELETE:
11239 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11240 default:
11241 break;
11243 if (tkind_zero != tkind)
11245 if (integer_zerop (s))
11246 tkind = tkind_zero;
11247 else if (integer_nonzerop (s))
11248 tkind_zero = tkind;
11250 break;
11251 case OMP_CLAUSE_FIRSTPRIVATE:
11252 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11253 tkind = GOMP_MAP_TO;
11254 tkind_zero = tkind;
11255 break;
11256 case OMP_CLAUSE_TO:
11257 tkind = GOMP_MAP_TO;
11258 tkind_zero = tkind;
11259 break;
11260 case OMP_CLAUSE_FROM:
11261 tkind = GOMP_MAP_FROM;
11262 tkind_zero = tkind;
11263 break;
11264 default:
11265 gcc_unreachable ();
11267 gcc_checking_assert (tkind
11268 < (HOST_WIDE_INT_C (1U) << talign_shift));
11269 gcc_checking_assert (tkind_zero
11270 < (HOST_WIDE_INT_C (1U) << talign_shift));
11271 talign = ceil_log2 (talign);
11272 tkind |= talign << talign_shift;
11273 tkind_zero |= talign << talign_shift;
11274 gcc_checking_assert (tkind
11275 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11276 gcc_checking_assert (tkind_zero
11277 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11278 if (tkind == tkind_zero)
11279 x = build_int_cstu (tkind_type, tkind);
11280 else
11282 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11283 x = build3 (COND_EXPR, tkind_type,
11284 fold_build2 (EQ_EXPR, boolean_type_node,
11285 unshare_expr (s), size_zero_node),
11286 build_int_cstu (tkind_type, tkind_zero),
11287 build_int_cstu (tkind_type, tkind));
11289 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
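/* Worked example (editorial): GOMP_MAP_TOFROM is 3, and an 8-byte
   aligned object gives talign = ceil_log2 (8) = 3, so the encoded
   entry is 3 | (3 << 8) = 0x303: the low TALIGN_SHIFT bits carry the
   map kind and the bits above carry log2 of the alignment. */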
11290 if (nc && nc != c)
11291 c = nc;
11292 break;
11294 case OMP_CLAUSE_FIRSTPRIVATE:
11295 if (is_oacc_parallel (ctx))
11296 goto oacc_firstprivate_map;
11297 ovar = OMP_CLAUSE_DECL (c);
11298 if (omp_is_reference (ovar))
11299 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11300 else
11301 talign = DECL_ALIGN_UNIT (ovar);
11302 var = lookup_decl_in_outer_ctx (ovar, ctx);
11303 x = build_sender_ref (ovar, ctx);
11304 tkind = GOMP_MAP_FIRSTPRIVATE;
11305 type = TREE_TYPE (ovar);
11306 if (omp_is_reference (ovar))
11307 type = TREE_TYPE (type);
11308 if ((INTEGRAL_TYPE_P (type)
11309 && TYPE_PRECISION (type) <= POINTER_SIZE)
11310 || TREE_CODE (type) == POINTER_TYPE)
11312 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11313 tree t = var;
11314 if (omp_is_reference (var))
11315 t = build_simple_mem_ref (var);
11316 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11317 TREE_NO_WARNING (var) = 1;
11318 if (TREE_CODE (type) != POINTER_TYPE)
11319 t = fold_convert (pointer_sized_int_node, t);
11320 t = fold_convert (TREE_TYPE (x), t);
11321 gimplify_assign (x, t, &ilist);
11323 else if (omp_is_reference (var))
11324 gimplify_assign (x, var, &ilist);
11325 else if (is_gimple_reg (var))
11327 tree avar = create_tmp_var (TREE_TYPE (var));
11328 mark_addressable (avar);
11329 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11330 TREE_NO_WARNING (var) = 1;
11331 gimplify_assign (avar, var, &ilist);
11332 avar = build_fold_addr_expr (avar);
11333 gimplify_assign (x, avar, &ilist);
11335 else
11337 var = build_fold_addr_expr (var);
11338 gimplify_assign (x, var, &ilist);
11340 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11341 s = size_int (0);
11342 else if (omp_is_reference (ovar))
11343 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11344 else
11345 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11346 s = fold_convert (size_type_node, s);
11347 purpose = size_int (map_idx++);
11348 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11349 if (TREE_CODE (s) != INTEGER_CST)
11350 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11352 gcc_checking_assert (tkind
11353 < (HOST_WIDE_INT_C (1U) << talign_shift));
11354 talign = ceil_log2 (talign);
11355 tkind |= talign << talign_shift;
11356 gcc_checking_assert (tkind
11357 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11358 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11359 build_int_cstu (tkind_type, tkind));
11360 break;
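/* Editorial sketch: for "firstprivate(n)" with "int n", the value
   fits in a pointer, so GOMP_MAP_FIRSTPRIVATE_INT passes N by value
   in the pointer slot, with size 0 and nothing actually mapped:

     .omp_data_arr.n = (void *) (uintptr_t) n;

   Wider or non-register types instead pass the address of an
   addressable copy, as in the code above. */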
11362 case OMP_CLAUSE_USE_DEVICE_PTR:
11363 case OMP_CLAUSE_IS_DEVICE_PTR:
11364 ovar = OMP_CLAUSE_DECL (c);
11365 var = lookup_decl_in_outer_ctx (ovar, ctx);
11366 x = build_sender_ref (ovar, ctx);
11367 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
11368 tkind = GOMP_MAP_USE_DEVICE_PTR;
11369 else
11370 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11371 type = TREE_TYPE (ovar);
11372 if (TREE_CODE (type) == ARRAY_TYPE)
11373 var = build_fold_addr_expr (var);
11374 else
11376 if (omp_is_reference (ovar))
11378 type = TREE_TYPE (type);
11379 if (TREE_CODE (type) != ARRAY_TYPE)
11380 var = build_simple_mem_ref (var);
11381 var = fold_convert (TREE_TYPE (x), var);
11384 gimplify_assign (x, var, &ilist);
11385 s = size_int (0);
11386 purpose = size_int (map_idx++);
11387 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11388 gcc_checking_assert (tkind
11389 < (HOST_WIDE_INT_C (1U) << talign_shift));
11390 gcc_checking_assert (tkind
11391 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11392 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11393 build_int_cstu (tkind_type, tkind));
11394 break;
11397 gcc_assert (map_idx == map_cnt);
11399 DECL_INITIAL (TREE_VEC_ELT (t, 1))
11400 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
11401 DECL_INITIAL (TREE_VEC_ELT (t, 2))
11402 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
11403 for (int i = 1; i <= 2; i++)
11404 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
11406 gimple_seq initlist = NULL;
11407 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
11408 TREE_VEC_ELT (t, i)),
11409 &initlist, true, NULL_TREE);
11410 gimple_seq_add_seq (&ilist, initlist);
11412 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
11413 NULL);
11414 TREE_THIS_VOLATILE (clobber) = 1;
11415 gimple_seq_add_stmt (&olist,
11416 gimple_build_assign (TREE_VEC_ELT (t, i),
11417 clobber));
11420 tree clobber = build_constructor (ctx->record_type, NULL);
11421 TREE_THIS_VOLATILE (clobber) = 1;
11422 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11423 clobber));
11426 /* Once all the expansions are done, sequence all the different
11427 fragments inside gimple_omp_body. */
11429 new_body = NULL;
11431 if (offloaded
11432 && ctx->record_type)
11434 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11435 /* fixup_child_record_type might have changed receiver_decl's type. */
11436 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11437 gimple_seq_add_stmt (&new_body,
11438 gimple_build_assign (ctx->receiver_decl, t));
11440 gimple_seq_add_seq (&new_body, fplist);
11442 if (offloaded || data_region)
11444 tree prev = NULL_TREE;
11445 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11446 switch (OMP_CLAUSE_CODE (c))
11448 tree var, x;
11449 default:
11450 break;
11451 case OMP_CLAUSE_FIRSTPRIVATE:
11452 if (is_gimple_omp_oacc (ctx->stmt))
11453 break;
11454 var = OMP_CLAUSE_DECL (c);
11455 if (omp_is_reference (var)
11456 || is_gimple_reg_type (TREE_TYPE (var)))
11458 tree new_var = lookup_decl (var, ctx);
11459 tree type;
11460 type = TREE_TYPE (var);
11461 if (omp_is_reference (var))
11462 type = TREE_TYPE (type);
11463 if ((INTEGRAL_TYPE_P (type)
11464 && TYPE_PRECISION (type) <= POINTER_SIZE)
11465 || TREE_CODE (type) == POINTER_TYPE)
11467 x = build_receiver_ref (var, false, ctx);
11468 if (TREE_CODE (type) != POINTER_TYPE)
11469 x = fold_convert (pointer_sized_int_node, x);
11470 x = fold_convert (type, x);
11471 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11472 fb_rvalue);
11473 if (omp_is_reference (var))
11475 tree v = create_tmp_var_raw (type, get_name (var));
11476 gimple_add_tmp_var (v);
11477 TREE_ADDRESSABLE (v) = 1;
11478 gimple_seq_add_stmt (&new_body,
11479 gimple_build_assign (v, x));
11480 x = build_fold_addr_expr (v);
11482 gimple_seq_add_stmt (&new_body,
11483 gimple_build_assign (new_var, x));
11485 else
11487 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
11488 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11489 fb_rvalue);
11490 gimple_seq_add_stmt (&new_body,
11491 gimple_build_assign (new_var, x));
11494 else if (is_variable_sized (var))
11496 tree pvar = DECL_VALUE_EXPR (var);
11497 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11498 pvar = TREE_OPERAND (pvar, 0);
11499 gcc_assert (DECL_P (pvar));
11500 tree new_var = lookup_decl (pvar, ctx);
11501 x = build_receiver_ref (var, false, ctx);
11502 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11503 gimple_seq_add_stmt (&new_body,
11504 gimple_build_assign (new_var, x));
11506 break;
11507 case OMP_CLAUSE_PRIVATE:
11508 if (is_gimple_omp_oacc (ctx->stmt))
11509 break;
11510 var = OMP_CLAUSE_DECL (c);
11511 if (omp_is_reference (var))
11513 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11514 tree new_var = lookup_decl (var, ctx);
11515 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
11516 if (TREE_CONSTANT (x))
11518 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
11519 get_name (var));
11520 gimple_add_tmp_var (x);
11521 TREE_ADDRESSABLE (x) = 1;
11522 x = build_fold_addr_expr_loc (clause_loc, x);
11524 else
11525 break;
11527 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11528 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11529 gimple_seq_add_stmt (&new_body,
11530 gimple_build_assign (new_var, x));
11532 break;
11533 case OMP_CLAUSE_USE_DEVICE_PTR:
11534 case OMP_CLAUSE_IS_DEVICE_PTR:
11535 var = OMP_CLAUSE_DECL (c);
11536 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
11537 x = build_sender_ref (var, ctx);
11538 else
11539 x = build_receiver_ref (var, false, ctx);
11540 if (is_variable_sized (var))
11542 tree pvar = DECL_VALUE_EXPR (var);
11543 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11544 pvar = TREE_OPERAND (pvar, 0);
11545 gcc_assert (DECL_P (pvar));
11546 tree new_var = lookup_decl (pvar, ctx);
11547 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11548 gimple_seq_add_stmt (&new_body,
11549 gimple_build_assign (new_var, x));
11551 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11553 tree new_var = lookup_decl (var, ctx);
11554 new_var = DECL_VALUE_EXPR (new_var);
11555 gcc_assert (TREE_CODE (new_var) == MEM_REF);
11556 new_var = TREE_OPERAND (new_var, 0);
11557 gcc_assert (DECL_P (new_var));
11558 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11559 gimple_seq_add_stmt (&new_body,
11560 gimple_build_assign (new_var, x));
11562 else
11564 tree type = TREE_TYPE (var);
11565 tree new_var = lookup_decl (var, ctx);
11566 if (omp_is_reference (var))
11568 type = TREE_TYPE (type);
11569 if (TREE_CODE (type) != ARRAY_TYPE)
11571 tree v = create_tmp_var_raw (type, get_name (var));
11572 gimple_add_tmp_var (v);
11573 TREE_ADDRESSABLE (v) = 1;
11574 x = fold_convert (type, x);
11575 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11576 fb_rvalue);
11577 gimple_seq_add_stmt (&new_body,
11578 gimple_build_assign (v, x));
11579 x = build_fold_addr_expr (v);
11582 new_var = DECL_VALUE_EXPR (new_var);
11583 x = fold_convert (TREE_TYPE (new_var), x);
11584 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11585 gimple_seq_add_stmt (&new_body,
11586 gimple_build_assign (new_var, x));
11588 break;
11590 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
11591 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE
11592 have already been handled. Similarly for OMP_CLAUSE_PRIVATE on
11593 VLAs or references to VLAs. */
11594 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11595 switch (OMP_CLAUSE_CODE (c))
11597 tree var;
11598 default:
11599 break;
11600 case OMP_CLAUSE_MAP:
11601 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11602 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11604 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11605 poly_int64 offset = 0;
11606 gcc_assert (prev);
11607 var = OMP_CLAUSE_DECL (c);
11608 if (DECL_P (var)
11609 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
11610 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
11611 ctx))
11612 && varpool_node::get_create (var)->offloadable)
11613 break;
11614 if (TREE_CODE (var) == INDIRECT_REF
11615 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
11616 var = TREE_OPERAND (var, 0);
11617 if (TREE_CODE (var) == COMPONENT_REF)
11619 var = get_addr_base_and_unit_offset (var, &offset);
11620 gcc_assert (var != NULL_TREE && DECL_P (var));
11622 else if (DECL_SIZE (var)
11623 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11625 tree var2 = DECL_VALUE_EXPR (var);
11626 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11627 var2 = TREE_OPERAND (var2, 0);
11628 gcc_assert (DECL_P (var2));
11629 var = var2;
11631 tree new_var = lookup_decl (var, ctx), x;
11632 tree type = TREE_TYPE (new_var);
11633 bool is_ref;
11634 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
11635 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11636 == COMPONENT_REF))
11638 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
11639 is_ref = true;
11640 new_var = build2 (MEM_REF, type,
11641 build_fold_addr_expr (new_var),
11642 build_int_cst (build_pointer_type (type),
11643 offset));
11645 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
11647 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
11648 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
11649 new_var = build2 (MEM_REF, type,
11650 build_fold_addr_expr (new_var),
11651 build_int_cst (build_pointer_type (type),
11652 offset));
11654 else
11655 is_ref = omp_is_reference (var);
11656 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11657 is_ref = false;
11658 bool ref_to_array = false;
11659 if (is_ref)
11661 type = TREE_TYPE (type);
11662 if (TREE_CODE (type) == ARRAY_TYPE)
11664 type = build_pointer_type (type);
11665 ref_to_array = true;
11668 else if (TREE_CODE (type) == ARRAY_TYPE)
11670 tree decl2 = DECL_VALUE_EXPR (new_var);
11671 gcc_assert (TREE_CODE (decl2) == MEM_REF);
11672 decl2 = TREE_OPERAND (decl2, 0);
11673 gcc_assert (DECL_P (decl2));
11674 new_var = decl2;
11675 type = TREE_TYPE (new_var);
11677 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
11678 x = fold_convert_loc (clause_loc, type, x);
11679 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
11681 tree bias = OMP_CLAUSE_SIZE (c);
11682 if (DECL_P (bias))
11683 bias = lookup_decl (bias, ctx);
11684 bias = fold_convert_loc (clause_loc, sizetype, bias);
11685 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
11686 bias);
11687 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
11688 TREE_TYPE (x), x, bias);
11690 if (ref_to_array)
11691 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11692 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11693 if (is_ref && !ref_to_array)
11695 tree t = create_tmp_var_raw (type, get_name (var));
11696 gimple_add_tmp_var (t);
11697 TREE_ADDRESSABLE (t) = 1;
11698 gimple_seq_add_stmt (&new_body,
11699 gimple_build_assign (t, x));
11700 x = build_fold_addr_expr_loc (clause_loc, t);
11702 gimple_seq_add_stmt (&new_body,
11703 gimple_build_assign (new_var, x));
11704 prev = NULL_TREE;
11706 else if (OMP_CLAUSE_CHAIN (c)
11707 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
11708 == OMP_CLAUSE_MAP
11709 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11710 == GOMP_MAP_FIRSTPRIVATE_POINTER
11711 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11712 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11713 prev = c;
11714 break;
11715 case OMP_CLAUSE_PRIVATE:
11716 var = OMP_CLAUSE_DECL (c);
11717 if (is_variable_sized (var))
11719 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11720 tree new_var = lookup_decl (var, ctx);
11721 tree pvar = DECL_VALUE_EXPR (var);
11722 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11723 pvar = TREE_OPERAND (pvar, 0);
11724 gcc_assert (DECL_P (pvar));
11725 tree new_pvar = lookup_decl (pvar, ctx);
11726 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11727 tree al = size_int (DECL_ALIGN (var));
11728 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
11729 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
11730 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
11731 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11732 gimple_seq_add_stmt (&new_body,
11733 gimple_build_assign (new_pvar, x));
11735 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
11737 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11738 tree new_var = lookup_decl (var, ctx);
11739 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
11740 if (TREE_CONSTANT (x))
11741 break;
11742 else
11744 tree atmp
11745 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11746 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
11747 tree al = size_int (TYPE_ALIGN (rtype));
11748 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
11751 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11752 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11753 gimple_seq_add_stmt (&new_body,
11754 gimple_build_assign (new_var, x));
11756 break;
11759 gimple_seq fork_seq = NULL;
11760 gimple_seq join_seq = NULL;
11762 if (is_oacc_parallel (ctx))
11764 /* If there are reductions on the offloaded region itself, treat
11765 them as a dummy GANG loop. */
11766 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
11768 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
11769 false, NULL, NULL, &fork_seq, &join_seq, ctx);
11772 gimple_seq_add_seq (&new_body, fork_seq);
11773 gimple_seq_add_seq (&new_body, tgt_body);
11774 gimple_seq_add_seq (&new_body, join_seq);
11776 if (offloaded)
11777 new_body = maybe_catch_exception (new_body);
11779 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11780 gimple_omp_set_body (stmt, new_body);
11783 bind = gimple_build_bind (NULL, NULL,
11784 tgt_bind ? gimple_bind_block (tgt_bind)
11785 : NULL_TREE);
11786 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11787 gimple_bind_add_seq (bind, ilist);
11788 gimple_bind_add_stmt (bind, stmt);
11789 gimple_bind_add_seq (bind, olist);
11791 pop_gimplify_context (NULL);
11793 if (dep_bind)
11795 gimple_bind_add_seq (dep_bind, dep_ilist);
11796 gimple_bind_add_stmt (dep_bind, bind);
11797 gimple_bind_add_seq (dep_bind, dep_olist);
11798 pop_gimplify_context (dep_bind);
11802 /* Expand code for an OpenMP teams directive. */
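/* Editorial sketch: "#pragma omp teams num_teams(4) thread_limit(16)"
   lowers to a call

     GOMP_teams (4, 16);

   followed by the lowered body; a clause that is absent contributes
   0, leaving the choice to the runtime. */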
11804 static void
11805 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11807 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
11808 push_gimplify_context ();
11810 tree block = make_node (BLOCK);
11811 gbind *bind = gimple_build_bind (NULL, NULL, block);
11812 gsi_replace (gsi_p, bind, true);
11813 gimple_seq bind_body = NULL;
11814 gimple_seq dlist = NULL;
11815 gimple_seq olist = NULL;
11817 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
11818 OMP_CLAUSE_NUM_TEAMS);
11819 if (num_teams == NULL_TREE)
11820 num_teams = build_int_cst (unsigned_type_node, 0);
11821 else
11823 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
11824 num_teams = fold_convert (unsigned_type_node, num_teams);
11825 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
11827 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
11828 OMP_CLAUSE_THREAD_LIMIT);
11829 if (thread_limit == NULL_TREE)
11830 thread_limit = build_int_cst (unsigned_type_node, 0);
11831 else
11833 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
11834 thread_limit = fold_convert (unsigned_type_node, thread_limit);
11835 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
11836 fb_rvalue);
11839 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
11840 &bind_body, &dlist, ctx, NULL);
11841 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
11842 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
11843 NULL, ctx);
11844 if (!gimple_omp_teams_grid_phony (teams_stmt))
11846 gimple_seq_add_stmt (&bind_body, teams_stmt);
11847 location_t loc = gimple_location (teams_stmt);
11848 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
11849 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
11850 gimple_set_location (call, loc);
11851 gimple_seq_add_stmt (&bind_body, call);
11854 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
11855 gimple_omp_set_body (teams_stmt, NULL);
11856 gimple_seq_add_seq (&bind_body, olist);
11857 gimple_seq_add_seq (&bind_body, dlist);
11858 if (!gimple_omp_teams_grid_phony (teams_stmt))
11859 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
11860 gimple_bind_set_body (bind, bind_body);
11862 pop_gimplify_context (bind);
11864 gimple_bind_append_vars (bind, ctx->block_vars);
11865 BLOCK_VARS (block) = ctx->block_vars;
11866 if (BLOCK_VARS (block))
11867 TREE_USED (block) = 1;
11870 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
11872 static void
11873 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11875 gimple *stmt = gsi_stmt (*gsi_p);
11876 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11877 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
11878 gimple_build_omp_return (false));
11882 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
11883 regimplified. If DATA is non-NULL, lower_omp_1 is being invoked
11884 outside of an OMP context, but with task_shared_vars set. */
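/* Editorial sketch: privatization may set, e.g.,
   DECL_VALUE_EXPR (x) = .omp_data_i->x, turning a formerly valid
   operand into one that must be regimplified:

     a = x;   ==>   a = .omp_data_i->x;

   which is the situation this predicate detects. */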
11886 static tree
11887 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
11888 void *data)
11890 tree t = *tp;
11892 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
11893 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
11894 return t;
11896 if (task_shared_vars
11897 && DECL_P (t)
11898 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
11899 return t;
11901 /* If a global variable has been privatized, TREE_CONSTANT on
11902 ADDR_EXPR might be wrong. */
11903 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
11904 recompute_tree_invariant_for_addr_expr (t);
11906 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
11907 return NULL_TREE;
11910 /* Data to be communicated between lower_omp_regimplify_operands and
11911 lower_omp_regimplify_operands_p. */
11913 struct lower_omp_regimplify_operands_data
11915 omp_context *ctx;
11916 vec<tree> *decls;
11919 /* Helper function for lower_omp_regimplify_operands. Find
11920 omp_member_access_dummy_var vars and temporarily adjust their
11921 DECL_VALUE_EXPRs if needed. */
11923 static tree
11924 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
11925 void *data)
11927 tree t = omp_member_access_dummy_var (*tp);
11928 if (t)
11930 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
11931 lower_omp_regimplify_operands_data *ldata
11932 = (lower_omp_regimplify_operands_data *) wi->info;
11933 tree o = maybe_lookup_decl (t, ldata->ctx);
11934 if (o != t)
11936 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
11937 ldata->decls->safe_push (*tp);
11938 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
11939 SET_DECL_VALUE_EXPR (*tp, v);
11942 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
11943 return NULL_TREE;
11946 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
11947 of omp_member_access_dummy_var vars during regimplification. */
11949 static void
11950 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
11951 gimple_stmt_iterator *gsi_p)
11953 auto_vec<tree, 10> decls;
11954 if (ctx)
11956 struct walk_stmt_info wi;
11957 memset (&wi, '\0', sizeof (wi));
11958 struct lower_omp_regimplify_operands_data data;
11959 data.ctx = ctx;
11960 data.decls = &decls;
11961 wi.info = &data;
11962 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
11964 gimple_regimplify_operands (stmt, gsi_p);
11965 while (!decls.is_empty ())
11967 tree t = decls.pop ();
11968 tree v = decls.pop ();
11969 SET_DECL_VALUE_EXPR (t, v);
11973 static void
11974 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11976 gimple *stmt = gsi_stmt (*gsi_p);
11977 struct walk_stmt_info wi;
11978 gcall *call_stmt;
11980 if (gimple_has_location (stmt))
11981 input_location = gimple_location (stmt);
11983 if (task_shared_vars)
11984 memset (&wi, '\0', sizeof (wi));
11986 /* If we have issued syntax errors, avoid doing any heavy lifting.
11987 Just replace the OMP directives with a NOP to avoid
11988 confusing RTL expansion. */
11989 if (seen_error () && is_gimple_omp (stmt))
11991 gsi_replace (gsi_p, gimple_build_nop (), true);
11992 return;
11995 switch (gimple_code (stmt))
11997 case GIMPLE_COND:
11999 gcond *cond_stmt = as_a <gcond *> (stmt);
12000 if ((ctx || task_shared_vars)
12001 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12002 lower_omp_regimplify_p,
12003 ctx ? NULL : &wi, NULL)
12004 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12005 lower_omp_regimplify_p,
12006 ctx ? NULL : &wi, NULL)))
12007 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12009 break;
12010 case GIMPLE_CATCH:
12011 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12012 break;
12013 case GIMPLE_EH_FILTER:
12014 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12015 break;
12016 case GIMPLE_TRY:
12017 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12018 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12019 break;
12020 case GIMPLE_TRANSACTION:
12021 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12022 ctx);
12023 break;
12024 case GIMPLE_BIND:
12025 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12026 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12027 break;
12028 case GIMPLE_OMP_PARALLEL:
12029 case GIMPLE_OMP_TASK:
12030 ctx = maybe_lookup_ctx (stmt);
12031 gcc_assert (ctx);
12032 if (ctx->cancellable)
12033 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12034 lower_omp_taskreg (gsi_p, ctx);
12035 break;
12036 case GIMPLE_OMP_FOR:
12037 ctx = maybe_lookup_ctx (stmt);
12038 gcc_assert (ctx);
12039 if (ctx->cancellable)
12040 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12041 lower_omp_for (gsi_p, ctx);
12042 break;
12043 case GIMPLE_OMP_SECTIONS:
12044 ctx = maybe_lookup_ctx (stmt);
12045 gcc_assert (ctx);
12046 if (ctx->cancellable)
12047 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12048 lower_omp_sections (gsi_p, ctx);
12049 break;
12050 case GIMPLE_OMP_SINGLE:
12051 ctx = maybe_lookup_ctx (stmt);
12052 gcc_assert (ctx);
12053 lower_omp_single (gsi_p, ctx);
12054 break;
12055 case GIMPLE_OMP_MASTER:
12056 ctx = maybe_lookup_ctx (stmt);
12057 gcc_assert (ctx);
12058 lower_omp_master (gsi_p, ctx);
12059 break;
12060 case GIMPLE_OMP_TASKGROUP:
12061 ctx = maybe_lookup_ctx (stmt);
12062 gcc_assert (ctx);
12063 lower_omp_taskgroup (gsi_p, ctx);
12064 break;
12065 case GIMPLE_OMP_ORDERED:
12066 ctx = maybe_lookup_ctx (stmt);
12067 gcc_assert (ctx);
12068 lower_omp_ordered (gsi_p, ctx);
12069 break;
12070 case GIMPLE_OMP_SCAN:
12071 ctx = maybe_lookup_ctx (stmt);
12072 gcc_assert (ctx);
12073 lower_omp_scan (gsi_p, ctx);
12074 break;
12075 case GIMPLE_OMP_CRITICAL:
12076 ctx = maybe_lookup_ctx (stmt);
12077 gcc_assert (ctx);
12078 lower_omp_critical (gsi_p, ctx);
12079 break;
12080 case GIMPLE_OMP_ATOMIC_LOAD:
12081 if ((ctx || task_shared_vars)
12082 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12083 as_a <gomp_atomic_load *> (stmt)),
12084 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12085 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12086 break;
12087 case GIMPLE_OMP_TARGET:
12088 ctx = maybe_lookup_ctx (stmt);
12089 gcc_assert (ctx);
12090 lower_omp_target (gsi_p, ctx);
12091 break;
12092 case GIMPLE_OMP_TEAMS:
12093 ctx = maybe_lookup_ctx (stmt);
12094 gcc_assert (ctx);
12095 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12096 lower_omp_taskreg (gsi_p, ctx);
12097 else
12098 lower_omp_teams (gsi_p, ctx);
12099 break;
12100 case GIMPLE_OMP_GRID_BODY:
12101 ctx = maybe_lookup_ctx (stmt);
12102 gcc_assert (ctx);
12103 lower_omp_grid_body (gsi_p, ctx);
12104 break;
12105 case GIMPLE_CALL:
12106 tree fndecl;
12107 call_stmt = as_a <gcall *> (stmt);
12108 fndecl = gimple_call_fndecl (call_stmt);
12109 if (fndecl
12110 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12111 switch (DECL_FUNCTION_CODE (fndecl))
12113 case BUILT_IN_GOMP_BARRIER:
12114 if (ctx == NULL)
12115 break;
12116 /* FALLTHRU */
12117 case BUILT_IN_GOMP_CANCEL:
12118 case BUILT_IN_GOMP_CANCELLATION_POINT:
12119 omp_context *cctx;
12120 cctx = ctx;
12121 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12122 cctx = cctx->outer;
12123 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12124 if (!cctx->cancellable)
12126 if (DECL_FUNCTION_CODE (fndecl)
12127 == BUILT_IN_GOMP_CANCELLATION_POINT)
12129 stmt = gimple_build_nop ();
12130 gsi_replace (gsi_p, stmt, false);
12132 break;
12134 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12136 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12137 gimple_call_set_fndecl (call_stmt, fndecl);
12138 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12140 tree lhs;
12141 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12142 gimple_call_set_lhs (call_stmt, lhs);
12143 tree fallthru_label;
12144 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12145 gimple *g;
12146 g = gimple_build_label (fallthru_label);
12147 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12148 g = gimple_build_cond (NE_EXPR, lhs,
12149 fold_convert (TREE_TYPE (lhs),
12150 boolean_false_node),
12151 cctx->cancel_label, fallthru_label);
12152 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12153 break;
12154 default:
12155 break;
12157 goto regimplify;
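/* Editorial sketch: within a cancellable region, a plain

     GOMP_barrier ();

   is rewritten as

     lhs = GOMP_barrier_cancel ();
     if (lhs != 0) goto cancel_label; else goto fallthru_label;

   so a barrier reached while cancellation is pending exits the
   region through its cancellation label. */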
12159 case GIMPLE_ASSIGN:
12160 for (omp_context *up = ctx; up; up = up->outer)
12162 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12163 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12164 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
12165 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
12166 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
12167 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12168 && (gimple_omp_target_kind (up->stmt)
12169 == GF_OMP_TARGET_KIND_DATA)))
12170 continue;
12171 else if (!up->lastprivate_conditional_map)
12172 break;
12173 tree lhs = get_base_address (gimple_assign_lhs (stmt));
12174 if (TREE_CODE (lhs) == MEM_REF
12175 && DECL_P (TREE_OPERAND (lhs, 0))
12176 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12177 0))) == REFERENCE_TYPE)
12178 lhs = TREE_OPERAND (lhs, 0);
12179 if (DECL_P (lhs))
12180 if (tree *v = up->lastprivate_conditional_map->get (lhs))
12182 tree clauses;
12183 if (up->combined_into_simd_safelen0)
12184 up = up->outer;
12185 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12186 clauses = gimple_omp_for_clauses (up->stmt);
12187 else
12188 clauses = gimple_omp_sections_clauses (up->stmt);
12189 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
12190 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12191 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12192 OMP_CLAUSE__CONDTEMP_);
12193 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
12194 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12195 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12198 /* FALLTHRU */
12200 default:
12201 regimplify:
12202 if ((ctx || task_shared_vars)
12203 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12204 ctx ? NULL : &wi))
12206 /* Just remove clobbers; this should happen only if we have
12207 "privatized" local addressable variables in SIMD regions.
12208 The clobber isn't needed in that case, and gimplifying the
12209 address of the ARRAY_REF into a pointer and creating a
12210 MEM_REF based clobber would create worse code than we get
12211 with the clobber dropped. */
12212 if (gimple_clobber_p (stmt))
12214 gsi_replace (gsi_p, gimple_build_nop (), true);
12215 break;
12217 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12219 break;
12223 static void
12224 lower_omp (gimple_seq *body, omp_context *ctx)
12226 location_t saved_location = input_location;
12227 gimple_stmt_iterator gsi;
12228 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12229 lower_omp_1 (&gsi, ctx);
12230 /* During gimplification, we haven't folded statements inside offloading
12231 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12232 if (target_nesting_level || taskreg_nesting_level)
12233 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12234 fold_stmt (&gsi);
12235 input_location = saved_location;
12238 /* Main entry point. */
12240 static unsigned int
12241 execute_lower_omp (void)
12243 gimple_seq body;
12244 int i;
12245 omp_context *ctx;
12247 /* This pass always runs, to provide PROP_gimple_lomp.
12248 But often, there is nothing to do. */
12249 if (flag_openacc == 0 && flag_openmp == 0
12250 && flag_openmp_simd == 0)
12251 return 0;
12253 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
12254 delete_omp_context);
12256 body = gimple_body (current_function_decl);
12258 if (hsa_gen_requested_p ())
12259 omp_grid_gridify_all_targets (&body);
12261 scan_omp (&body, NULL);
12262 gcc_assert (taskreg_nesting_level == 0);
12263 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
12264 finish_taskreg_scan (ctx);
12265 taskreg_contexts.release ();
12267 if (all_contexts->root)
12269 if (task_shared_vars)
12270 push_gimplify_context ();
12271 lower_omp (&body, NULL);
12272 if (task_shared_vars)
12273 pop_gimplify_context (NULL);
12276 if (all_contexts)
12278 splay_tree_delete (all_contexts);
12279 all_contexts = NULL;
12281 BITMAP_FREE (task_shared_vars);
12283 /* If the current function is a method, remove the artificial dummy
12284 VAR_DECLs created for non-static data member privatization; they
12285 aren't needed for debuginfo or anything else, have already been
12286 replaced everywhere in the IL, and cause problems with LTO. */
12287 if (DECL_ARGUMENTS (current_function_decl)
12288 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
12289 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
12290 == POINTER_TYPE))
12291 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
12292 return 0;
12295 namespace {
12297 const pass_data pass_data_lower_omp =
12299 GIMPLE_PASS, /* type */
12300 "omplower", /* name */
12301 OPTGROUP_OMP, /* optinfo_flags */
12302 TV_NONE, /* tv_id */
12303 PROP_gimple_any, /* properties_required */
12304 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
12305 0, /* properties_destroyed */
12306 0, /* todo_flags_start */
12307 0, /* todo_flags_finish */
12310 class pass_lower_omp : public gimple_opt_pass
12312 public:
12313 pass_lower_omp (gcc::context *ctxt)
12314 : gimple_opt_pass (pass_data_lower_omp, ctxt)
12317 /* opt_pass methods: */
12318 virtual unsigned int execute (function *) { return execute_lower_omp (); }
12320 }; // class pass_lower_omp
12322 } // anon namespace
12324 gimple_opt_pass *
12325 make_pass_lower_omp (gcc::context *ctxt)
12327 return new pass_lower_omp (ctxt);
12330 /* The following is a utility to diagnose structured block violations.
12331 It is not part of the "omplower" pass, as that's invoked too late. It
12332 should be invoked by the respective front ends after gimplification. */
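/* Editorial example of the kind of violation diagnosed here:

     goto l;                 // invalid entry into the block
     #pragma omp parallel
     {
       l: ;
     }

   Branches that enter or leave an OpenMP/OpenACC structured block are
   reported by diagnose_sb_0 below and replaced with a nop. */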
12334 static splay_tree all_labels;
12336 /* Check for mismatched contexts and generate an error if needed. Return
12337 true if an error is detected. */
12339 static bool
12340 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
12341 gimple *branch_ctx, gimple *label_ctx)
12343 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
12344 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
12346 if (label_ctx == branch_ctx)
12347 return false;
12349 const char* kind = NULL;
12351 if (flag_openacc)
12353 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
12354 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
12356 gcc_checking_assert (kind == NULL);
12357 kind = "OpenACC";
12360 if (kind == NULL)
12362 gcc_checking_assert (flag_openmp || flag_openmp_simd);
12363 kind = "OpenMP";
12366 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
12367 so we could traverse it and issue a correct "exit" or "enter" error
12368 message upon a structured block violation.
12370 We built the context by building a list with tree_cons'ing, but there is
12371 no easy counterpart in gimple tuples. It seems like far too much work
12372 for issuing exit/enter error messages. If someone really misses the
12373 distinct error message... patches welcome. */
12375 #if 0
12376 /* Try to avoid confusing the user by producing an error message
12377 with correct "exit" or "enter" verbiage. We prefer "exit"
12378 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
12379 if (branch_ctx == NULL)
12380 exit_p = false;
12381 else
12383 while (label_ctx)
12385 if (TREE_VALUE (label_ctx) == branch_ctx)
12387 exit_p = false;
12388 break;
12390 label_ctx = TREE_CHAIN (label_ctx);
12394 if (exit_p)
12395 error ("invalid exit from %s structured block", kind);
12396 else
12397 error ("invalid entry to %s structured block", kind);
12398 #endif
12400 /* If it's obvious we have an invalid entry, be specific about the error. */
12401 if (branch_ctx == NULL)
12402 error ("invalid entry to %s structured block", kind);
12403 else
12405 /* Otherwise, be vague and lazy, but efficient. */
12406 error ("invalid branch to/from %s structured block", kind);
12409 gsi_replace (gsi_p, gimple_build_nop (), false);
12410 return true;
12413 /* Pass 1: Create a minimal tree of structured blocks, and record
12414 where each label is found. */
12416 static tree
12417 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
12418 struct walk_stmt_info *wi)
12420 gimple *context = (gimple *) wi->info;
12421 gimple *inner_context;
12422 gimple *stmt = gsi_stmt (*gsi_p);
12424 *handled_ops_p = true;
12426 switch (gimple_code (stmt))
12428 WALK_SUBSTMTS;
12430 case GIMPLE_OMP_PARALLEL:
12431 case GIMPLE_OMP_TASK:
12432 case GIMPLE_OMP_SECTIONS:
12433 case GIMPLE_OMP_SINGLE:
12434 case GIMPLE_OMP_SECTION:
12435 case GIMPLE_OMP_MASTER:
12436 case GIMPLE_OMP_ORDERED:
12437 case GIMPLE_OMP_SCAN:
12438 case GIMPLE_OMP_CRITICAL:
12439 case GIMPLE_OMP_TARGET:
12440 case GIMPLE_OMP_TEAMS:
12441 case GIMPLE_OMP_TASKGROUP:
12442 /* The minimal context here is just the current OMP construct. */
12443 inner_context = stmt;
12444 wi->info = inner_context;
12445 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
12446 wi->info = context;
12447 break;
12449 case GIMPLE_OMP_FOR:
12450 inner_context = stmt;
12451 wi->info = inner_context;
12452 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12453 walk them. */
12454 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12455 diagnose_sb_1, NULL, wi);
12456 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
12457 wi->info = context;
12458 break;
12460 case GIMPLE_LABEL:
12461 splay_tree_insert (all_labels,
12462 (splay_tree_key) gimple_label_label (
12463 as_a <glabel *> (stmt)),
12464 (splay_tree_value) context);
12465 break;
12467 default:
12468 break;
12471 return NULL_TREE;
12474 /* Pass 2: Check each branch and see if its context differs from that of
12475 the destination label's context. */
12477 static tree
12478 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
12479 struct walk_stmt_info *wi)
12481 gimple *context = (gimple *) wi->info;
12482 splay_tree_node n;
12483 gimple *stmt = gsi_stmt (*gsi_p);
12485 *handled_ops_p = true;
12487 switch (gimple_code (stmt))
12489 WALK_SUBSTMTS;
12491 case GIMPLE_OMP_PARALLEL:
12492 case GIMPLE_OMP_TASK:
12493 case GIMPLE_OMP_SECTIONS:
12494 case GIMPLE_OMP_SINGLE:
12495 case GIMPLE_OMP_SECTION:
12496 case GIMPLE_OMP_MASTER:
12497 case GIMPLE_OMP_ORDERED:
12498 case GIMPLE_OMP_SCAN:
12499 case GIMPLE_OMP_CRITICAL:
12500 case GIMPLE_OMP_TARGET:
12501 case GIMPLE_OMP_TEAMS:
12502 case GIMPLE_OMP_TASKGROUP:
12503 wi->info = stmt;
12504 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
12505 wi->info = context;
12506 break;
12508 case GIMPLE_OMP_FOR:
12509 wi->info = stmt;
12510 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12511 walk them. */
12512 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
12513 diagnose_sb_2, NULL, wi);
12514 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
12515 wi->info = context;
12516 break;
12518 case GIMPLE_COND:
12520 gcond *cond_stmt = as_a <gcond *> (stmt);
12521 tree lab = gimple_cond_true_label (cond_stmt);
12522 if (lab)
12524 n = splay_tree_lookup (all_labels,
12525 (splay_tree_key) lab);
12526 diagnose_sb_0 (gsi_p, context,
12527 n ? (gimple *) n->value : NULL);
12529 lab = gimple_cond_false_label (cond_stmt);
12530 if (lab)
12532 n = splay_tree_lookup (all_labels,
12533 (splay_tree_key) lab);
12534 diagnose_sb_0 (gsi_p, context,
12535 n ? (gimple *) n->value : NULL);
12538 break;
12540 case GIMPLE_GOTO:
12542 tree lab = gimple_goto_dest (stmt);
12543 if (TREE_CODE (lab) != LABEL_DECL)
12544 break;
12546 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
12547 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
12549 break;
12551 case GIMPLE_SWITCH:
12553 gswitch *switch_stmt = as_a <gswitch *> (stmt);
12554 unsigned int i;
12555 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
12557 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
12558 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
12559 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
12560 break;
12563 break;
12565 case GIMPLE_RETURN:
12566 diagnose_sb_0 (gsi_p, context, NULL);
12567 break;
12569 default:
12570 break;
12573 return NULL_TREE;
12576 static unsigned int
12577 diagnose_omp_structured_block_errors (void)
12579 struct walk_stmt_info wi;
12580 gimple_seq body = gimple_body (current_function_decl);
12582 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
12584 memset (&wi, 0, sizeof (wi));
12585 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
12587 memset (&wi, 0, sizeof (wi));
12588 wi.want_locations = true;
12589 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
12591 gimple_set_body (current_function_decl, body);
12593 splay_tree_delete (all_labels);
12594 all_labels = NULL;
12596 return 0;
12599 namespace {
12601 const pass_data pass_data_diagnose_omp_blocks =
12603 GIMPLE_PASS, /* type */
12604 "*diagnose_omp_blocks", /* name */
12605 OPTGROUP_OMP, /* optinfo_flags */
12606 TV_NONE, /* tv_id */
12607 PROP_gimple_any, /* properties_required */
12608 0, /* properties_provided */
12609 0, /* properties_destroyed */
12610 0, /* todo_flags_start */
12611 0, /* todo_flags_finish */
12614 class pass_diagnose_omp_blocks : public gimple_opt_pass
12616 public:
12617 pass_diagnose_omp_blocks (gcc::context *ctxt)
12618 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
12621 /* opt_pass methods: */
12622 virtual bool gate (function *)
12624 return flag_openacc || flag_openmp || flag_openmp_simd;
12626 virtual unsigned int execute (function *)
12628 return diagnose_omp_structured_block_errors ();
12631 }; // class pass_diagnose_omp_blocks
12633 } // anon namespace
12635 gimple_opt_pass *
12636 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
12638 return new pass_diagnose_omp_blocks (ctxt);
12642 #include "gt-omp-low.h"