PR C++/88114 Gen destructor of an abstract class
[official-gcc.git] / gcc / omp-low.c
blob84a6addbf3705268b7c841e015abbc5adff8e9d5
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
78 struct omp_context
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
/* The OMP directive statement this context was created for (set in
   new_omp_context).  */
88 gimple *stmt;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
/* Record type holding those fields; sender_decl/receiver_decl are the
   objects of that type referenced by build_sender_ref and
   build_receiver_ref respectively.  */
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
122 /* And a hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
126 /* Nesting depth of this context. Used to beautify error messages re
127 invalid gotos. The outermost ctx is depth 1, with depth 0 being
128 reserved for the main body of the function. */
129 int depth;
131 /* True if this parallel directive is nested within another. */
132 bool is_nested;
134 /* True if this construct can be cancelled. */
135 bool cancellable;
/* Splay tree mapping each scanned OMP statement to its omp_context;
   populated by new_omp_context, values destroyed via delete_omp_context.  */
138 static splay_tree all_contexts;
/* NOTE(review): nesting counters — presumably incremented while scanning
   nested task/parallel resp. target regions; the updates are outside this
   chunk, confirm against the full file.  */
139 static int taskreg_nesting_level;
140 static int target_nesting_level;
/* Bitmap of DECL_UIDs forced TREE_ADDRESSABLE on behalf of task sharing
   (set in use_pointer_for_field, consulted in omp_copy_decl_2).  */
141 static bitmap task_shared_vars;
/* NOTE(review): contexts collected for deferred processing; the consumer
   is not visible in this chunk — verify before relying on it.  */
142 static vec<omp_context *> taskreg_contexts;
144 static void scan_omp (gimple_seq *, omp_context *);
145 static tree scan_omp_1_op (tree *, int *, void *);
/* Shared switch cases for walk_gimple_stmt callbacks: for these container
   statements only the nested statements need to be walked, so clear
   *HANDLED_OPS_P and let the walker recurse.  */
147 #define WALK_SUBSTMTS \
148 case GIMPLE_BIND: \
149 case GIMPLE_TRY: \
150 case GIMPLE_CATCH: \
151 case GIMPLE_EH_FILTER: \
152 case GIMPLE_TRANSACTION: \
153 /* The sub-statements for these should be walked. */ \
154 *handled_ops_p = false; \
155 break;
157 /* Return true if CTX corresponds to an oacc parallel region. */
159 static bool
160 is_oacc_parallel (omp_context *ctx)
162 enum gimple_code outer_type = gimple_code (ctx->stmt);
163 return ((outer_type == GIMPLE_OMP_TARGET)
164 && (gimple_omp_target_kind (ctx->stmt)
165 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
168 /* Return true if CTX corresponds to an oacc kernels region. */
170 static bool
171 is_oacc_kernels (omp_context *ctx)
173 enum gimple_code outer_type = gimple_code (ctx->stmt);
174 return ((outer_type == GIMPLE_OMP_TARGET)
175 && (gimple_omp_target_kind (ctx->stmt)
176 == GF_OMP_TARGET_KIND_OACC_KERNELS));
179 /* If DECL is the artificial dummy VAR_DECL created for non-static
180 data member privatization, return the underlying "this" parameter,
181 otherwise return NULL. */
183 tree
184 omp_member_access_dummy_var (tree decl)
/* The dummy must be an artificial, ignored VAR_DECL carrying a value
   expression which the frontend says to disregard.  */
186 if (!VAR_P (decl)
187 || !DECL_ARTIFICIAL (decl)
188 || !DECL_IGNORED_P (decl)
189 || !DECL_HAS_VALUE_EXPR_P (decl)
190 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
191 return NULL_TREE;
193 tree v = DECL_VALUE_EXPR (decl);
194 if (TREE_CODE (v) != COMPONENT_REF)
195 return NULL_TREE;
/* Strip component refs, dereferences, conversions and pointer
   arithmetic down to the base; succeed only if the base is the
   artificial pointer PARM_DECL of the current function.  */
197 while (1)
198 switch (TREE_CODE (v))
200 case COMPONENT_REF:
201 case MEM_REF:
202 case INDIRECT_REF:
203 CASE_CONVERT:
204 case POINTER_PLUS_EXPR:
205 v = TREE_OPERAND (v, 0);
206 continue;
207 case PARM_DECL:
208 if (DECL_CONTEXT (v) == current_function_decl
209 && DECL_ARTIFICIAL (v)
210 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
211 return v;
212 return NULL_TREE;
213 default:
214 return NULL_TREE;
218 /* Helper for unshare_and_remap, called through walk_tree. */
220 static tree
221 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
223 tree *pair = (tree *) data;
224 if (*tp == pair[0])
226 *tp = unshare_expr (pair[1]);
227 *walk_subtrees = 0;
229 else if (IS_TYPE_OR_DECL_P (*tp))
230 *walk_subtrees = 0;
231 return NULL_TREE;
234 /* Return unshare_expr (X) with all occurrences of FROM
235 replaced with TO. */
237 static tree
238 unshare_and_remap (tree x, tree from, tree to)
240 tree pair[2] = { from, to };
241 x = unshare_expr (x);
242 walk_tree (&x, unshare_and_remap_1, pair, NULL);
243 return x;
246 /* Convenience function for calling scan_omp_1_op on tree operands. */
248 static inline tree
249 scan_omp_op (tree *tp, omp_context *ctx)
251 struct walk_stmt_info wi;
253 memset (&wi, 0, sizeof (wi));
254 wi.info = ctx;
255 wi.want_locations = true;
257 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
260 static void lower_omp (gimple_seq *, omp_context *);
261 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
262 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
264 /* Return true if CTX is for an omp parallel. */
266 static inline bool
267 is_parallel_ctx (omp_context *ctx)
269 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
273 /* Return true if CTX is for an omp task. */
275 static inline bool
276 is_task_ctx (omp_context *ctx)
278 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
282 /* Return true if CTX is for an omp taskloop. */
284 static inline bool
285 is_taskloop_ctx (omp_context *ctx)
287 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
288 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
292 /* Return true if CTX is for a host omp teams. */
294 static inline bool
295 is_host_teams_ctx (omp_context *ctx)
297 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
298 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
301 /* Return true if CTX is for an omp parallel or omp task or host omp teams
302 (the last one is strictly not a task region in OpenMP speak, but we
303 need to treat it similarly). */
305 static inline bool
306 is_taskreg_ctx (omp_context *ctx)
308 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
311 /* Return true if EXPR is variable sized. */
313 static inline bool
314 is_variable_sized (const_tree expr)
316 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
319 /* Lookup variables. The "maybe" form
320 allows for the variable form to not have been entered, otherwise we
321 assert that the variable must have been entered. */
323 static inline tree
324 lookup_decl (tree var, omp_context *ctx)
326 tree *n = ctx->cb.decl_map->get (var);
327 return *n;
330 static inline tree
331 maybe_lookup_decl (const_tree var, omp_context *ctx)
333 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
334 return n ? *n : NULL_TREE;
337 static inline tree
338 lookup_field (tree var, omp_context *ctx)
340 splay_tree_node n;
341 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
342 return (tree) n->value;
345 static inline tree
346 lookup_sfield (splay_tree_key key, omp_context *ctx)
348 splay_tree_node n;
349 n = splay_tree_lookup (ctx->sfield_map
350 ? ctx->sfield_map : ctx->field_map, key);
351 return (tree) n->value;
354 static inline tree
355 lookup_sfield (tree var, omp_context *ctx)
357 return lookup_sfield ((splay_tree_key) var, ctx);
360 static inline tree
361 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
363 splay_tree_node n;
364 n = splay_tree_lookup (ctx->field_map, key);
365 return n ? (tree) n->value : NULL_TREE;
368 static inline tree
369 maybe_lookup_field (tree var, omp_context *ctx)
371 return maybe_lookup_field ((splay_tree_key) var, ctx);
374 /* Return true if DECL should be copied by pointer. SHARED_CTX is
375 the parallel context if DECL is to be shared. */
377 static bool
378 use_pointer_for_field (tree decl, omp_context *shared_ctx)
/* Aggregates and atomics are always passed by reference.  */
380 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
381 || TYPE_ATOMIC (TREE_TYPE (decl)))
382 return true;
384 /* We can only use copy-in/copy-out semantics for shared variables
385 when we know the value is not accessible from an outer scope. */
386 if (shared_ctx)
388 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
390 /* ??? Trivially accessible from anywhere. But why would we even
391 be passing an address in this case? Should we simply assert
392 this to be false, or should we have a cleanup pass that removes
393 these from the list of mappings? */
394 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
395 return true;
397 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
398 without analyzing the expression whether or not its location
399 is accessible to anyone else. In the case of nested parallel
400 regions it certainly may be. */
401 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
402 return true;
404 /* Do not use copy-in/copy-out for variables that have their
405 address taken. */
406 if (TREE_ADDRESSABLE (decl))
407 return true;
409 /* lower_send_shared_vars only uses copy-in, but not copy-out
410 for these. */
411 if (TREE_READONLY (decl)
412 || ((TREE_CODE (decl) == RESULT_DECL
413 || TREE_CODE (decl) == PARM_DECL)
414 && DECL_BY_REFERENCE (decl)))
415 return false;
417 /* Disallow copy-in/out in nested parallel if
418 decl is shared in outer parallel, otherwise
419 each thread could store the shared variable
420 in its own copy-in location, making the
421 variable no longer really shared. */
422 if (shared_ctx->is_nested)
424 omp_context *up;
426 for (up = shared_ctx->outer; up; up = up->outer)
427 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
428 break;
430 if (up)
432 tree c;
434 for (c = gimple_omp_taskreg_clauses (up->stmt);
435 c; c = OMP_CLAUSE_CHAIN (c))
436 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
437 && OMP_CLAUSE_DECL (c) == decl)
438 break;
440 if (c)
441 goto maybe_mark_addressable_and_ret;
445 /* For tasks avoid using copy-in/out. As tasks can be
446 deferred or executed in different thread, when GOMP_task
447 returns, the task hasn't necessarily terminated. */
448 if (is_task_ctx (shared_ctx))
450 tree outer;
/* Also reached by goto above: force the outer decl addressable and
   record its DECL_UID so later regimplification can find it.  */
451 maybe_mark_addressable_and_ret:
452 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
453 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
455 /* Taking address of OUTER in lower_send_shared_vars
456 might need regimplification of everything that uses the
457 variable. */
458 if (!task_shared_vars)
459 task_shared_vars = BITMAP_ALLOC (NULL);
460 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
461 TREE_ADDRESSABLE (outer) = 1;
463 return true;
467 return false;
470 /* Construct a new automatic decl similar to VAR. */
472 static tree
473 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
475 tree copy = copy_var_decl (var, name, type);
477 DECL_CONTEXT (copy) = current_function_decl;
478 DECL_CHAIN (copy) = ctx->block_vars;
479 /* If VAR is listed in task_shared_vars, it means it wasn't
480 originally addressable and is just because task needs to take
481 it's address. But we don't need to take address of privatizations
482 from that var. */
483 if (TREE_ADDRESSABLE (var)
484 && task_shared_vars
485 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
486 TREE_ADDRESSABLE (copy) = 0;
487 ctx->block_vars = copy;
489 return copy;
492 static tree
493 omp_copy_decl_1 (tree var, omp_context *ctx)
495 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
498 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
499 as appropriate. */
500 static tree
501 omp_build_component_ref (tree obj, tree field)
503 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
504 if (TREE_THIS_VOLATILE (field))
505 TREE_THIS_VOLATILE (ret) |= 1;
506 if (TREE_READONLY (field))
507 TREE_READONLY (ret) |= 1;
508 return ret;
511 /* Build tree nodes to access the field for VAR on the receiver side. */
513 static tree
514 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
516 tree x, field = lookup_field (var, ctx);
518 /* If the receiver record type was remapped in the child function,
519 remap the field into the new record type. */
520 x = maybe_lookup_field (field, ctx);
521 if (x != NULL)
522 field = x;
524 x = build_simple_mem_ref (ctx->receiver_decl);
525 TREE_THIS_NOTRAP (x) = 1;
526 x = omp_build_component_ref (x, field);
527 if (by_ref)
529 x = build_simple_mem_ref (x);
530 TREE_THIS_NOTRAP (x) = 1;
533 return x;
536 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
537 of a parallel, this is a component reference; for workshare constructs
538 this is some variable. */
540 static tree
541 build_outer_var_ref (tree var, omp_context *ctx,
542 enum omp_clause_code code = OMP_CLAUSE_ERROR)
544 tree x;
/* Skip taskgroup contexts; they never hold the data environment.  */
545 omp_context *outer = ctx->outer;
546 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
547 outer = outer->outer;
549 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
550 x = var;
551 else if (is_variable_sized (var))
/* Recurse on the base pointer stored in the value expression.  */
553 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
554 x = build_outer_var_ref (x, ctx, code);
555 x = build_simple_mem_ref (x);
557 else if (is_taskreg_ctx (ctx))
559 bool by_ref = use_pointer_for_field (var, NULL);
560 x = build_receiver_ref (var, by_ref, ctx);
562 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
563 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
564 || (code == OMP_CLAUSE_PRIVATE
565 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
566 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
567 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
569 /* #pragma omp simd isn't a worksharing construct, and can reference
570 even private vars in its linear etc. clauses.
571 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
572 to private vars in all worksharing constructs. */
573 x = NULL_TREE;
574 if (outer && is_taskreg_ctx (outer))
575 x = lookup_decl (var, outer);
576 else if (outer)
577 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
578 if (x == NULL_TREE)
579 x = var;
581 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
583 gcc_assert (outer);
/* Taskloop lastprivate fields are keyed by &DECL_UID in the outer
   context's field map (see install_var_field's mask & 8 path).  */
584 splay_tree_node n
585 = splay_tree_lookup (outer->field_map,
586 (splay_tree_key) &DECL_UID (var));
587 if (n == NULL)
589 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
590 x = var;
591 else
592 x = lookup_decl (var, outer);
594 else
596 tree field = (tree) n->value;
597 /* If the receiver record type was remapped in the child function,
598 remap the field into the new record type. */
599 x = maybe_lookup_field (field, outer);
600 if (x != NULL)
601 field = x;
603 x = build_simple_mem_ref (outer->receiver_decl);
604 x = omp_build_component_ref (x, field);
605 if (use_pointer_for_field (var, outer))
606 x = build_simple_mem_ref (x);
609 else if (outer)
611 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
613 outer = outer->outer;
614 gcc_assert (outer
615 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
617 x = lookup_decl (var, outer);
619 else if (omp_is_reference (var))
620 /* This can happen with orphaned constructs. If var is reference, it is
621 possible it is shared and as such valid. */
622 x = var;
623 else if (omp_member_access_dummy_var (var))
624 x = var;
625 else
626 gcc_unreachable ();
/* When we resolved to VAR itself, a member-access dummy still needs its
   value expression substituted (remapped into the outer context).  */
628 if (x == var)
630 tree t = omp_member_access_dummy_var (var);
631 if (t)
633 x = DECL_VALUE_EXPR (var);
634 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
635 if (o != t)
636 x = unshare_and_remap (x, t, o);
637 else
638 x = unshare_expr (x);
642 if (omp_is_reference (var))
643 x = build_simple_mem_ref (x);
645 return x;
648 /* Build tree nodes to access the field for VAR on the sender side. */
650 static tree
651 build_sender_ref (splay_tree_key key, omp_context *ctx)
653 tree field = lookup_sfield (key, ctx);
654 return omp_build_component_ref (ctx->sender_decl, field);
657 static tree
658 build_sender_ref (tree var, omp_context *ctx)
660 return build_sender_ref ((splay_tree_key) var, ctx);
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.
   MASK is a bitmask controlling the installation (as used by the code
   below): bit 0 — record in ctx->field_map/record_type; bit 1 — record
   in ctx->sfield_map/srecord_type; bit 2 — VAR is an array passed via a
   double pointer; bit 3 — key the maps by &DECL_UID (var) instead of by
   VAR itself.  (The old reference to BASE_POINTERS_RESTRICT was stale;
   no such parameter exists.)  */
666 static void
667 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
669 tree field, type, sfield = NULL_TREE;
670 splay_tree_key key = (splay_tree_key) var;
672 if ((mask & 8) != 0)
674 key = (splay_tree_key) &DECL_UID (var);
675 gcc_checking_assert (key != (splay_tree_key) var);
677 gcc_assert ((mask & 1) == 0
678 || !splay_tree_lookup (ctx->field_map, key));
679 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
680 || !splay_tree_lookup (ctx->sfield_map, key));
681 gcc_assert ((mask & 3) == 3
682 || !is_gimple_omp_oacc (ctx->stmt));
684 type = TREE_TYPE (var);
685 /* Prevent redeclaring the var in the split-off function with a restrict
686 pointer type. Note that we only clear type itself, restrict qualifiers in
687 the pointed-to type will be ignored by points-to analysis. */
688 if (POINTER_TYPE_P (type)
689 && TYPE_RESTRICT (type))
690 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
692 if (mask & 4)
694 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
695 type = build_pointer_type (build_pointer_type (type));
697 else if (by_ref)
698 type = build_pointer_type (type);
699 else if ((mask & 3) == 1 && omp_is_reference (var))
700 type = TREE_TYPE (type);
702 field = build_decl (DECL_SOURCE_LOCATION (var),
703 FIELD_DECL, DECL_NAME (var), type);
705 /* Remember what variable this field was created for. This does have a
706 side effect of making dwarf2out ignore this member, so for helpful
707 debugging we clear it later in delete_omp_context. */
708 DECL_ABSTRACT_ORIGIN (field) = var;
709 if (type == TREE_TYPE (var))
711 SET_DECL_ALIGN (field, DECL_ALIGN (var));
712 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
713 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
715 else
716 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
718 if ((mask & 3) == 3)
720 insert_field_into_struct (ctx->record_type, field);
721 if (ctx->srecord_type)
723 sfield = build_decl (DECL_SOURCE_LOCATION (var),
724 FIELD_DECL, DECL_NAME (var), type);
725 DECL_ABSTRACT_ORIGIN (sfield) = var;
726 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
727 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
728 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
729 insert_field_into_struct (ctx->srecord_type, sfield);
732 else
/* Lazily create the sender record, mirroring every field already
   installed in record_type.  */
734 if (ctx->srecord_type == NULL_TREE)
736 tree t;
738 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
739 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
740 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
742 sfield = build_decl (DECL_SOURCE_LOCATION (t),
743 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
744 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
745 insert_field_into_struct (ctx->srecord_type, sfield);
746 splay_tree_insert (ctx->sfield_map,
747 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
748 (splay_tree_value) sfield);
751 sfield = field;
752 insert_field_into_struct ((mask & 1) ? ctx->record_type
753 : ctx->srecord_type, field);
756 if (mask & 1)
757 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
758 if ((mask & 2) && ctx->sfield_map)
759 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
762 static tree
763 install_var_local (tree var, omp_context *ctx)
765 tree new_var = omp_copy_decl_1 (var, ctx);
766 insert_decl_map (&ctx->cb, var, new_var);
767 return new_var;
770 /* Adjust the replacement for DECL in CTX for the new context. This means
771 copying the DECL_VALUE_EXPR, and fixing up the type. */
773 static void
774 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
776 tree new_decl, size;
778 new_decl = lookup_decl (decl, ctx);
780 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
/* Copy a value expression over when the size isn't constant or we are
   producing a private debug copy.  */
782 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
783 && DECL_HAS_VALUE_EXPR_P (decl))
785 tree ve = DECL_VALUE_EXPR (decl);
786 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
787 SET_DECL_VALUE_EXPR (new_decl, ve);
788 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
/* Remap variable sizes; fall back to the (remapped) type's size when
   remap_decl fails.  */
791 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
793 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
794 if (size == error_mark_node)
795 size = TYPE_SIZE (TREE_TYPE (new_decl));
796 DECL_SIZE (new_decl) = size;
798 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
799 if (size == error_mark_node)
800 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
801 DECL_SIZE_UNIT (new_decl) = size;
805 /* The callback for remap_decl. Search all containing contexts for a
806 mapping of the variable; this avoids having to duplicate the splay
807 tree ahead of time. We know a mapping doesn't already exist in the
808 given context. Create new mappings to implement default semantics. */
810 static tree
811 omp_copy_decl (tree var, copy_body_data *cb)
/* CB is really the omp_context whose first member is the
   copy_body_data (see the "inheritance" note on struct omp_context).  */
813 omp_context *ctx = (omp_context *) cb;
814 tree new_var;
816 if (TREE_CODE (var) == LABEL_DECL)
/* Labels with well-known addresses must not be duplicated.  */
818 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
819 return var;
820 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
821 DECL_CONTEXT (new_var) = current_function_decl;
822 insert_decl_map (&ctx->cb, var, new_var);
823 return new_var;
/* Walk outward through enclosing task-region contexts looking for an
   existing mapping of VAR.  */
826 while (!is_taskreg_ctx (ctx))
828 ctx = ctx->outer;
829 if (ctx == NULL)
830 return var;
831 new_var = maybe_lookup_decl (var, ctx);
832 if (new_var)
833 return new_var;
836 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
837 return var;
/* No mapping found and VAR is function-local: signal the caller.  */
839 return error_mark_node;
842 /* Create a new context, with OUTER_CTX being the surrounding context. */
844 static omp_context *
845 new_omp_context (gimple *stmt, omp_context *outer_ctx)
847 omp_context *ctx = XCNEW (omp_context);
/* Register STMT -> CTX so later passes (and delete) can find it.  */
849 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
850 (splay_tree_value) ctx);
851 ctx->stmt = stmt;
853 if (outer_ctx)
/* Inherit the copy_body_data from the enclosing context.  */
855 ctx->outer = outer_ctx;
856 ctx->cb = outer_ctx->cb;
857 ctx->cb.block = NULL;
858 ctx->depth = outer_ctx->depth + 1;
860 else
/* Outermost context: initialize the copy_body_data from scratch for
   a same-function remap (src == dst).  */
862 ctx->cb.src_fn = current_function_decl;
863 ctx->cb.dst_fn = current_function_decl;
864 ctx->cb.src_node = cgraph_node::get (current_function_decl);
865 gcc_checking_assert (ctx->cb.src_node);
866 ctx->cb.dst_node = ctx->cb.src_node;
867 ctx->cb.src_cfun = cfun;
868 ctx->cb.copy_decl = omp_copy_decl;
869 ctx->cb.eh_lp_nr = 0;
870 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
871 ctx->depth = 1;
874 ctx->cb.decl_map = new hash_map<tree, tree>;
876 return ctx;
879 static gimple_seq maybe_catch_exception (gimple_seq);
881 /* Finalize task copyfn. */
883 static void
884 finalize_task_copyfn (gomp_task *task_stmt)
886 struct function *child_cfun;
887 tree child_fn;
888 gimple_seq seq = NULL, new_seq;
889 gbind *bind;
891 child_fn = gimple_omp_task_copy_fn (task_stmt);
892 if (child_fn == NULL_TREE)
893 return;
895 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
896 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
/* Gimplify the copy function's body inside its own cfun; wrap it in an
   EH region if needed (maybe_catch_exception).  */
898 push_cfun (child_cfun);
899 bind = gimplify_body (child_fn, false);
900 gimple_seq_add_stmt (&seq, bind);
901 new_seq = maybe_catch_exception (seq);
902 if (new_seq != seq)
904 bind = gimple_build_bind (NULL, new_seq, NULL);
905 seq = NULL;
906 gimple_seq_add_stmt (&seq, bind);
908 gimple_set_body (child_fn, seq);
909 pop_cfun ();
911 /* Inform the callgraph about the new function. */
912 cgraph_node *node = cgraph_node::get_create (child_fn);
913 node->parallelized_function = 1;
914 cgraph_node::add_new_function (child_fn, false);
917 /* Destroy a omp_context data structures. Called through the splay tree
918 value delete callback. */
920 static void
921 delete_omp_context (splay_tree_value value)
923 omp_context *ctx = (omp_context *) value;
925 delete ctx->cb.decl_map;
927 if (ctx->field_map)
928 splay_tree_delete (ctx->field_map);
929 if (ctx->sfield_map)
930 splay_tree_delete (ctx->sfield_map);
932 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
933 it produces corrupt debug information. */
934 if (ctx->record_type)
936 tree t;
937 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
938 DECL_ABSTRACT_ORIGIN (t) = NULL;
940 if (ctx->srecord_type)
942 tree t;
943 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
944 DECL_ABSTRACT_ORIGIN (t) = NULL;
/* Task contexts may own a copy function that still needs finalizing.  */
947 if (is_task_ctx (ctx))
948 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
950 if (ctx->task_reduction_map)
952 ctx->task_reductions.release ();
953 delete ctx->task_reduction_map;
956 XDELETE (ctx);
959 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
960 context. */
962 static void
963 fixup_child_record_type (omp_context *ctx)
965 tree f, type = ctx->record_type;
967 if (!ctx->receiver_decl)
968 return;
969 /* ??? It isn't sufficient to just call remap_type here, because
970 variably_modified_type_p doesn't work the way we expect for
971 record types. Testing each field for whether it needs remapping
972 and creating a new record by hand works, however. */
973 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
974 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
975 break;
/* Only rebuild the record when at least one field is variably
   modified; otherwise reuse ctx->record_type unchanged.  */
976 if (f)
978 tree name, new_fields = NULL;
980 type = lang_hooks.types.make_type (RECORD_TYPE);
981 name = DECL_NAME (TYPE_NAME (ctx->record_type));
982 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
983 TYPE_DECL, name, type);
984 TYPE_NAME (type) = name;
986 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
988 tree new_f = copy_node (f);
989 DECL_CONTEXT (new_f) = type;
990 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
991 DECL_CHAIN (new_f) = new_fields;
992 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
993 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
994 &ctx->cb, NULL);
995 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
996 &ctx->cb, NULL);
997 new_fields = new_f;
999 /* Arrange to be able to look up the receiver field
1000 given the sender field. */
1001 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1002 (splay_tree_value) new_f);
1004 TYPE_FIELDS (type) = nreverse (new_fields);
1005 layout_type (type);
1008 /* In a target region we never modify any of the pointers in *.omp_data_i,
1009 so attempt to help the optimizers. */
1010 if (is_gimple_omp_offloaded (ctx->stmt))
1011 type = build_qualified_type (type, TYPE_QUAL_CONST);
1013 TREE_TYPE (ctx->receiver_decl)
1014 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1017 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1018 specified by CLAUSES. */
1020 static void
1021 scan_sharing_clauses (tree clauses, omp_context *ctx)
1023 tree c, decl;
1024 bool scan_array_reductions = false;
1026 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1028 bool by_ref;
1030 switch (OMP_CLAUSE_CODE (c))
1032 case OMP_CLAUSE_PRIVATE:
1033 decl = OMP_CLAUSE_DECL (c);
1034 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1035 goto do_private;
1036 else if (!is_variable_sized (decl))
1037 install_var_local (decl, ctx);
1038 break;
1040 case OMP_CLAUSE_SHARED:
1041 decl = OMP_CLAUSE_DECL (c);
1042 /* Ignore shared directives in teams construct inside of
1043 target construct. */
1044 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1045 && !is_host_teams_ctx (ctx))
1047 /* Global variables don't need to be copied,
1048 the receiver side will use them directly. */
1049 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1050 if (is_global_var (odecl))
1051 break;
1052 insert_decl_map (&ctx->cb, decl, odecl);
1053 break;
1055 gcc_assert (is_taskreg_ctx (ctx));
1056 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1057 || !is_variable_sized (decl));
1058 /* Global variables don't need to be copied,
1059 the receiver side will use them directly. */
1060 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1061 break;
1062 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1064 use_pointer_for_field (decl, ctx);
1065 break;
1067 by_ref = use_pointer_for_field (decl, NULL);
1068 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1069 || TREE_ADDRESSABLE (decl)
1070 || by_ref
1071 || omp_is_reference (decl))
1073 by_ref = use_pointer_for_field (decl, ctx);
1074 install_var_field (decl, by_ref, 3, ctx);
1075 install_var_local (decl, ctx);
1076 break;
1078 /* We don't need to copy const scalar vars back. */
1079 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1080 goto do_private;
1082 case OMP_CLAUSE_REDUCTION:
1083 case OMP_CLAUSE_IN_REDUCTION:
1084 decl = OMP_CLAUSE_DECL (c);
1085 if (TREE_CODE (decl) == MEM_REF)
1087 tree t = TREE_OPERAND (decl, 0);
1088 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1089 t = TREE_OPERAND (t, 0);
1090 if (TREE_CODE (t) == INDIRECT_REF
1091 || TREE_CODE (t) == ADDR_EXPR)
1092 t = TREE_OPERAND (t, 0);
1093 install_var_local (t, ctx);
1094 if (is_taskreg_ctx (ctx)
1095 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1096 || (is_task_ctx (ctx)
1097 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1098 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1099 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1100 == POINTER_TYPE)))))
1101 && !is_variable_sized (t)
1102 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1103 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1104 && !is_task_ctx (ctx))))
1106 by_ref = use_pointer_for_field (t, NULL);
1107 if (is_task_ctx (ctx)
1108 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1109 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1111 install_var_field (t, false, 1, ctx);
1112 install_var_field (t, by_ref, 2, ctx);
1114 else
1115 install_var_field (t, by_ref, 3, ctx);
1117 break;
1119 if (is_task_ctx (ctx)
1120 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1121 && OMP_CLAUSE_REDUCTION_TASK (c)
1122 && is_parallel_ctx (ctx)))
1124 /* Global variables don't need to be copied,
1125 the receiver side will use them directly. */
1126 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1128 by_ref = use_pointer_for_field (decl, ctx);
1129 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1130 install_var_field (decl, by_ref, 3, ctx);
1132 install_var_local (decl, ctx);
1133 break;
1135 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1136 && OMP_CLAUSE_REDUCTION_TASK (c))
1138 install_var_local (decl, ctx);
1139 break;
1141 goto do_private;
1143 case OMP_CLAUSE_LASTPRIVATE:
1144 /* Let the corresponding firstprivate clause create
1145 the variable. */
1146 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1147 break;
1148 /* FALLTHRU */
1150 case OMP_CLAUSE_FIRSTPRIVATE:
1151 case OMP_CLAUSE_LINEAR:
1152 decl = OMP_CLAUSE_DECL (c);
1153 do_private:
1154 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1155 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1156 && is_gimple_omp_offloaded (ctx->stmt))
1158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1159 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1160 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1161 install_var_field (decl, true, 3, ctx);
1162 else
1163 install_var_field (decl, false, 3, ctx);
1165 if (is_variable_sized (decl))
1167 if (is_task_ctx (ctx))
1168 install_var_field (decl, false, 1, ctx);
1169 break;
1171 else if (is_taskreg_ctx (ctx))
1173 bool global
1174 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1175 by_ref = use_pointer_for_field (decl, NULL);
1177 if (is_task_ctx (ctx)
1178 && (global || by_ref || omp_is_reference (decl)))
1180 install_var_field (decl, false, 1, ctx);
1181 if (!global)
1182 install_var_field (decl, by_ref, 2, ctx);
1184 else if (!global)
1185 install_var_field (decl, by_ref, 3, ctx);
1187 install_var_local (decl, ctx);
1188 break;
1190 case OMP_CLAUSE_USE_DEVICE_PTR:
1191 decl = OMP_CLAUSE_DECL (c);
1192 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1193 install_var_field (decl, true, 3, ctx);
1194 else
1195 install_var_field (decl, false, 3, ctx);
1196 if (DECL_SIZE (decl)
1197 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1199 tree decl2 = DECL_VALUE_EXPR (decl);
1200 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1201 decl2 = TREE_OPERAND (decl2, 0);
1202 gcc_assert (DECL_P (decl2));
1203 install_var_local (decl2, ctx);
1205 install_var_local (decl, ctx);
1206 break;
1208 case OMP_CLAUSE_IS_DEVICE_PTR:
1209 decl = OMP_CLAUSE_DECL (c);
1210 goto do_private;
1212 case OMP_CLAUSE__LOOPTEMP_:
1213 case OMP_CLAUSE__REDUCTEMP_:
1214 gcc_assert (is_taskreg_ctx (ctx));
1215 decl = OMP_CLAUSE_DECL (c);
1216 install_var_field (decl, false, 3, ctx);
1217 install_var_local (decl, ctx);
1218 break;
1220 case OMP_CLAUSE_COPYPRIVATE:
1221 case OMP_CLAUSE_COPYIN:
1222 decl = OMP_CLAUSE_DECL (c);
1223 by_ref = use_pointer_for_field (decl, NULL);
1224 install_var_field (decl, by_ref, 3, ctx);
1225 break;
1227 case OMP_CLAUSE_FINAL:
1228 case OMP_CLAUSE_IF:
1229 case OMP_CLAUSE_NUM_THREADS:
1230 case OMP_CLAUSE_NUM_TEAMS:
1231 case OMP_CLAUSE_THREAD_LIMIT:
1232 case OMP_CLAUSE_DEVICE:
1233 case OMP_CLAUSE_SCHEDULE:
1234 case OMP_CLAUSE_DIST_SCHEDULE:
1235 case OMP_CLAUSE_DEPEND:
1236 case OMP_CLAUSE_PRIORITY:
1237 case OMP_CLAUSE_GRAINSIZE:
1238 case OMP_CLAUSE_NUM_TASKS:
1239 case OMP_CLAUSE_NUM_GANGS:
1240 case OMP_CLAUSE_NUM_WORKERS:
1241 case OMP_CLAUSE_VECTOR_LENGTH:
1242 if (ctx->outer)
1243 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1244 break;
1246 case OMP_CLAUSE_TO:
1247 case OMP_CLAUSE_FROM:
1248 case OMP_CLAUSE_MAP:
1249 if (ctx->outer)
1250 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1251 decl = OMP_CLAUSE_DECL (c);
1252 /* Global variables with "omp declare target" attribute
1253 don't need to be copied, the receiver side will use them
1254 directly. However, global variables with "omp declare target link"
1255 attribute need to be copied. Or when ALWAYS modifier is used. */
1256 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1257 && DECL_P (decl)
1258 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1259 && (OMP_CLAUSE_MAP_KIND (c)
1260 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1261 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1262 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1263 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1264 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1265 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1266 && varpool_node::get_create (decl)->offloadable
1267 && !lookup_attribute ("omp declare target link",
1268 DECL_ATTRIBUTES (decl)))
1269 break;
1270 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1271 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1273 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1274 not offloaded; there is nothing to map for those. */
1275 if (!is_gimple_omp_offloaded (ctx->stmt)
1276 && !POINTER_TYPE_P (TREE_TYPE (decl))
1277 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1278 break;
1280 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1281 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1282 || (OMP_CLAUSE_MAP_KIND (c)
1283 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1285 if (TREE_CODE (decl) == COMPONENT_REF
1286 || (TREE_CODE (decl) == INDIRECT_REF
1287 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1288 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1289 == REFERENCE_TYPE)))
1290 break;
1291 if (DECL_SIZE (decl)
1292 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1294 tree decl2 = DECL_VALUE_EXPR (decl);
1295 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1296 decl2 = TREE_OPERAND (decl2, 0);
1297 gcc_assert (DECL_P (decl2));
1298 install_var_local (decl2, ctx);
1300 install_var_local (decl, ctx);
1301 break;
1303 if (DECL_P (decl))
1305 if (DECL_SIZE (decl)
1306 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1308 tree decl2 = DECL_VALUE_EXPR (decl);
1309 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1310 decl2 = TREE_OPERAND (decl2, 0);
1311 gcc_assert (DECL_P (decl2));
1312 install_var_field (decl2, true, 3, ctx);
1313 install_var_local (decl2, ctx);
1314 install_var_local (decl, ctx);
1316 else
1318 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1319 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1320 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1321 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1322 install_var_field (decl, true, 7, ctx);
1323 else
1324 install_var_field (decl, true, 3, ctx);
1325 if (is_gimple_omp_offloaded (ctx->stmt)
1326 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1327 install_var_local (decl, ctx);
1330 else
1332 tree base = get_base_address (decl);
1333 tree nc = OMP_CLAUSE_CHAIN (c);
1334 if (DECL_P (base)
1335 && nc != NULL_TREE
1336 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1337 && OMP_CLAUSE_DECL (nc) == base
1338 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1339 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1341 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1342 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1344 else
1346 if (ctx->outer)
1348 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1349 decl = OMP_CLAUSE_DECL (c);
1351 gcc_assert (!splay_tree_lookup (ctx->field_map,
1352 (splay_tree_key) decl));
1353 tree field
1354 = build_decl (OMP_CLAUSE_LOCATION (c),
1355 FIELD_DECL, NULL_TREE, ptr_type_node);
1356 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1357 insert_field_into_struct (ctx->record_type, field);
1358 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1359 (splay_tree_value) field);
1362 break;
1364 case OMP_CLAUSE__GRIDDIM_:
1365 if (ctx->outer)
1367 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1368 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1370 break;
1372 case OMP_CLAUSE_NOWAIT:
1373 case OMP_CLAUSE_ORDERED:
1374 case OMP_CLAUSE_COLLAPSE:
1375 case OMP_CLAUSE_UNTIED:
1376 case OMP_CLAUSE_MERGEABLE:
1377 case OMP_CLAUSE_PROC_BIND:
1378 case OMP_CLAUSE_SAFELEN:
1379 case OMP_CLAUSE_SIMDLEN:
1380 case OMP_CLAUSE_THREADS:
1381 case OMP_CLAUSE_SIMD:
1382 case OMP_CLAUSE_NOGROUP:
1383 case OMP_CLAUSE_DEFAULTMAP:
1384 case OMP_CLAUSE_ASYNC:
1385 case OMP_CLAUSE_WAIT:
1386 case OMP_CLAUSE_GANG:
1387 case OMP_CLAUSE_WORKER:
1388 case OMP_CLAUSE_VECTOR:
1389 case OMP_CLAUSE_INDEPENDENT:
1390 case OMP_CLAUSE_AUTO:
1391 case OMP_CLAUSE_SEQ:
1392 case OMP_CLAUSE_TILE:
1393 case OMP_CLAUSE__SIMT_:
1394 case OMP_CLAUSE_DEFAULT:
1395 case OMP_CLAUSE_NONTEMPORAL:
1396 case OMP_CLAUSE_IF_PRESENT:
1397 case OMP_CLAUSE_FINALIZE:
1398 case OMP_CLAUSE_TASK_REDUCTION:
1399 break;
1401 case OMP_CLAUSE_ALIGNED:
1402 decl = OMP_CLAUSE_DECL (c);
1403 if (is_global_var (decl)
1404 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1405 install_var_local (decl, ctx);
1406 break;
1408 case OMP_CLAUSE__CACHE_:
1409 default:
1410 gcc_unreachable ();
1414 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1416 switch (OMP_CLAUSE_CODE (c))
1418 case OMP_CLAUSE_LASTPRIVATE:
1419 /* Let the corresponding firstprivate clause create
1420 the variable. */
1421 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1422 scan_array_reductions = true;
1423 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1424 break;
1425 /* FALLTHRU */
1427 case OMP_CLAUSE_FIRSTPRIVATE:
1428 case OMP_CLAUSE_PRIVATE:
1429 case OMP_CLAUSE_LINEAR:
1430 case OMP_CLAUSE_IS_DEVICE_PTR:
1431 decl = OMP_CLAUSE_DECL (c);
1432 if (is_variable_sized (decl))
1434 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1435 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1436 && is_gimple_omp_offloaded (ctx->stmt))
1438 tree decl2 = DECL_VALUE_EXPR (decl);
1439 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1440 decl2 = TREE_OPERAND (decl2, 0);
1441 gcc_assert (DECL_P (decl2));
1442 install_var_local (decl2, ctx);
1443 fixup_remapped_decl (decl2, ctx, false);
1445 install_var_local (decl, ctx);
1447 fixup_remapped_decl (decl, ctx,
1448 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1449 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1450 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1451 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1452 scan_array_reductions = true;
1453 break;
1455 case OMP_CLAUSE_REDUCTION:
1456 case OMP_CLAUSE_IN_REDUCTION:
1457 decl = OMP_CLAUSE_DECL (c);
1458 if (TREE_CODE (decl) != MEM_REF)
1460 if (is_variable_sized (decl))
1461 install_var_local (decl, ctx);
1462 fixup_remapped_decl (decl, ctx, false);
1464 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1465 scan_array_reductions = true;
1466 break;
1468 case OMP_CLAUSE_TASK_REDUCTION:
1469 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1470 scan_array_reductions = true;
1471 break;
1473 case OMP_CLAUSE_SHARED:
1474 /* Ignore shared directives in teams construct inside of
1475 target construct. */
1476 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1477 && !is_host_teams_ctx (ctx))
1478 break;
1479 decl = OMP_CLAUSE_DECL (c);
1480 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1481 break;
1482 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1484 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1485 ctx->outer)))
1486 break;
1487 bool by_ref = use_pointer_for_field (decl, ctx);
1488 install_var_field (decl, by_ref, 11, ctx);
1489 break;
1491 fixup_remapped_decl (decl, ctx, false);
1492 break;
1494 case OMP_CLAUSE_MAP:
1495 if (!is_gimple_omp_offloaded (ctx->stmt))
1496 break;
1497 decl = OMP_CLAUSE_DECL (c);
1498 if (DECL_P (decl)
1499 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1500 && (OMP_CLAUSE_MAP_KIND (c)
1501 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1502 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1503 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1504 && varpool_node::get_create (decl)->offloadable)
1505 break;
1506 if (DECL_P (decl))
1508 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1509 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1510 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1511 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1513 tree new_decl = lookup_decl (decl, ctx);
1514 TREE_TYPE (new_decl)
1515 = remap_type (TREE_TYPE (decl), &ctx->cb);
1517 else if (DECL_SIZE (decl)
1518 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1520 tree decl2 = DECL_VALUE_EXPR (decl);
1521 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1522 decl2 = TREE_OPERAND (decl2, 0);
1523 gcc_assert (DECL_P (decl2));
1524 fixup_remapped_decl (decl2, ctx, false);
1525 fixup_remapped_decl (decl, ctx, true);
1527 else
1528 fixup_remapped_decl (decl, ctx, false);
1530 break;
1532 case OMP_CLAUSE_COPYPRIVATE:
1533 case OMP_CLAUSE_COPYIN:
1534 case OMP_CLAUSE_DEFAULT:
1535 case OMP_CLAUSE_IF:
1536 case OMP_CLAUSE_NUM_THREADS:
1537 case OMP_CLAUSE_NUM_TEAMS:
1538 case OMP_CLAUSE_THREAD_LIMIT:
1539 case OMP_CLAUSE_DEVICE:
1540 case OMP_CLAUSE_SCHEDULE:
1541 case OMP_CLAUSE_DIST_SCHEDULE:
1542 case OMP_CLAUSE_NOWAIT:
1543 case OMP_CLAUSE_ORDERED:
1544 case OMP_CLAUSE_COLLAPSE:
1545 case OMP_CLAUSE_UNTIED:
1546 case OMP_CLAUSE_FINAL:
1547 case OMP_CLAUSE_MERGEABLE:
1548 case OMP_CLAUSE_PROC_BIND:
1549 case OMP_CLAUSE_SAFELEN:
1550 case OMP_CLAUSE_SIMDLEN:
1551 case OMP_CLAUSE_ALIGNED:
1552 case OMP_CLAUSE_DEPEND:
1553 case OMP_CLAUSE__LOOPTEMP_:
1554 case OMP_CLAUSE__REDUCTEMP_:
1555 case OMP_CLAUSE_TO:
1556 case OMP_CLAUSE_FROM:
1557 case OMP_CLAUSE_PRIORITY:
1558 case OMP_CLAUSE_GRAINSIZE:
1559 case OMP_CLAUSE_NUM_TASKS:
1560 case OMP_CLAUSE_THREADS:
1561 case OMP_CLAUSE_SIMD:
1562 case OMP_CLAUSE_NOGROUP:
1563 case OMP_CLAUSE_DEFAULTMAP:
1564 case OMP_CLAUSE_USE_DEVICE_PTR:
1565 case OMP_CLAUSE_NONTEMPORAL:
1566 case OMP_CLAUSE_ASYNC:
1567 case OMP_CLAUSE_WAIT:
1568 case OMP_CLAUSE_NUM_GANGS:
1569 case OMP_CLAUSE_NUM_WORKERS:
1570 case OMP_CLAUSE_VECTOR_LENGTH:
1571 case OMP_CLAUSE_GANG:
1572 case OMP_CLAUSE_WORKER:
1573 case OMP_CLAUSE_VECTOR:
1574 case OMP_CLAUSE_INDEPENDENT:
1575 case OMP_CLAUSE_AUTO:
1576 case OMP_CLAUSE_SEQ:
1577 case OMP_CLAUSE_TILE:
1578 case OMP_CLAUSE__GRIDDIM_:
1579 case OMP_CLAUSE__SIMT_:
1580 case OMP_CLAUSE_IF_PRESENT:
1581 case OMP_CLAUSE_FINALIZE:
1582 break;
1584 case OMP_CLAUSE__CACHE_:
1585 default:
1586 gcc_unreachable ();
1590 gcc_checking_assert (!scan_array_reductions
1591 || !is_gimple_omp_oacc (ctx->stmt));
1592 if (scan_array_reductions)
1594 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1595 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1596 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1597 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1598 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1600 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1601 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1603 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1604 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1605 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1606 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1607 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1608 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1612 /* Create a new name for omp child function. Returns an identifier. */
1614 static tree
1615 create_omp_child_function_name (bool task_copy)
1617 return clone_function_name_numbered (current_function_decl,
1618 task_copy ? "_omp_cpyfn" : "_omp_fn");
1621 /* Return true if CTX may belong to offloaded code: either if current function
1622 is offloaded, or any enclosing context corresponds to a target region. */
1624 static bool
1625 omp_maybe_offloaded_ctx (omp_context *ctx)
1627 if (cgraph_node::get (current_function_decl)->offloadable)
1628 return true;
1629 for (; ctx; ctx = ctx->outer)
1630 if (is_gimple_omp_offloaded (ctx->stmt))
1631 return true;
1632 return false;
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  The new FUNCTION_DECL is recorded either as
   CTX's destination function (ordinary outlining) or as the task's
   copy function when TASK_COPY is true.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* A task-copy helper takes (src, dst) data pointers; the ordinary
     outlined body takes a single data pointer.  Both return void.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  /* Mark the decl as a compiler-generated, non-inlinable, file-local
     function with a definition to come.  */
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* Advance A past the last "omp declare simd" attribute; the
	 suffix of the list starting at A is shared untouched.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      /* Rebuild the prefix of the list up to A, dropping the "omp
	 declare simd" entries and copying the others so the attribute
	 list of current_function_decl is left intact.  */
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* Inherit optimization/target options and versioning state from the
     enclosing function.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  /* Tag offloadable child functions so the offload machinery can find
     them, unless the enclosing function is already declare-target.  */
  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* Child functions return void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* The incoming data block pointer, .omp_data_i.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task-copy helpers additionally take the outgoing data block,
	 .omp_data_o, prepended to the argument list.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  On entry WI->info
   points at the gf_mask loop kind being searched for; on success it is
   replaced with the matching GIMPLE_OMP_FOR statement and the walk is
   terminated by returning integer_zero_node.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    /* WALK_SUBSTMTS expands to the case labels of statements whose
       bodies must be walked recursively.  */
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.
   MSK identifies the kind of combined-into loop to search for inside
   STMT's body; OUTER_CTX is the enclosing context whose copy map the
   new temporaries are registered in (mapped to themselves).  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  /* omp_find_combined_for overwrites wi.info with the found loop, so a
     changed pointer means a combined GIMPLE_OMP_FOR was located.  */
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      /* Prepend one _LOOPTEMP_ clause per needed temporary.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* Taskloops with reductions additionally need a _REDUCTEMP_ pointer
     temporary for the reduction bookkeeping data.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
/* Scan an OpenMP parallel directive.  Builds an omp_context for the
   region, creates the outlined child function and the .omp_data_s
   record type, then scans the clauses and body.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause is marked as a task reduction, prepend a
     single _REDUCTEMP_ clause for the reduction bookkeeping data.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	/* The inner C intentionally shadows the loop variable: it is the
	   freshly built _REDUCTEMP_ clause.  */
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* Build the .omp_data_s record type that carries shared data into
     the child function; fields are added by scan_sharing_clauses.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If nothing ended up in the data-sharing record, drop it.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  Builds an omp_context, the outlined
   child function, and the .omp_data_s record (plus a .omp_data_a sender
   record and task-copy function if one is needed), then scans the
   clauses and body.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A taskwait with depend clauses needs no child function or data
     record; only its clauses need scanning.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* scan_sharing_clauses may have created a sender record type; if so,
     name it and build the task-copy helper function.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* With no fields to pass, drop the record and report a zero-size,
     byte-aligned data block to the runtime.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1985 /* Helper function for finish_taskreg_scan, called through walk_tree.
1986 If maybe_lookup_decl_in_outer_context returns non-NULL for some
1987 tree, replace it in the expression. */
1989 static tree
1990 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1992 if (VAR_P (*tp))
1994 omp_context *ctx = (omp_context *) data;
1995 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1996 if (t != *tp)
1998 if (DECL_HAS_VALUE_EXPR_P (t))
1999 t = unshare_expr (DECL_VALUE_EXPR (t));
2000 *tp = t;
2002 *walk_subtrees = 0;
2004 else if (IS_TYPE_OR_DECL_P (*tp))
2005 *walk_subtrees = 0;
2006 return NULL_TREE;
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    /* Only fields whose decl became addressable and now needs
	       pass-by-pointer require fixing.  */
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Retype the field as a pointer to the decl and reset any
	       alignment/volatility inherited from the value type.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		/* Keep the sender record's field in sync.  */
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* GIMPLE_OMP_TASK: the record may contain variable-sized data.  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Unlink the VLA field and append it to the vla_fields
	       chain, which is spliced back at the end below.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink f1/f2 (and f3) from wherever they sit, then relink
	     them at the head in f1, f2[, f3] order.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Mirror the reordering in the sender record.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      /* NOTE(review): this assignment looks redundant — both arms
		 of the following if overwrite DECL_CHAIN (f2); confirm
		 before removing.  */
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Report the data block size/alignment to the runtime; a
	 non-constant size (VLAs) must be remapped into the outer
	 context so it can be evaluated at the task spawn point.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2181 /* Find the enclosing offload context. */
2183 static omp_context *
2184 enclosing_target_ctx (omp_context *ctx)
2186 for (; ctx; ctx = ctx->outer)
2187 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2188 break;
2190 return ctx;
2193 /* Return true if ctx is part of an oacc kernels region. */
2195 static bool
2196 ctx_in_oacc_kernels_region (omp_context *ctx)
2198 for (;ctx != NULL; ctx = ctx->outer)
2200 gimple *stmt = ctx->stmt;
2201 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2202 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2203 return true;
2206 return false;
/* Check the parallelism clauses inside a kernels regions.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.

   Returns the union of the gang/worker/vector dimension mask used by
   STMT and by all enclosing OpenACC loops.  When called recursively on
   outer contexts, STMT is NULL and no diagnostics are emitted (the
   outer levels are only consulted for their masks).  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  /* First accumulate the parallelism already claimed by enclosing loops.  */
  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      /* Recursive (non-diagnosing) invocation: only OMP_FOR contexts
	 contribute a mask; anything else is transparent.  */
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  /* Collect this loop's explicit parallelism clauses.  */
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      /* seq/auto/explicit-parallelism are mutually exclusive, and a loop
	 may not reuse a dimension already consumed by a containing loop.  */
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
/* Scan a GIMPLE_OMP_FOR: create its omp_context, validate/massage OpenACC
   loop clauses, scan the sharing clauses and all loop operands, and return
   the new context.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* Inside an OpenACC "parallel" region (or an orphaned loop), the
	 gang/worker/vector clauses may not carry arguments.  */
      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  /* Unlink REDUCTION clauses in place via the chain pointer.  */
	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	  /* Kernels regions need early gang/worker/vector checking.  */
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan the pre-body and every operand of each collapsed loop level.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.

   Replaces the simd loop at GSI with a GIMPLE_BIND of the form:

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, marked with an artificial _simt_ clause>
	   goto lab3;
     lab2: <the original loop>
     lab3:

   so the device can select the SIMT variant at run time.  Both copies
   are then scanned in OUTER_CTX.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  /* cond = GOMP_USE_SIMT ()  */
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  /* Branch on the result of the run-time SIMT query.  */
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* The SIMT copy: a deep copy of the loop with fresh locals, tagged
     with an OMP_CLAUSE__SIMT_ clause prepended to its clause list.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  /* The plain SIMD path keeps the original statement.  */
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both variants; remember the SIMT twin on the original's ctx.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2392 /* Scan an OpenMP sections directive. */
2394 static void
2395 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2397 omp_context *ctx;
2399 ctx = new_omp_context (stmt, outer_ctx);
2400 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2401 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2404 /* Scan an OpenMP single directive. */
2406 static void
2407 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2409 omp_context *ctx;
2410 tree name;
2412 ctx = new_omp_context (stmt, outer_ctx);
2413 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2414 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2415 name = create_tmp_var_name (".omp_copy_s");
2416 name = build_decl (gimple_location (stmt),
2417 TYPE_DECL, name, ctx->record_type);
2418 TYPE_NAME (ctx->record_type) = name;
2420 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2421 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2423 if (TYPE_FIELDS (ctx->record_type) == NULL)
2424 ctx->record_type = NULL;
2425 else
2426 layout_type (ctx->record_type);
/* Scan a GIMPLE_OMP_TARGET: build its context and the ".omp_data_t"
   record that marshals mapped data; for offloaded regions also create
   the child (offload) function.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      /* Only true offload regions (target, OpenACC parallel/kernels)
	 get an outlined child function.  */
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    /* Nothing was mapped: drop the record and receiver entirely.  */
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were prepended during scanning; restore source order.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All mapped fields are expected to share one alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
/* Scan an OpenMP teams directive.  A teams construct nested in a target
   region is scanned in place; a host teams construct is outlined into a
   child function with an ".omp_data_s" data-sharing record, like
   parallel/task regions.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      /* Nested-in-target teams: no outlining needed.  */
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  /* Host teams: treated like a task region; finish_taskreg_scan will
     finalize the record type later.  */
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Check nesting restrictions.  Return true if STMT may legitimately
   appear inside context CTX according to the OpenMP/OpenACC nesting
   rules; otherwise emit a diagnostic and return false (the caller then
   removes STMT).  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
    /* GRID_BODY is an artificial construct, nesting rules will be checked in
       the original copy of its contents.  */
    return true;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      /* Immediately inside a simd region, only ordered simd and atomics
	 are permitted.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	{
	  c = NULL_TREE;
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  /* ordered simd threads additionally requires a combined
		     for simd parent.  */
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<for simd%> region");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
		    " or %<#pragma omp atomic%> may not be nested inside"
		    " %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
		   && (gimple_omp_for_kind (stmt)
		       != GF_OMP_FOR_KIND_GRID_LOOP)))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%> or %<parallel%> regions are "
			"allowed to be strictly nested inside %<teams%> "
			"region");
	      return false;
	    }
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* Validate GOMP_cancel / GOMP_cancellation_point builtins: the
	 first argument encodes the construct kind being cancelled.  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "#pragma omp cancel"
	      : "#pragma omp cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "#pragma omp parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "#pragma omp for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "#pragma omp sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  /* Inside a SECTION, the cancellable flag belongs on the
		     enclosing SECTIONS context.  */
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "#pragma omp task";
	      else
		{
		  /* Walk outward looking for the taskgroup this cancel
		     binds to; parallel/teams/target boundaries mean there
		     is none.  */
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses,
						    OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      /* Work-sharing regions (and barriers) may not nest in other
	 work-sharing, critical, ordered, master or task regions.  */
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<critical%>, %<ordered%>, "
			  "%<master%>, explicit %<task%> or %<taskloop%> "
			  "region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, explicit %<task%> or %<taskloop%> "
		      "region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink:...) are restricted to omp ordered.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
		      = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					 OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* Self-nesting of same-named critical regions would deadlock.  */
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	      = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct.  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  /* Build human-readable names for the inner and outer target
	     kinds for diagnostics below.  */
	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  warning_at (gimple_location (stmt), 0,
			      "%qs construct inside of %qs region",
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  Remaps decls and types found in the
   operand through the current context's copy-body callbacks.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its remapped counterpart in CTX.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* If the operand's type got remapped, propagate the new
		 type.  Constants are shared, so build a fresh one rather
		 than mutating the original INTEGER_CST in place.  */
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3194 /* Return true if FNDECL is a setjmp or a longjmp. */
3196 static bool
3197 setjmp_or_longjmp_p (const_tree fndecl)
3199 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3200 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3201 return true;
3203 tree declname = DECL_NAME (fndecl);
3204 if (!declname)
3205 return false;
3206 const char *name = IDENTIFIER_POINTER (declname);
3207 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3211 /* Helper function for scan_omp.
3213 Callback for walk_gimple_stmt used to scan for OMP directives in
3214 the current statement in GSI. */
static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  /* Point diagnostics at the statement being scanned.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp calls are rejected inside simd regions.  */
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      /* These GOMP builtins stand in for OMP constructs and are
		 subject to the same nesting restrictions.  */
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      /* Replace the offending statement with a no-op so lowering can
	 continue after the diagnostic.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* Simd loops in a possibly-offloaded context on a SIMT-capable
	 target get the dedicated SIMT scan.  */
      if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      /* Simple constructs: open a new context and scan the body.  */
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  /* Host teams constructs are treated like task regions for
	     nesting accounting.  */
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Let the walker descend into the bind body; just record an
	   identity mapping for each bound variable.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3352 /* Scan all the statements starting at the current statement. CTX
3353 contains context information about the OMP directives and
3354 clauses found during the scan. */
3356 static void
3357 scan_omp (gimple_seq *body_p, omp_context *ctx)
3359 location_t saved_location;
3360 struct walk_stmt_info wi;
3362 memset (&wi, 0, sizeof (wi));
3363 wi.info = ctx;
3364 wi.want_locations = true;
3366 saved_location = input_location;
3367 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3368 input_location = saved_location;
3371 /* Re-gimplification and code generation routines. */
3373 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3374 of BIND if in a method. */
3376 static void
3377 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3379 if (DECL_ARGUMENTS (current_function_decl)
3380 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3381 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3382 == POINTER_TYPE))
3384 tree vars = gimple_bind_vars (bind);
3385 for (tree *pvar = &vars; *pvar; )
3386 if (omp_member_access_dummy_var (*pvar))
3387 *pvar = DECL_CHAIN (*pvar);
3388 else
3389 pvar = &DECL_CHAIN (*pvar);
3390 gimple_bind_set_vars (bind, vars);
3394 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3395 block and its subblocks. */
3397 static void
3398 remove_member_access_dummy_vars (tree block)
3400 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3401 if (omp_member_access_dummy_var (*pvar))
3402 *pvar = DECL_CHAIN (*pvar);
3403 else
3404 pvar = &DECL_CHAIN (*pvar);
3406 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3407 remove_member_access_dummy_vars (block);
3410 /* If a context was created for STMT when it was scanned, return it. */
3412 static omp_context *
3413 maybe_lookup_ctx (gimple *stmt)
3415 splay_tree_node n;
3416 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3417 return n ? (omp_context *) n->value : NULL;
3421 /* Find the mapping for DECL in CTX or the immediately enclosing
3422 context that has a mapping for DECL.
3424 If CTX is a nested parallel directive, we may have to use the decl
3425 mappings created in CTX's parent context. Suppose that we have the
3426 following parallel nesting (variable UIDs showed for clarity):
3428 iD.1562 = 0;
3429 #omp parallel shared(iD.1562) -> outer parallel
3430 iD.1562 = iD.1562 + 1;
3432 #omp parallel shared (iD.1562) -> inner parallel
3433 iD.1562 = iD.1562 - 1;
3435 Each parallel structure will create a distinct .omp_data_s structure
3436 for copying iD.1562 in/out of the directive:
3438 outer parallel .omp_data_s.1.i -> iD.1562
3439 inner parallel .omp_data_s.2.i -> iD.1562
3441 A shared variable mapping will produce a copy-out operation before
3442 the parallel directive and a copy-in operation after it. So, in
3443 this case we would have:
3445 iD.1562 = 0;
3446 .omp_data_o.1.i = iD.1562;
3447 #omp parallel shared(iD.1562) -> outer parallel
3448 .omp_data_i.1 = &.omp_data_o.1
3449 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3451 .omp_data_o.2.i = iD.1562; -> **
3452 #omp parallel shared(iD.1562) -> inner parallel
3453 .omp_data_i.2 = &.omp_data_o.2
3454 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3457 ** This is a problem. The symbol iD.1562 cannot be referenced
3458 inside the body of the outer parallel region. But since we are
3459 emitting this copy operation while expanding the inner parallel
3460 directive, we need to access the CTX structure of the outer
3461 parallel directive to get the correct mapping:
3463 .omp_data_o.2.i = .omp_data_i.1->i
3465 Since there may be other workshare or parallel directives enclosing
3466 the parallel directive, it may be necessary to walk up the context
3467 parent chain. This is not a problem in general because nested
3468 parallelism happens only rarely. */
3470 static tree
3471 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3473 tree t;
3474 omp_context *up;
3476 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3477 t = maybe_lookup_decl (decl, up);
3479 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3481 return t ? t : decl;
3485 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3486 in outer contexts. */
3488 static tree
3489 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3491 tree t = NULL;
3492 omp_context *up;
3494 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3495 t = maybe_lookup_decl (decl, up);
3497 return t ? t : decl;
3501 /* Construct the initialization value for reduction operation OP. */
tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Identity element 0.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* Identity element 1.  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* Identity element: all bits set.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* Identity for max is the smallest representable value:
	 -inf (or the most negative finite value when infinities are
	 not honored) for floats, otherwise the type's minimum.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* Identity for min is the largest representable value.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
3579 /* Construct the initialization value for reduction CLAUSE. */
3581 tree
3582 omp_reduction_init (tree clause, tree type)
3584 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3585 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3588 /* Return alignment to be assumed for var in CLAUSE, which should be
3589 OMP_CLAUSE_ALIGNED. */
static tree
omp_clause_aligned_alignment (tree clause)
{
  /* If an explicit alignment was supplied in the clause, use it.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  /* Largest vector size the target can autovectorize with.  */
  auto_vector_sizes sizes;
  targetm.vectorize.autovectorize_vector_sizes (&sizes);
  poly_uint64 vs = 0;
  for (unsigned int i = 0; i < sizes.length (); ++i)
    vs = ordered_max (vs, sizes[i]);
  /* Scalar class at [i], the matching vector class at [i + 1].  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Widen the vector mode towards the widest supported vector
	   size, doubling while a wider mode exists.  */
	while (maybe_ne (vs, 0U)
	       && known_lt (GET_MODE_SIZE (vmode), vs)
	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();

	/* Build the corresponding vector type and, if it really has
	   mode VMODE, record its alignment.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
				       GET_MODE_SIZE (mode));
	type = build_vector_type (type, nelts);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
3635 /* This structure is part of the interface between lower_rec_simd_input_clauses
3636 and lower_rec_input_clauses. */
struct omplow_simd_context {
  /* Zero-initialize all members.  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Per-iteration index into "omp simd array" privatized copies.  */
  tree idx;
  /* Lane index used for the last-value copy-out.  */
  tree lane;
  /* Extra arguments collected for the SIMT entry.  */
  vec<tree, va_heap> simt_eargs;
  /* Destructor-side statements (e.g. clobbers) for SIMT privates.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 until lazily computed.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT rather than SIMD.  */
  bool is_simt;
};
3648 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3649 privatization. */
static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
{
  /* Compute sctx->max_vf lazily on the first call, clamping it by any
     safelen clause on the loop.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      /* A non-constant or sub-1 safelen forces max_vf = 1.  */
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  /* max_vf == 1 means no simd privatization is possible.  */
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* SIMT: registers are private per-lane already; anything else
	 becomes an addressable "omp simt private" temporary whose
	 address is passed via simt_eargs and which is clobbered in
	 the destructor sequence.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_constructor (type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* SIMD: privatize via an "omp simd array" of max_vf copies,
	 indexed by sctx->idx (IVAR) and sctx->lane (LVAR).  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
    }
  /* Redirect uses of NEW_VAR to the lane element.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
3722 /* Helper function of lower_rec_input_clauses. For a reference
3723 in simd reduction, add an underlying variable it will reference. */
3725 static void
3726 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3728 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3729 if (TREE_CONSTANT (z))
3731 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3732 get_name (new_vard));
3733 gimple_add_tmp_var (z);
3734 TREE_ADDRESSABLE (z) = 1;
3735 z = build_fold_addr_expr_loc (loc, z);
3736 gimplify_assign (new_vard, z, ilist);
3740 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
3741 code to emit (type) (tskred_temp[idx]). */
3743 static tree
3744 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3745 unsigned idx)
3747 unsigned HOST_WIDE_INT sz
3748 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3749 tree r = build2 (MEM_REF, pointer_sized_int_node,
3750 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3751 idx * sz));
3752 tree v = create_tmp_var (pointer_sized_int_node);
3753 gimple *g = gimple_build_assign (v, r);
3754 gimple_seq_add_stmt (ilist, g);
3755 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3757 v = create_tmp_var (type);
3758 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3759 gimple_seq_add_stmt (ilist, g);
3761 return v;
3764 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3765 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3766 private variables. Initialization statements go in ILIST, while calls
3767 to destructors go in DLIST. */
3769 static void
3770 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3771 omp_context *ctx, struct omp_for_data *fd)
3773 tree c, dtor, copyin_seq, x, ptr;
3774 bool copyin_by_ref = false;
3775 bool lastprivate_firstprivate = false;
3776 bool reduction_omp_orig_ref = false;
3777 int pass;
3778 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3779 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3780 omplow_simd_context sctx = omplow_simd_context ();
3781 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3782 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3783 gimple_seq llist[3] = { };
3785 copyin_seq = NULL;
3786 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3788 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3789 with data sharing clauses referencing variable sized vars. That
3790 is unnecessarily hard to support and very unlikely to result in
3791 vectorized code anyway. */
3792 if (is_simd)
3793 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3794 switch (OMP_CLAUSE_CODE (c))
3796 case OMP_CLAUSE_LINEAR:
3797 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3798 sctx.max_vf = 1;
3799 /* FALLTHRU */
3800 case OMP_CLAUSE_PRIVATE:
3801 case OMP_CLAUSE_FIRSTPRIVATE:
3802 case OMP_CLAUSE_LASTPRIVATE:
3803 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3804 sctx.max_vf = 1;
3805 break;
3806 case OMP_CLAUSE_REDUCTION:
3807 case OMP_CLAUSE_IN_REDUCTION:
3808 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3809 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3810 sctx.max_vf = 1;
3811 break;
3812 default:
3813 continue;
3816 /* Add a placeholder for simduid. */
3817 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3818 sctx.simt_eargs.safe_push (NULL_TREE);
3820 unsigned task_reduction_cnt = 0;
3821 unsigned task_reduction_cntorig = 0;
3822 unsigned task_reduction_cnt_full = 0;
3823 unsigned task_reduction_cntorig_full = 0;
3824 unsigned task_reduction_other_cnt = 0;
3825 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3826 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3827 /* Do all the fixed sized types in the first pass, and the variable sized
3828 types in the second pass. This makes sure that the scalar arguments to
3829 the variable sized types are processed before we use them in the
3830 variable sized operations. For task reductions we use 4 passes, in the
3831 first two we ignore them, in the third one gather arguments for
3832 GOMP_task_reduction_remap call and in the last pass actually handle
3833 the task reductions. */
3834 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3835 ? 4 : 2); ++pass)
3837 if (pass == 2 && task_reduction_cnt)
3839 tskred_atype
3840 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3841 + task_reduction_cntorig);
3842 tskred_avar = create_tmp_var_raw (tskred_atype);
3843 gimple_add_tmp_var (tskred_avar);
3844 TREE_ADDRESSABLE (tskred_avar) = 1;
3845 task_reduction_cnt_full = task_reduction_cnt;
3846 task_reduction_cntorig_full = task_reduction_cntorig;
3848 else if (pass == 3 && task_reduction_cnt)
3850 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3851 gimple *g
3852 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3853 size_int (task_reduction_cntorig),
3854 build_fold_addr_expr (tskred_avar));
3855 gimple_seq_add_stmt (ilist, g);
3857 if (pass == 3 && task_reduction_other_cnt)
3859 /* For reduction clauses, build
3860 tskred_base = (void *) tskred_temp[2]
3861 + omp_get_thread_num () * tskred_temp[1]
3862 or if tskred_temp[1] is known to be constant, that constant
3863 directly. This is the start of the private reduction copy block
3864 for the current thread. */
3865 tree v = create_tmp_var (integer_type_node);
3866 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3867 gimple *g = gimple_build_call (x, 0);
3868 gimple_call_set_lhs (g, v);
3869 gimple_seq_add_stmt (ilist, g);
3870 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3871 tskred_temp = OMP_CLAUSE_DECL (c);
3872 if (is_taskreg_ctx (ctx))
3873 tskred_temp = lookup_decl (tskred_temp, ctx);
3874 tree v2 = create_tmp_var (sizetype);
3875 g = gimple_build_assign (v2, NOP_EXPR, v);
3876 gimple_seq_add_stmt (ilist, g);
3877 if (ctx->task_reductions[0])
3878 v = fold_convert (sizetype, ctx->task_reductions[0]);
3879 else
3880 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
3881 tree v3 = create_tmp_var (sizetype);
3882 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
3883 gimple_seq_add_stmt (ilist, g);
3884 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
3885 tskred_base = create_tmp_var (ptr_type_node);
3886 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
3887 gimple_seq_add_stmt (ilist, g);
3889 task_reduction_cnt = 0;
3890 task_reduction_cntorig = 0;
3891 task_reduction_other_cnt = 0;
3892 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3894 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3895 tree var, new_var;
3896 bool by_ref;
3897 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3898 bool task_reduction_p = false;
3899 bool task_reduction_needs_orig_p = false;
3900 tree cond = NULL_TREE;
3902 switch (c_kind)
3904 case OMP_CLAUSE_PRIVATE:
3905 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3906 continue;
3907 break;
3908 case OMP_CLAUSE_SHARED:
3909 /* Ignore shared directives in teams construct inside
3910 of target construct. */
3911 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
3912 && !is_host_teams_ctx (ctx))
3913 continue;
3914 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3916 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3917 || is_global_var (OMP_CLAUSE_DECL (c)));
3918 continue;
3920 case OMP_CLAUSE_FIRSTPRIVATE:
3921 case OMP_CLAUSE_COPYIN:
3922 break;
3923 case OMP_CLAUSE_LINEAR:
3924 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3925 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3926 lastprivate_firstprivate = true;
3927 break;
3928 case OMP_CLAUSE_REDUCTION:
3929 case OMP_CLAUSE_IN_REDUCTION:
3930 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
3932 task_reduction_p = true;
3933 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
3935 task_reduction_other_cnt++;
3936 if (pass == 2)
3937 continue;
3939 else
3940 task_reduction_cnt++;
3941 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3943 var = OMP_CLAUSE_DECL (c);
3944 /* If var is a global variable that isn't privatized
3945 in outer contexts, we don't need to look up the
3946 original address, it is always the address of the
3947 global variable itself. */
3948 if (!DECL_P (var)
3949 || omp_is_reference (var)
3950 || !is_global_var
3951 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
3953 task_reduction_needs_orig_p = true;
3954 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
3955 task_reduction_cntorig++;
3959 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3960 reduction_omp_orig_ref = true;
3961 break;
3962 case OMP_CLAUSE__REDUCTEMP_:
3963 if (!is_taskreg_ctx (ctx))
3964 continue;
3965 /* FALLTHRU */
3966 case OMP_CLAUSE__LOOPTEMP_:
3967 /* Handle _looptemp_/_reductemp_ clauses only on
3968 parallel/task. */
3969 if (fd)
3970 continue;
3971 break;
3972 case OMP_CLAUSE_LASTPRIVATE:
3973 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3975 lastprivate_firstprivate = true;
3976 if (pass != 0 || is_taskloop_ctx (ctx))
3977 continue;
3979 /* Even without corresponding firstprivate, if
3980 decl is Fortran allocatable, it needs outer var
3981 reference. */
3982 else if (pass == 0
3983 && lang_hooks.decls.omp_private_outer_ref
3984 (OMP_CLAUSE_DECL (c)))
3985 lastprivate_firstprivate = true;
3986 break;
3987 case OMP_CLAUSE_ALIGNED:
3988 if (pass != 1)
3989 continue;
3990 var = OMP_CLAUSE_DECL (c);
3991 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3992 && !is_global_var (var))
3994 new_var = maybe_lookup_decl (var, ctx);
3995 if (new_var == NULL_TREE)
3996 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3997 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3998 tree alarg = omp_clause_aligned_alignment (c);
3999 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4000 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4001 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4002 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4003 gimplify_and_add (x, ilist);
4005 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4006 && is_global_var (var))
4008 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4009 new_var = lookup_decl (var, ctx);
4010 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4011 t = build_fold_addr_expr_loc (clause_loc, t);
4012 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4013 tree alarg = omp_clause_aligned_alignment (c);
4014 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4015 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4016 t = fold_convert_loc (clause_loc, ptype, t);
4017 x = create_tmp_var (ptype);
4018 t = build2 (MODIFY_EXPR, ptype, x, t);
4019 gimplify_and_add (t, ilist);
4020 t = build_simple_mem_ref_loc (clause_loc, x);
4021 SET_DECL_VALUE_EXPR (new_var, t);
4022 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4024 continue;
4025 default:
4026 continue;
4029 if (task_reduction_p != (pass >= 2))
4030 continue;
4032 new_var = var = OMP_CLAUSE_DECL (c);
4033 if ((c_kind == OMP_CLAUSE_REDUCTION
4034 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4035 && TREE_CODE (var) == MEM_REF)
4037 var = TREE_OPERAND (var, 0);
4038 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4039 var = TREE_OPERAND (var, 0);
4040 if (TREE_CODE (var) == INDIRECT_REF
4041 || TREE_CODE (var) == ADDR_EXPR)
4042 var = TREE_OPERAND (var, 0);
4043 if (is_variable_sized (var))
4045 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4046 var = DECL_VALUE_EXPR (var);
4047 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4048 var = TREE_OPERAND (var, 0);
4049 gcc_assert (DECL_P (var));
4051 new_var = var;
4053 if (c_kind != OMP_CLAUSE_COPYIN)
4054 new_var = lookup_decl (var, ctx);
4056 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4058 if (pass != 0)
4059 continue;
4061 /* C/C++ array section reductions. */
4062 else if ((c_kind == OMP_CLAUSE_REDUCTION
4063 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4064 && var != OMP_CLAUSE_DECL (c))
4066 if (pass == 0)
4067 continue;
4069 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4070 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4072 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4074 tree b = TREE_OPERAND (orig_var, 1);
4075 b = maybe_lookup_decl (b, ctx);
4076 if (b == NULL)
4078 b = TREE_OPERAND (orig_var, 1);
4079 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4081 if (integer_zerop (bias))
4082 bias = b;
4083 else
4085 bias = fold_convert_loc (clause_loc,
4086 TREE_TYPE (b), bias);
4087 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4088 TREE_TYPE (b), b, bias);
4090 orig_var = TREE_OPERAND (orig_var, 0);
4092 if (pass == 2)
4094 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4095 if (is_global_var (out)
4096 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4097 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4098 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4099 != POINTER_TYPE)))
4100 x = var;
4101 else
4103 bool by_ref = use_pointer_for_field (var, NULL);
4104 x = build_receiver_ref (var, by_ref, ctx);
4105 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4106 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4107 == POINTER_TYPE))
4108 x = build_fold_addr_expr (x);
4110 if (TREE_CODE (orig_var) == INDIRECT_REF)
4111 x = build_simple_mem_ref (x);
4112 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4114 if (var == TREE_OPERAND (orig_var, 0))
4115 x = build_fold_addr_expr (x);
4117 bias = fold_convert (sizetype, bias);
4118 x = fold_convert (ptr_type_node, x);
4119 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4120 TREE_TYPE (x), x, bias);
4121 unsigned cnt = task_reduction_cnt - 1;
4122 if (!task_reduction_needs_orig_p)
4123 cnt += (task_reduction_cntorig_full
4124 - task_reduction_cntorig);
4125 else
4126 cnt = task_reduction_cntorig - 1;
4127 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4128 size_int (cnt), NULL_TREE, NULL_TREE);
4129 gimplify_assign (r, x, ilist);
4130 continue;
4133 if (TREE_CODE (orig_var) == INDIRECT_REF
4134 || TREE_CODE (orig_var) == ADDR_EXPR)
4135 orig_var = TREE_OPERAND (orig_var, 0);
4136 tree d = OMP_CLAUSE_DECL (c);
4137 tree type = TREE_TYPE (d);
4138 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4139 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4140 const char *name = get_name (orig_var);
4141 if (pass == 3)
4143 tree xv = create_tmp_var (ptr_type_node);
4144 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4146 unsigned cnt = task_reduction_cnt - 1;
4147 if (!task_reduction_needs_orig_p)
4148 cnt += (task_reduction_cntorig_full
4149 - task_reduction_cntorig);
4150 else
4151 cnt = task_reduction_cntorig - 1;
4152 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4153 size_int (cnt), NULL_TREE, NULL_TREE);
4155 gimple *g = gimple_build_assign (xv, x);
4156 gimple_seq_add_stmt (ilist, g);
4158 else
4160 unsigned int idx = *ctx->task_reduction_map->get (c);
4161 tree off;
4162 if (ctx->task_reductions[1 + idx])
4163 off = fold_convert (sizetype,
4164 ctx->task_reductions[1 + idx]);
4165 else
4166 off = task_reduction_read (ilist, tskred_temp, sizetype,
4167 7 + 3 * idx + 1);
4168 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4169 tskred_base, off);
4170 gimple_seq_add_stmt (ilist, g);
4172 x = fold_convert (build_pointer_type (boolean_type_node),
4173 xv);
4174 if (TREE_CONSTANT (v))
4175 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4176 TYPE_SIZE_UNIT (type));
4177 else
4179 tree t = maybe_lookup_decl (v, ctx);
4180 if (t)
4181 v = t;
4182 else
4183 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4184 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4185 fb_rvalue);
4186 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4187 TREE_TYPE (v), v,
4188 build_int_cst (TREE_TYPE (v), 1));
4189 t = fold_build2_loc (clause_loc, MULT_EXPR,
4190 TREE_TYPE (v), t,
4191 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4192 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4194 cond = create_tmp_var (TREE_TYPE (x));
4195 gimplify_assign (cond, x, ilist);
4196 x = xv;
4198 else if (TREE_CONSTANT (v))
4200 x = create_tmp_var_raw (type, name);
4201 gimple_add_tmp_var (x);
4202 TREE_ADDRESSABLE (x) = 1;
4203 x = build_fold_addr_expr_loc (clause_loc, x);
4205 else
4207 tree atmp
4208 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4209 tree t = maybe_lookup_decl (v, ctx);
4210 if (t)
4211 v = t;
4212 else
4213 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4214 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4215 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4216 TREE_TYPE (v), v,
4217 build_int_cst (TREE_TYPE (v), 1));
4218 t = fold_build2_loc (clause_loc, MULT_EXPR,
4219 TREE_TYPE (v), t,
4220 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4221 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4222 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4225 tree ptype = build_pointer_type (TREE_TYPE (type));
4226 x = fold_convert_loc (clause_loc, ptype, x);
4227 tree y = create_tmp_var (ptype, name);
4228 gimplify_assign (y, x, ilist);
4229 x = y;
4230 tree yb = y;
4232 if (!integer_zerop (bias))
4234 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4235 bias);
4236 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4238 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4239 pointer_sized_int_node, yb, bias);
4240 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4241 yb = create_tmp_var (ptype, name);
4242 gimplify_assign (yb, x, ilist);
4243 x = yb;
4246 d = TREE_OPERAND (d, 0);
4247 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4248 d = TREE_OPERAND (d, 0);
4249 if (TREE_CODE (d) == ADDR_EXPR)
4251 if (orig_var != var)
4253 gcc_assert (is_variable_sized (orig_var));
4254 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4256 gimplify_assign (new_var, x, ilist);
4257 tree new_orig_var = lookup_decl (orig_var, ctx);
4258 tree t = build_fold_indirect_ref (new_var);
4259 DECL_IGNORED_P (new_var) = 0;
4260 TREE_THIS_NOTRAP (t) = 1;
4261 SET_DECL_VALUE_EXPR (new_orig_var, t);
4262 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4264 else
4266 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4267 build_int_cst (ptype, 0));
4268 SET_DECL_VALUE_EXPR (new_var, x);
4269 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4272 else
4274 gcc_assert (orig_var == var);
4275 if (TREE_CODE (d) == INDIRECT_REF)
4277 x = create_tmp_var (ptype, name);
4278 TREE_ADDRESSABLE (x) = 1;
4279 gimplify_assign (x, yb, ilist);
4280 x = build_fold_addr_expr_loc (clause_loc, x);
4282 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4283 gimplify_assign (new_var, x, ilist);
4285 /* GOMP_taskgroup_reduction_register memsets the whole
4286 array to zero. If the initializer is zero, we don't
4287 need to initialize it again, just mark it as ever
4288 used unconditionally, i.e. cond = true. */
4289 if (cond
4290 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4291 && initializer_zerop (omp_reduction_init (c,
4292 TREE_TYPE (type))))
4294 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4295 boolean_true_node);
4296 gimple_seq_add_stmt (ilist, g);
4297 continue;
4299 tree end = create_artificial_label (UNKNOWN_LOCATION);
4300 if (cond)
4302 gimple *g;
4303 if (!is_parallel_ctx (ctx))
4305 tree condv = create_tmp_var (boolean_type_node);
4306 g = gimple_build_assign (condv,
4307 build_simple_mem_ref (cond));
4308 gimple_seq_add_stmt (ilist, g);
4309 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4310 g = gimple_build_cond (NE_EXPR, condv,
4311 boolean_false_node, end, lab1);
4312 gimple_seq_add_stmt (ilist, g);
4313 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4315 g = gimple_build_assign (build_simple_mem_ref (cond),
4316 boolean_true_node);
4317 gimple_seq_add_stmt (ilist, g);
4320 tree y1 = create_tmp_var (ptype);
4321 gimplify_assign (y1, y, ilist);
4322 tree i2 = NULL_TREE, y2 = NULL_TREE;
4323 tree body2 = NULL_TREE, end2 = NULL_TREE;
4324 tree y3 = NULL_TREE, y4 = NULL_TREE;
4325 if (task_reduction_needs_orig_p)
4327 y3 = create_tmp_var (ptype);
4328 tree ref;
4329 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4330 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4331 size_int (task_reduction_cnt_full
4332 + task_reduction_cntorig - 1),
4333 NULL_TREE, NULL_TREE);
4334 else
4336 unsigned int idx = *ctx->task_reduction_map->get (c);
4337 ref = task_reduction_read (ilist, tskred_temp, ptype,
4338 7 + 3 * idx);
4340 gimplify_assign (y3, ref, ilist);
4342 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4344 if (pass != 3)
4346 y2 = create_tmp_var (ptype);
4347 gimplify_assign (y2, y, ilist);
4349 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4351 tree ref = build_outer_var_ref (var, ctx);
4352 /* For ref build_outer_var_ref already performs this. */
4353 if (TREE_CODE (d) == INDIRECT_REF)
4354 gcc_assert (omp_is_reference (var));
4355 else if (TREE_CODE (d) == ADDR_EXPR)
4356 ref = build_fold_addr_expr (ref);
4357 else if (omp_is_reference (var))
4358 ref = build_fold_addr_expr (ref);
4359 ref = fold_convert_loc (clause_loc, ptype, ref);
4360 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4361 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4363 y3 = create_tmp_var (ptype);
4364 gimplify_assign (y3, unshare_expr (ref), ilist);
4366 if (is_simd)
4368 y4 = create_tmp_var (ptype);
4369 gimplify_assign (y4, ref, dlist);
4373 tree i = create_tmp_var (TREE_TYPE (v));
4374 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4375 tree body = create_artificial_label (UNKNOWN_LOCATION);
4376 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4377 if (y2)
4379 i2 = create_tmp_var (TREE_TYPE (v));
4380 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4381 body2 = create_artificial_label (UNKNOWN_LOCATION);
4382 end2 = create_artificial_label (UNKNOWN_LOCATION);
4383 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4385 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4387 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4388 tree decl_placeholder
4389 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4390 SET_DECL_VALUE_EXPR (decl_placeholder,
4391 build_simple_mem_ref (y1));
4392 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4393 SET_DECL_VALUE_EXPR (placeholder,
4394 y3 ? build_simple_mem_ref (y3)
4395 : error_mark_node);
4396 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4397 x = lang_hooks.decls.omp_clause_default_ctor
4398 (c, build_simple_mem_ref (y1),
4399 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4400 if (x)
4401 gimplify_and_add (x, ilist);
4402 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4404 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4405 lower_omp (&tseq, ctx);
4406 gimple_seq_add_seq (ilist, tseq);
4408 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4409 if (is_simd)
4411 SET_DECL_VALUE_EXPR (decl_placeholder,
4412 build_simple_mem_ref (y2));
4413 SET_DECL_VALUE_EXPR (placeholder,
4414 build_simple_mem_ref (y4));
4415 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4416 lower_omp (&tseq, ctx);
4417 gimple_seq_add_seq (dlist, tseq);
4418 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4420 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4421 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4422 if (y2)
4424 x = lang_hooks.decls.omp_clause_dtor
4425 (c, build_simple_mem_ref (y2));
4426 if (x)
4428 gimple_seq tseq = NULL;
4429 dtor = x;
4430 gimplify_stmt (&dtor, &tseq);
4431 gimple_seq_add_seq (dlist, tseq);
4435 else
4437 x = omp_reduction_init (c, TREE_TYPE (type));
4438 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4440 /* reduction(-:var) sums up the partial results, so it
4441 acts identically to reduction(+:var). */
4442 if (code == MINUS_EXPR)
4443 code = PLUS_EXPR;
4445 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4446 if (is_simd)
4448 x = build2 (code, TREE_TYPE (type),
4449 build_simple_mem_ref (y4),
4450 build_simple_mem_ref (y2));
4451 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4454 gimple *g
4455 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4456 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4457 gimple_seq_add_stmt (ilist, g);
4458 if (y3)
4460 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4461 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4462 gimple_seq_add_stmt (ilist, g);
4464 g = gimple_build_assign (i, PLUS_EXPR, i,
4465 build_int_cst (TREE_TYPE (i), 1));
4466 gimple_seq_add_stmt (ilist, g);
4467 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4468 gimple_seq_add_stmt (ilist, g);
4469 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4470 if (y2)
4472 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4473 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4474 gimple_seq_add_stmt (dlist, g);
4475 if (y4)
4477 g = gimple_build_assign
4478 (y4, POINTER_PLUS_EXPR, y4,
4479 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4480 gimple_seq_add_stmt (dlist, g);
4482 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4483 build_int_cst (TREE_TYPE (i2), 1));
4484 gimple_seq_add_stmt (dlist, g);
4485 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4486 gimple_seq_add_stmt (dlist, g);
4487 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4489 continue;
4491 else if (pass == 2)
4493 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4494 x = var;
4495 else
4497 bool by_ref = use_pointer_for_field (var, ctx);
4498 x = build_receiver_ref (var, by_ref, ctx);
4500 if (!omp_is_reference (var))
4501 x = build_fold_addr_expr (x);
4502 x = fold_convert (ptr_type_node, x);
4503 unsigned cnt = task_reduction_cnt - 1;
4504 if (!task_reduction_needs_orig_p)
4505 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4506 else
4507 cnt = task_reduction_cntorig - 1;
4508 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4509 size_int (cnt), NULL_TREE, NULL_TREE);
4510 gimplify_assign (r, x, ilist);
4511 continue;
4513 else if (pass == 3)
4515 tree type = TREE_TYPE (new_var);
4516 if (!omp_is_reference (var))
4517 type = build_pointer_type (type);
4518 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4520 unsigned cnt = task_reduction_cnt - 1;
4521 if (!task_reduction_needs_orig_p)
4522 cnt += (task_reduction_cntorig_full
4523 - task_reduction_cntorig);
4524 else
4525 cnt = task_reduction_cntorig - 1;
4526 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4527 size_int (cnt), NULL_TREE, NULL_TREE);
4529 else
4531 unsigned int idx = *ctx->task_reduction_map->get (c);
4532 tree off;
4533 if (ctx->task_reductions[1 + idx])
4534 off = fold_convert (sizetype,
4535 ctx->task_reductions[1 + idx]);
4536 else
4537 off = task_reduction_read (ilist, tskred_temp, sizetype,
4538 7 + 3 * idx + 1);
4539 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4540 tskred_base, off);
4542 x = fold_convert (type, x);
4543 tree t;
4544 if (omp_is_reference (var))
4546 gimplify_assign (new_var, x, ilist);
4547 t = new_var;
4548 new_var = build_simple_mem_ref (new_var);
4550 else
4552 t = create_tmp_var (type);
4553 gimplify_assign (t, x, ilist);
4554 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4555 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4557 t = fold_convert (build_pointer_type (boolean_type_node), t);
4558 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4559 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4560 cond = create_tmp_var (TREE_TYPE (t));
4561 gimplify_assign (cond, t, ilist);
4563 else if (is_variable_sized (var))
4565 /* For variable sized types, we need to allocate the
4566 actual storage here. Call alloca and store the
4567 result in the pointer decl that we created elsewhere. */
4568 if (pass == 0)
4569 continue;
4571 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4573 gcall *stmt;
4574 tree tmp, atmp;
4576 ptr = DECL_VALUE_EXPR (new_var);
4577 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4578 ptr = TREE_OPERAND (ptr, 0);
4579 gcc_assert (DECL_P (ptr));
4580 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4582 /* void *tmp = __builtin_alloca */
4583 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4584 stmt = gimple_build_call (atmp, 2, x,
4585 size_int (DECL_ALIGN (var)));
4586 tmp = create_tmp_var_raw (ptr_type_node);
4587 gimple_add_tmp_var (tmp);
4588 gimple_call_set_lhs (stmt, tmp);
4590 gimple_seq_add_stmt (ilist, stmt);
4592 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4593 gimplify_assign (ptr, x, ilist);
4596 else if (omp_is_reference (var)
4597 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4598 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4600 /* For references that are being privatized for Fortran,
4601 allocate new backing storage for the new pointer
4602 variable. This allows us to avoid changing all the
4603 code that expects a pointer to something that expects
4604 a direct variable. */
4605 if (pass == 0)
4606 continue;
4608 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4609 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4611 x = build_receiver_ref (var, false, ctx);
4612 x = build_fold_addr_expr_loc (clause_loc, x);
4614 else if (TREE_CONSTANT (x))
4616 /* For reduction in SIMD loop, defer adding the
4617 initialization of the reference, because if we decide
4618 to use SIMD array for it, the initialization could cause
4619 expansion ICE. */
4620 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4621 x = NULL_TREE;
4622 else
4624 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4625 get_name (var));
4626 gimple_add_tmp_var (x);
4627 TREE_ADDRESSABLE (x) = 1;
4628 x = build_fold_addr_expr_loc (clause_loc, x);
4631 else
4633 tree atmp
4634 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4635 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4636 tree al = size_int (TYPE_ALIGN (rtype));
4637 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4640 if (x)
4642 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4643 gimplify_assign (new_var, x, ilist);
4646 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4648 else if ((c_kind == OMP_CLAUSE_REDUCTION
4649 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4650 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4652 if (pass == 0)
4653 continue;
4655 else if (pass != 0)
4656 continue;
4658 switch (OMP_CLAUSE_CODE (c))
4660 case OMP_CLAUSE_SHARED:
4661 /* Ignore shared directives in teams construct inside
4662 target construct. */
4663 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4664 && !is_host_teams_ctx (ctx))
4665 continue;
4666 /* Shared global vars are just accessed directly. */
4667 if (is_global_var (new_var))
4668 break;
4669 /* For taskloop firstprivate/lastprivate, represented
4670 as firstprivate and shared clause on the task, new_var
4671 is the firstprivate var. */
4672 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4673 break;
4674 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4675 needs to be delayed until after fixup_child_record_type so
4676 that we get the correct type during the dereference. */
4677 by_ref = use_pointer_for_field (var, ctx);
4678 x = build_receiver_ref (var, by_ref, ctx);
4679 SET_DECL_VALUE_EXPR (new_var, x);
4680 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4682 /* ??? If VAR is not passed by reference, and the variable
4683 hasn't been initialized yet, then we'll get a warning for
4684 the store into the omp_data_s structure. Ideally, we'd be
4685 able to notice this and not store anything at all, but
4686 we're generating code too early. Suppress the warning. */
4687 if (!by_ref)
4688 TREE_NO_WARNING (var) = 1;
4689 break;
4691 case OMP_CLAUSE_LASTPRIVATE:
4692 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4693 break;
4694 /* FALLTHRU */
4696 case OMP_CLAUSE_PRIVATE:
4697 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4698 x = build_outer_var_ref (var, ctx);
4699 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4701 if (is_task_ctx (ctx))
4702 x = build_receiver_ref (var, false, ctx);
4703 else
4704 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4706 else
4707 x = NULL;
4708 do_private:
4709 tree nx;
4710 nx = lang_hooks.decls.omp_clause_default_ctor
4711 (c, unshare_expr (new_var), x);
4712 if (is_simd)
4714 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4715 if ((TREE_ADDRESSABLE (new_var) || nx || y
4716 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4717 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4718 ivar, lvar))
4720 if (nx)
4721 x = lang_hooks.decls.omp_clause_default_ctor
4722 (c, unshare_expr (ivar), x);
4723 if (nx && x)
4724 gimplify_and_add (x, &llist[0]);
4725 if (y)
4727 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4728 if (y)
4730 gimple_seq tseq = NULL;
4732 dtor = y;
4733 gimplify_stmt (&dtor, &tseq);
4734 gimple_seq_add_seq (&llist[1], tseq);
4737 break;
4740 if (nx)
4741 gimplify_and_add (nx, ilist);
4742 /* FALLTHRU */
4744 do_dtor:
4745 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4746 if (x)
4748 gimple_seq tseq = NULL;
4750 dtor = x;
4751 gimplify_stmt (&dtor, &tseq);
4752 gimple_seq_add_seq (dlist, tseq);
4754 break;
4756 case OMP_CLAUSE_LINEAR:
4757 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4758 goto do_firstprivate;
4759 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4760 x = NULL;
4761 else
4762 x = build_outer_var_ref (var, ctx);
4763 goto do_private;
4765 case OMP_CLAUSE_FIRSTPRIVATE:
4766 if (is_task_ctx (ctx))
4768 if ((omp_is_reference (var)
4769 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4770 || is_variable_sized (var))
4771 goto do_dtor;
4772 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4773 ctx))
4774 || use_pointer_for_field (var, NULL))
4776 x = build_receiver_ref (var, false, ctx);
4777 SET_DECL_VALUE_EXPR (new_var, x);
4778 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4779 goto do_dtor;
4782 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4783 && omp_is_reference (var))
4785 x = build_outer_var_ref (var, ctx);
4786 gcc_assert (TREE_CODE (x) == MEM_REF
4787 && integer_zerop (TREE_OPERAND (x, 1)));
4788 x = TREE_OPERAND (x, 0);
4789 x = lang_hooks.decls.omp_clause_copy_ctor
4790 (c, unshare_expr (new_var), x);
4791 gimplify_and_add (x, ilist);
4792 goto do_dtor;
4794 do_firstprivate:
4795 x = build_outer_var_ref (var, ctx);
4796 if (is_simd)
4798 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4799 && gimple_omp_for_combined_into_p (ctx->stmt))
4801 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4802 tree stept = TREE_TYPE (t);
4803 tree ct = omp_find_clause (clauses,
4804 OMP_CLAUSE__LOOPTEMP_);
4805 gcc_assert (ct);
4806 tree l = OMP_CLAUSE_DECL (ct);
4807 tree n1 = fd->loop.n1;
4808 tree step = fd->loop.step;
4809 tree itype = TREE_TYPE (l);
4810 if (POINTER_TYPE_P (itype))
4811 itype = signed_type_for (itype);
4812 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4813 if (TYPE_UNSIGNED (itype)
4814 && fd->loop.cond_code == GT_EXPR)
4815 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4816 fold_build1 (NEGATE_EXPR, itype, l),
4817 fold_build1 (NEGATE_EXPR,
4818 itype, step));
4819 else
4820 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4821 t = fold_build2 (MULT_EXPR, stept,
4822 fold_convert (stept, l), t);
4824 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4826 x = lang_hooks.decls.omp_clause_linear_ctor
4827 (c, new_var, x, t);
4828 gimplify_and_add (x, ilist);
4829 goto do_dtor;
4832 if (POINTER_TYPE_P (TREE_TYPE (x)))
4833 x = fold_build2 (POINTER_PLUS_EXPR,
4834 TREE_TYPE (x), x, t);
4835 else
4836 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4839 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4840 || TREE_ADDRESSABLE (new_var))
4841 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4842 ivar, lvar))
4844 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4846 tree iv = create_tmp_var (TREE_TYPE (new_var));
4847 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4848 gimplify_and_add (x, ilist);
4849 gimple_stmt_iterator gsi
4850 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4851 gassign *g
4852 = gimple_build_assign (unshare_expr (lvar), iv);
4853 gsi_insert_before_without_update (&gsi, g,
4854 GSI_SAME_STMT);
4855 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4856 enum tree_code code = PLUS_EXPR;
4857 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4858 code = POINTER_PLUS_EXPR;
4859 g = gimple_build_assign (iv, code, iv, t);
4860 gsi_insert_before_without_update (&gsi, g,
4861 GSI_SAME_STMT);
4862 break;
4864 x = lang_hooks.decls.omp_clause_copy_ctor
4865 (c, unshare_expr (ivar), x);
4866 gimplify_and_add (x, &llist[0]);
4867 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4868 if (x)
4870 gimple_seq tseq = NULL;
4872 dtor = x;
4873 gimplify_stmt (&dtor, &tseq);
4874 gimple_seq_add_seq (&llist[1], tseq);
4876 break;
4879 x = lang_hooks.decls.omp_clause_copy_ctor
4880 (c, unshare_expr (new_var), x);
4881 gimplify_and_add (x, ilist);
4882 goto do_dtor;
4884 case OMP_CLAUSE__LOOPTEMP_:
4885 case OMP_CLAUSE__REDUCTEMP_:
4886 gcc_assert (is_taskreg_ctx (ctx));
4887 x = build_outer_var_ref (var, ctx);
4888 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4889 gimplify_and_add (x, ilist);
4890 break;
4892 case OMP_CLAUSE_COPYIN:
4893 by_ref = use_pointer_for_field (var, NULL);
4894 x = build_receiver_ref (var, by_ref, ctx);
4895 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4896 append_to_statement_list (x, &copyin_seq);
4897 copyin_by_ref |= by_ref;
4898 break;
4900 case OMP_CLAUSE_REDUCTION:
4901 case OMP_CLAUSE_IN_REDUCTION:
4902 /* OpenACC reductions are initialized using the
4903 GOACC_REDUCTION internal function. */
4904 if (is_gimple_omp_oacc (ctx->stmt))
4905 break;
4906 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4908 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4909 gimple *tseq;
4910 tree ptype = TREE_TYPE (placeholder);
4911 if (cond)
4913 x = error_mark_node;
4914 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
4915 && !task_reduction_needs_orig_p)
4916 x = var;
4917 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4919 tree pptype = build_pointer_type (ptype);
4920 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4921 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4922 size_int (task_reduction_cnt_full
4923 + task_reduction_cntorig - 1),
4924 NULL_TREE, NULL_TREE);
4925 else
4927 unsigned int idx
4928 = *ctx->task_reduction_map->get (c);
4929 x = task_reduction_read (ilist, tskred_temp,
4930 pptype, 7 + 3 * idx);
4932 x = fold_convert (pptype, x);
4933 x = build_simple_mem_ref (x);
4936 else
4938 x = build_outer_var_ref (var, ctx);
4940 if (omp_is_reference (var)
4941 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
4942 x = build_fold_addr_expr_loc (clause_loc, x);
4944 SET_DECL_VALUE_EXPR (placeholder, x);
4945 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4946 tree new_vard = new_var;
4947 if (omp_is_reference (var))
4949 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4950 new_vard = TREE_OPERAND (new_var, 0);
4951 gcc_assert (DECL_P (new_vard));
4953 if (is_simd
4954 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4955 ivar, lvar))
4957 if (new_vard == new_var)
4959 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4960 SET_DECL_VALUE_EXPR (new_var, ivar);
4962 else
4964 SET_DECL_VALUE_EXPR (new_vard,
4965 build_fold_addr_expr (ivar));
4966 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4968 x = lang_hooks.decls.omp_clause_default_ctor
4969 (c, unshare_expr (ivar),
4970 build_outer_var_ref (var, ctx));
4971 if (x)
4972 gimplify_and_add (x, &llist[0]);
4973 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4975 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4976 lower_omp (&tseq, ctx);
4977 gimple_seq_add_seq (&llist[0], tseq);
4979 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4980 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4981 lower_omp (&tseq, ctx);
4982 gimple_seq_add_seq (&llist[1], tseq);
4983 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4984 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4985 if (new_vard == new_var)
4986 SET_DECL_VALUE_EXPR (new_var, lvar);
4987 else
4988 SET_DECL_VALUE_EXPR (new_vard,
4989 build_fold_addr_expr (lvar));
4990 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4991 if (x)
4993 tseq = NULL;
4994 dtor = x;
4995 gimplify_stmt (&dtor, &tseq);
4996 gimple_seq_add_seq (&llist[1], tseq);
4998 break;
5000 /* If this is a reference to constant size reduction var
5001 with placeholder, we haven't emitted the initializer
5002 for it because it is undesirable if SIMD arrays are used.
5003 But if they aren't used, we need to emit the deferred
5004 initialization now. */
5005 else if (omp_is_reference (var) && is_simd)
5006 handle_simd_reference (clause_loc, new_vard, ilist);
5008 tree lab2 = NULL_TREE;
5009 if (cond)
5011 gimple *g;
5012 if (!is_parallel_ctx (ctx))
5014 tree condv = create_tmp_var (boolean_type_node);
5015 tree m = build_simple_mem_ref (cond);
5016 g = gimple_build_assign (condv, m);
5017 gimple_seq_add_stmt (ilist, g);
5018 tree lab1
5019 = create_artificial_label (UNKNOWN_LOCATION);
5020 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5021 g = gimple_build_cond (NE_EXPR, condv,
5022 boolean_false_node,
5023 lab2, lab1);
5024 gimple_seq_add_stmt (ilist, g);
5025 gimple_seq_add_stmt (ilist,
5026 gimple_build_label (lab1));
5028 g = gimple_build_assign (build_simple_mem_ref (cond),
5029 boolean_true_node);
5030 gimple_seq_add_stmt (ilist, g);
5032 x = lang_hooks.decls.omp_clause_default_ctor
5033 (c, unshare_expr (new_var),
5034 cond ? NULL_TREE
5035 : build_outer_var_ref (var, ctx));
5036 if (x)
5037 gimplify_and_add (x, ilist);
5038 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5040 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5041 lower_omp (&tseq, ctx);
5042 gimple_seq_add_seq (ilist, tseq);
5044 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5045 if (is_simd)
5047 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5048 lower_omp (&tseq, ctx);
5049 gimple_seq_add_seq (dlist, tseq);
5050 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5052 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5053 if (cond)
5055 if (lab2)
5056 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5057 break;
5059 goto do_dtor;
5061 else
5063 x = omp_reduction_init (c, TREE_TYPE (new_var));
5064 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5065 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5067 if (cond)
5069 gimple *g;
5070 tree lab2 = NULL_TREE;
5071 /* GOMP_taskgroup_reduction_register memsets the whole
5072 array to zero. If the initializer is zero, we don't
5073 need to initialize it again, just mark it as ever
5074 used unconditionally, i.e. cond = true. */
5075 if (initializer_zerop (x))
5077 g = gimple_build_assign (build_simple_mem_ref (cond),
5078 boolean_true_node);
5079 gimple_seq_add_stmt (ilist, g);
5080 break;
5083 /* Otherwise, emit
5084 if (!cond) { cond = true; new_var = x; } */
5085 if (!is_parallel_ctx (ctx))
5087 tree condv = create_tmp_var (boolean_type_node);
5088 tree m = build_simple_mem_ref (cond);
5089 g = gimple_build_assign (condv, m);
5090 gimple_seq_add_stmt (ilist, g);
5091 tree lab1
5092 = create_artificial_label (UNKNOWN_LOCATION);
5093 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5094 g = gimple_build_cond (NE_EXPR, condv,
5095 boolean_false_node,
5096 lab2, lab1);
5097 gimple_seq_add_stmt (ilist, g);
5098 gimple_seq_add_stmt (ilist,
5099 gimple_build_label (lab1));
5101 g = gimple_build_assign (build_simple_mem_ref (cond),
5102 boolean_true_node);
5103 gimple_seq_add_stmt (ilist, g);
5104 gimplify_assign (new_var, x, ilist);
5105 if (lab2)
5106 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5107 break;
5110 /* reduction(-:var) sums up the partial results, so it
5111 acts identically to reduction(+:var). */
5112 if (code == MINUS_EXPR)
5113 code = PLUS_EXPR;
5115 tree new_vard = new_var;
5116 if (is_simd && omp_is_reference (var))
5118 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5119 new_vard = TREE_OPERAND (new_var, 0);
5120 gcc_assert (DECL_P (new_vard));
5122 if (is_simd
5123 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5124 ivar, lvar))
5126 tree ref = build_outer_var_ref (var, ctx);
5128 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5130 if (sctx.is_simt)
5132 if (!simt_lane)
5133 simt_lane = create_tmp_var (unsigned_type_node);
5134 x = build_call_expr_internal_loc
5135 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5136 TREE_TYPE (ivar), 2, ivar, simt_lane);
5137 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5138 gimplify_assign (ivar, x, &llist[2]);
5140 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5141 ref = build_outer_var_ref (var, ctx);
5142 gimplify_assign (ref, x, &llist[1]);
5144 if (new_vard != new_var)
5146 SET_DECL_VALUE_EXPR (new_vard,
5147 build_fold_addr_expr (lvar));
5148 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5151 else
5153 if (omp_is_reference (var) && is_simd)
5154 handle_simd_reference (clause_loc, new_vard, ilist);
5155 gimplify_assign (new_var, x, ilist);
5156 if (is_simd)
5158 tree ref = build_outer_var_ref (var, ctx);
5160 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5161 ref = build_outer_var_ref (var, ctx);
5162 gimplify_assign (ref, x, dlist);
5166 break;
5168 default:
5169 gcc_unreachable ();
5173 if (tskred_avar)
5175 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5176 TREE_THIS_VOLATILE (clobber) = 1;
5177 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5180 if (known_eq (sctx.max_vf, 1U))
5181 sctx.is_simt = false;
5183 if (sctx.lane || sctx.is_simt)
5185 uid = create_tmp_var (ptr_type_node, "simduid");
5186 /* Don't want uninit warnings on simduid, it is always uninitialized,
5187 but we use it not for the value, but for the DECL_UID only. */
5188 TREE_NO_WARNING (uid) = 1;
5189 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5190 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5191 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5192 gimple_omp_for_set_clauses (ctx->stmt, c);
5194 /* Emit calls denoting privatized variables and initializing a pointer to
5195 structure that holds private variables as fields after ompdevlow pass. */
5196 if (sctx.is_simt)
5198 sctx.simt_eargs[0] = uid;
5199 gimple *g
5200 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5201 gimple_call_set_lhs (g, uid);
5202 gimple_seq_add_stmt (ilist, g);
5203 sctx.simt_eargs.release ();
5205 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5206 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5207 gimple_call_set_lhs (g, simtrec);
5208 gimple_seq_add_stmt (ilist, g);
5210 if (sctx.lane)
5212 gimple *g
5213 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
5214 gimple_call_set_lhs (g, sctx.lane);
5215 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5216 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5217 g = gimple_build_assign (sctx.lane, INTEGER_CST,
5218 build_int_cst (unsigned_type_node, 0));
5219 gimple_seq_add_stmt (ilist, g);
5220 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5221 if (llist[2])
5223 tree simt_vf = create_tmp_var (unsigned_type_node);
5224 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5225 gimple_call_set_lhs (g, simt_vf);
5226 gimple_seq_add_stmt (dlist, g);
5228 tree t = build_int_cst (unsigned_type_node, 1);
5229 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5230 gimple_seq_add_stmt (dlist, g);
5232 t = build_int_cst (unsigned_type_node, 0);
5233 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5234 gimple_seq_add_stmt (dlist, g);
5236 tree body = create_artificial_label (UNKNOWN_LOCATION);
5237 tree header = create_artificial_label (UNKNOWN_LOCATION);
5238 tree end = create_artificial_label (UNKNOWN_LOCATION);
5239 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5240 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5242 gimple_seq_add_seq (dlist, llist[2]);
5244 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5245 gimple_seq_add_stmt (dlist, g);
5247 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5248 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5249 gimple_seq_add_stmt (dlist, g);
5251 gimple_seq_add_stmt (dlist, gimple_build_label (end));
5253 for (int i = 0; i < 2; i++)
5254 if (llist[i])
5256 tree vf = create_tmp_var (unsigned_type_node);
5257 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5258 gimple_call_set_lhs (g, vf);
5259 gimple_seq *seq = i == 0 ? ilist : dlist;
5260 gimple_seq_add_stmt (seq, g);
5261 tree t = build_int_cst (unsigned_type_node, 0);
5262 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5263 gimple_seq_add_stmt (seq, g);
5264 tree body = create_artificial_label (UNKNOWN_LOCATION);
5265 tree header = create_artificial_label (UNKNOWN_LOCATION);
5266 tree end = create_artificial_label (UNKNOWN_LOCATION);
5267 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5268 gimple_seq_add_stmt (seq, gimple_build_label (body));
5269 gimple_seq_add_seq (seq, llist[i]);
5270 t = build_int_cst (unsigned_type_node, 1);
5271 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5272 gimple_seq_add_stmt (seq, g);
5273 gimple_seq_add_stmt (seq, gimple_build_label (header));
5274 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5275 gimple_seq_add_stmt (seq, g);
5276 gimple_seq_add_stmt (seq, gimple_build_label (end));
5279 if (sctx.is_simt)
5281 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5282 gimple *g
5283 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5284 gimple_seq_add_stmt (dlist, g);
5287 /* The copyin sequence is not to be executed by the main thread, since
5288 that would result in self-copies. Perhaps not visible to scalars,
5289 but it certainly is to C++ operator=. */
5290 if (copyin_seq)
5292 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5294 x = build2 (NE_EXPR, boolean_type_node, x,
5295 build_int_cst (TREE_TYPE (x), 0));
5296 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5297 gimplify_and_add (x, ilist);
5300 /* If any copyin variable is passed by reference, we must ensure the
5301 master thread doesn't modify it before it is copied over in all
5302 threads. Similarly for variables in both firstprivate and
5303 lastprivate clauses we need to ensure the lastprivate copying
5304 happens after firstprivate copying in all threads. And similarly
5305 for UDRs if initializer expression refers to omp_orig. */
5306 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
5308 /* Don't add any barrier for #pragma omp simd or
5309 #pragma omp distribute. */
5310 if (!is_task_ctx (ctx)
5311 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5312 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5313 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5316 /* If max_vf is non-zero, then we can use only a vectorization factor
5317 up to the max_vf we chose. So stick it into the safelen clause. */
5318 if (maybe_ne (sctx.max_vf, 0U))
5320 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5321 OMP_CLAUSE_SAFELEN);
5322 poly_uint64 safe_len;
5323 if (c == NULL_TREE
5324 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5325 && maybe_gt (safe_len, sctx.max_vf)))
5327 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5328 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5329 sctx.max_vf);
5330 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5331 gimple_omp_for_set_clauses (ctx->stmt, c);
5337 /* Generate code to implement the LASTPRIVATE clauses. This is used for
5338 both parallel and workshare constructs. PREDICATE may be NULL if it's
5339 always true. */
/* CLAUSES is the clause chain to scan.  PREDICATE, when non-NULL, guards
   the copy-out so that only the thread/lane that executed the sequentially
   last iteration performs it.  Generated statements are appended to
   STMT_LIST.  CTX is the OMP region being lowered; for a workshare region
   combined with a parallel, the walk may continue on the enclosing
   parallel's clause list (see the par_clauses handling below).  */
5341 static void
5342 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
5343 omp_context *ctx)
5345 tree x, c, label = NULL, orig_clauses = clauses;
5346 bool par_clauses = false;
5347 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
5349 /* Early exit if there are no lastprivate or linear clauses. */
5350 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
5351 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
5352 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
5353 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
5354 break;
5355 if (clauses == NULL)
5357 /* If this was a workshare clause, see if it had been combined
5358 with its parallel. In that case, look for the clauses on the
5359 parallel statement itself. */
5360 if (is_parallel_ctx (ctx))
5361 return;
5363 ctx = ctx->outer;
5364 if (ctx == NULL || !is_parallel_ctx (ctx))
5365 return;
5367 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5368 OMP_CLAUSE_LASTPRIVATE);
5369 if (clauses == NULL)
5370 return;
5371 par_clauses = true;
/* For SIMD loops, note whether this might be a SIMT region and pick up
   the _simduid_ decl, used later to read the value back out of the
   per-lane "omp simd array" copy.  */
5374 bool maybe_simt = false;
5375 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5376 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5378 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
5379 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
5380 if (simduid)
5381 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
/* Emit the guard: branch to LABEL_TRUE (copy-out code) when PREDICATE
   holds, otherwise to LABEL, which is emitted at the end.  */
5384 if (predicate)
5386 gcond *stmt;
5387 tree label_true, arm1, arm2;
5388 enum tree_code pred_code = TREE_CODE (predicate);
5390 label = create_artificial_label (UNKNOWN_LOCATION);
5391 label_true = create_artificial_label (UNKNOWN_LOCATION);
5392 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
5394 arm1 = TREE_OPERAND (predicate, 0);
5395 arm2 = TREE_OPERAND (predicate, 1);
5396 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5397 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
5399 else
/* Non-comparison predicate: test it against false.  */
5401 arm1 = predicate;
5402 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5403 arm2 = boolean_false_node;
5404 pred_code = NE_EXPR;
5406 if (maybe_simt)
/* In a SIMT region, vote across the lanes so every lane agrees on
   whether any of them satisfied the predicate.  */
5408 c = build2 (pred_code, boolean_type_node, arm1, arm2);
5409 c = fold_convert (integer_type_node, c);
5410 simtcond = create_tmp_var (integer_type_node);
5411 gimplify_assign (simtcond, c, stmt_list);
5412 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
5413 1, simtcond);
5414 c = create_tmp_var (integer_type_node);
5415 gimple_call_set_lhs (g, c);
5416 gimple_seq_add_stmt (stmt_list, g);
5417 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
5418 label_true, label);
5420 else
5421 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
5422 gimple_seq_add_stmt (stmt_list, stmt);
5423 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
/* Walk the clause chain, emitting one copy-out per lastprivate or
   copy-out linear variable.  C is advanced manually at the bottom so the
   walk can hop to the enclosing parallel's clauses when this list ends.  */
5426 for (c = clauses; c ;)
5428 tree var, new_var;
5429 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5431 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5432 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5433 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
5435 var = OMP_CLAUSE_DECL (c);
5436 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5437 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5438 && is_taskloop_ctx (ctx))
/* Taskloop firstprivate+lastprivate: the private copy lives in
   the enclosing task context.  */
5440 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
5441 new_var = lookup_decl (var, ctx->outer);
5443 else
5445 new_var = lookup_decl (var, ctx);
5446 /* Avoid uninitialized warnings for lastprivate and
5447 for linear iterators. */
5448 if (predicate
5449 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5450 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
5451 TREE_NO_WARNING (new_var) = 1;
/* If the privatized variable was widened into an "omp simd array",
   read back the element written by the last SIMD lane.  */
5454 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
5456 tree val = DECL_VALUE_EXPR (new_var);
5457 if (TREE_CODE (val) == ARRAY_REF
5458 && VAR_P (TREE_OPERAND (val, 0))
5459 && lookup_attribute ("omp simd array",
5460 DECL_ATTRIBUTES (TREE_OPERAND (val,
5461 0))))
5463 if (lastlane == NULL)
/* Compute the last lane index once and reuse it for all
   clauses in this region.  */
5465 lastlane = create_tmp_var (unsigned_type_node);
5466 gcall *g
5467 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5468 2, simduid,
5469 TREE_OPERAND (val, 1));
5470 gimple_call_set_lhs (g, lastlane);
5471 gimple_seq_add_stmt (stmt_list, g);
5473 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
5474 TREE_OPERAND (val, 0), lastlane,
5475 NULL_TREE, NULL_TREE);
5478 else if (maybe_simt)
/* SIMT: fetch the value from the lane that ran the last iteration
   via a cross-lane exchange.  */
5480 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
5481 ? DECL_VALUE_EXPR (new_var)
5482 : new_var);
5483 if (simtlast == NULL)
5485 simtlast = create_tmp_var (unsigned_type_node);
5486 gcall *g = gimple_build_call_internal
5487 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
5488 gimple_call_set_lhs (g, simtlast);
5489 gimple_seq_add_stmt (stmt_list, g);
5491 x = build_call_expr_internal_loc
5492 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
5493 TREE_TYPE (val), 2, val, simtlast);
5494 new_var = unshare_expr (new_var);
5495 gimplify_assign (new_var, x, stmt_list);
5496 new_var = unshare_expr (new_var);
/* Flush any deferred gimple sequence attached to the clause (e.g.
   lastprivate conditional helpers or linear step updates).  */
5499 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5500 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
5502 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
5503 gimple_seq_add_seq (stmt_list,
5504 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5505 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
5507 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5508 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
5510 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
5511 gimple_seq_add_seq (stmt_list,
5512 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
5513 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
/* Determine the destination of the copy-out (the outer variable).  */
5516 x = NULL_TREE;
5517 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5518 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
5520 gcc_checking_assert (is_taskloop_ctx (ctx));
5521 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
5522 ctx->outer->outer);
5523 if (is_global_var (ovar))
5524 x = ovar;
5526 if (!x)
5527 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
5528 if (omp_is_reference (var))
5529 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
/* The actual copy-out assignment, built by the language hook so that
   e.g. C++ operator= is honored.  */
5530 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
5531 gimplify_and_add (x, stmt_list);
5533 c = OMP_CLAUSE_CHAIN (c);
5534 if (c == NULL && !par_clauses)
5536 /* If this was a workshare clause, see if it had been combined
5537 with its parallel. In that case, continue looking for the
5538 clauses also on the parallel statement itself. */
5539 if (is_parallel_ctx (ctx))
5540 break;
5542 ctx = ctx->outer;
5543 if (ctx == NULL || !is_parallel_ctx (ctx))
5544 break;
5546 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5547 OMP_CLAUSE_LASTPRIVATE);
5548 par_clauses = true;
/* Join point for the PREDICATE guard emitted above.  */
5552 if (label)
5553 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
5556 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
5557 (which might be a placeholder). INNER is true if this is an inner
5558 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
5559 join markers. Generate the before-loop forking sequence in
5560 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
5561 general form of these sequences is
5563 GOACC_REDUCTION_SETUP
5564 GOACC_FORK
5565 GOACC_REDUCTION_INIT
5567 GOACC_REDUCTION_FINI
5568 GOACC_JOIN
5569 GOACC_REDUCTION_TEARDOWN. */
/* Each reduction clause is lowered to four IFN_GOACC_REDUCTION calls
   (setup/init/fini/teardown) distinguished by their first argument;
   the target compiler later expands them per-axis.  CTX is the context
   of the construct carrying the clauses.  */
5571 static void
5572 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
5573 gcall *fork, gcall *join, gimple_seq *fork_seq,
5574 gimple_seq *join_seq, omp_context *ctx)
5576 gimple_seq before_fork = NULL;
5577 gimple_seq after_fork = NULL;
5578 gimple_seq before_join = NULL;
5579 gimple_seq after_join = NULL;
5580 tree init_code = NULL_TREE, fini_code = NULL_TREE,
5581 setup_code = NULL_TREE, teardown_code = NULL_TREE;
5582 unsigned offset = 0;
5584 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5585 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5587 tree orig = OMP_CLAUSE_DECL (c);
5588 tree var = maybe_lookup_decl (orig, ctx);
5589 tree ref_to_res = NULL_TREE;
5590 tree incoming, outgoing, v1, v2, v3;
5591 bool is_private = false;
/* Canonicalize the reduction operator: '-' accumulates like '+',
   and the short-circuit logical ops reduce like their bitwise
   counterparts (operands are boolean 0/1 here).  */
5593 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
5594 if (rcode == MINUS_EXPR)
5595 rcode = PLUS_EXPR;
5596 else if (rcode == TRUTH_ANDIF_EXPR)
5597 rcode = BIT_AND_EXPR;
5598 else if (rcode == TRUTH_ORIF_EXPR)
5599 rcode = BIT_IOR_EXPR;
5600 tree op = build_int_cst (unsigned_type_node, rcode);
5602 if (!var)
5603 var = orig;
5605 incoming = outgoing = var;
5607 if (!inner)
5609 /* See if an outer construct also reduces this variable. */
5610 omp_context *outer = ctx;
5612 while (omp_context *probe = outer->outer)
5614 enum gimple_code type = gimple_code (probe->stmt);
5615 tree cls;
5617 switch (type)
5619 case GIMPLE_OMP_FOR:
5620 cls = gimple_omp_for_clauses (probe->stmt);
5621 break;
5623 case GIMPLE_OMP_TARGET:
5624 if (gimple_omp_target_kind (probe->stmt)
5625 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
5626 goto do_lookup;
5628 cls = gimple_omp_target_clauses (probe->stmt);
5629 break;
5631 default:
5632 goto do_lookup;
5635 outer = probe;
5636 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
5637 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
5638 && orig == OMP_CLAUSE_DECL (cls))
/* Chain to the outer construct's private copy.  */
5640 incoming = outgoing = lookup_decl (orig, probe);
5641 goto has_outer_reduction;
5643 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
5644 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
5645 && orig == OMP_CLAUSE_DECL (cls))
5647 is_private = true;
5648 goto do_lookup;
5652 do_lookup:
5653 /* This is the outermost construct with this reduction,
5654 see if there's a mapping for it. */
5655 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5656 && maybe_lookup_field (orig, outer) && !is_private)
/* Mapped on the target: receive the result slot and seed the
   private copy with the operator's identity value.  */
5658 ref_to_res = build_receiver_ref (orig, false, outer);
5659 if (omp_is_reference (orig))
5660 ref_to_res = build_simple_mem_ref (ref_to_res);
5662 tree type = TREE_TYPE (var);
5663 if (POINTER_TYPE_P (type))
5664 type = TREE_TYPE (type);
5666 outgoing = var;
5667 incoming = omp_reduction_init_op (loc, rcode, type);
5669 else
5671 /* Try to look at enclosing contexts for reduction var,
5672 use original if no mapping found. */
5673 tree t = NULL_TREE;
5674 omp_context *c = ctx->outer;
5675 while (c && !t)
5677 t = maybe_lookup_decl (orig, c);
5678 c = c->outer;
5680 incoming = outgoing = (t ? t : orig);
5683 has_outer_reduction:;
5686 if (!ref_to_res)
5687 ref_to_res = integer_zero_node;
/* Reference-typed reductions: allocate local backing storage (for
   the outermost axis) and three pointer temporaries, then operate on
   the pointed-to objects.  */
5689 if (omp_is_reference (orig))
5691 tree type = TREE_TYPE (var);
5692 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5694 if (!inner)
5696 tree x = create_tmp_var (TREE_TYPE (type), id);
5697 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5700 v1 = create_tmp_var (type, id);
5701 v2 = create_tmp_var (type, id);
5702 v3 = create_tmp_var (type, id);
5704 gimplify_assign (v1, var, fork_seq);
5705 gimplify_assign (v2, var, fork_seq);
5706 gimplify_assign (v3, var, fork_seq);
5708 var = build_simple_mem_ref (var);
5709 v1 = build_simple_mem_ref (v1);
5710 v2 = build_simple_mem_ref (v2);
5711 v3 = build_simple_mem_ref (v3);
5712 outgoing = build_simple_mem_ref (outgoing);
5714 if (!TREE_CONSTANT (incoming))
5715 incoming = build_simple_mem_ref (incoming);
5717 else
5718 v1 = v2 = v3 = var;
5720 /* Determine position in reduction buffer, which may be used
5721 by target. The parser has ensured that this is not a
5722 variable-sized type. */
5723 fixed_size_mode mode
5724 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5725 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5726 offset = (offset + align - 1) & ~(align - 1);
5727 tree off = build_int_cst (sizetype, offset);
5728 offset += GET_MODE_SIZE (mode);
/* Build the four IFN_GOACC_REDUCTION selector constants lazily,
   once for the whole clause chain.  */
5730 if (!init_code)
5732 init_code = build_int_cst (integer_type_node,
5733 IFN_GOACC_REDUCTION_INIT);
5734 fini_code = build_int_cst (integer_type_node,
5735 IFN_GOACC_REDUCTION_FINI);
5736 setup_code = build_int_cst (integer_type_node,
5737 IFN_GOACC_REDUCTION_SETUP);
5738 teardown_code = build_int_cst (integer_type_node,
5739 IFN_GOACC_REDUCTION_TEARDOWN);
5742 tree setup_call
5743 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5744 TREE_TYPE (var), 6, setup_code,
5745 unshare_expr (ref_to_res),
5746 incoming, level, op, off);
5747 tree init_call
5748 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5749 TREE_TYPE (var), 6, init_code,
5750 unshare_expr (ref_to_res),
5751 v1, level, op, off);
5752 tree fini_call
5753 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5754 TREE_TYPE (var), 6, fini_code,
5755 unshare_expr (ref_to_res),
5756 v2, level, op, off);
5757 tree teardown_call
5758 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5759 TREE_TYPE (var), 6, teardown_code,
5760 ref_to_res, v3, level, op, off);
/* setup runs before the fork, init after it; fini runs before the
   join, teardown after it — matching the shape in the header
   comment above.  */
5762 gimplify_assign (v1, setup_call, &before_fork);
5763 gimplify_assign (v2, init_call, &after_fork);
5764 gimplify_assign (v3, fini_call, &before_join);
5765 gimplify_assign (outgoing, teardown_call, &after_join);
5768 /* Now stitch things together. */
5769 gimple_seq_add_seq (fork_seq, before_fork);
5770 if (fork)
5771 gimple_seq_add_stmt (fork_seq, fork);
5772 gimple_seq_add_seq (fork_seq, after_fork);
5774 gimple_seq_add_seq (join_seq, before_join);
5775 if (join)
5776 gimple_seq_add_stmt (join_seq, join);
5777 gimple_seq_add_seq (join_seq, after_join);
5780 /* Generate code to implement the REDUCTION clauses. */
/* CLAUSES is the clause chain; generated statements go to STMT_SEQP; CTX
   is the region being lowered.  With exactly one simple (non-UDR,
   non-array-section) reduction, emit a single OMP_ATOMIC update;
   otherwise emit all merges between GOMP_atomic_start/GOMP_atomic_end
   lock calls.  */
5782 static void
5783 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5785 gimple_seq sub_seq = NULL;
5786 gimple *stmt;
5787 tree x, c;
5788 int count = 0;
5790 /* OpenACC loop reductions are handled elsewhere. */
5791 if (is_gimple_omp_oacc (ctx->stmt))
5792 return;
5794 /* SIMD reductions are handled in lower_rec_input_clauses. */
5795 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5796 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5797 return;
5799 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5800 update in that case, otherwise use a lock. */
5801 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5802 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5803 && !OMP_CLAUSE_REDUCTION_TASK (c))
5805 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5806 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5808 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5809 count = -1;
5810 break;
5812 count++;
5815 if (count == 0)
5816 return;
5818 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5820 tree var, ref, new_var, orig_var;
5821 enum tree_code code;
5822 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5824 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5825 || OMP_CLAUSE_REDUCTION_TASK (c))
5826 continue;
/* Peel MEM_REF/POINTER_PLUS/ADDR_EXPR wrappers of an array-section
   reduction down to the underlying decl.  */
5828 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5829 orig_var = var = OMP_CLAUSE_DECL (c);
5830 if (TREE_CODE (var) == MEM_REF)
5832 var = TREE_OPERAND (var, 0);
5833 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5834 var = TREE_OPERAND (var, 0);
5835 if (TREE_CODE (var) == ADDR_EXPR)
5836 var = TREE_OPERAND (var, 0);
5837 else
5839 /* If this is a pointer or referenced based array
5840 section, the var could be private in the outer
5841 context e.g. on orphaned loop construct. Pretend this
5842 is private variable's outer reference. */
5843 ccode = OMP_CLAUSE_PRIVATE;
5844 if (TREE_CODE (var) == INDIRECT_REF)
5845 var = TREE_OPERAND (var, 0);
5847 orig_var = var;
5848 if (is_variable_sized (var))
/* VLA decls are accessed through their DECL_VALUE_EXPR pointer.  */
5850 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5851 var = DECL_VALUE_EXPR (var);
5852 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5853 var = TREE_OPERAND (var, 0);
5854 gcc_assert (DECL_P (var));
5857 new_var = lookup_decl (var, ctx);
5858 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5859 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5860 ref = build_outer_var_ref (var, ctx, ccode);
5861 code = OMP_CLAUSE_REDUCTION_CODE (c);
5863 /* reduction(-:var) sums up the partial results, so it acts
5864 identically to reduction(+:var). */
5865 if (code == MINUS_EXPR)
5866 code = PLUS_EXPR;
/* Single simple reduction: emit one relaxed OMP_ATOMIC update and
   we're done (no lock needed).  */
5868 if (count == 1)
5870 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5872 addr = save_expr (addr);
5873 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5874 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5875 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5876 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
5877 gimplify_and_add (x, stmt_seqp);
5878 return;
5880 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
/* Array-section reduction: loop element-by-element over the
   section, merging the private copy into the shared one.  */
5882 tree d = OMP_CLAUSE_DECL (c);
5883 tree type = TREE_TYPE (d);
5884 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5885 tree i = create_tmp_var (TREE_TYPE (v));
5886 tree ptype = build_pointer_type (TREE_TYPE (type));
5887 tree bias = TREE_OPERAND (d, 1);
5888 d = TREE_OPERAND (d, 0);
5889 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
/* Fold the section's base offset into BIAS, mapping the offset
   decl into this (or an outer) context first.  */
5891 tree b = TREE_OPERAND (d, 1);
5892 b = maybe_lookup_decl (b, ctx);
5893 if (b == NULL)
5895 b = TREE_OPERAND (d, 1);
5896 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5898 if (integer_zerop (bias))
5899 bias = b;
5900 else
5902 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5903 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5904 TREE_TYPE (b), b, bias);
5906 d = TREE_OPERAND (d, 0);
5908 /* For ref build_outer_var_ref already performs this, so
5909 only new_var needs a dereference. */
5910 if (TREE_CODE (d) == INDIRECT_REF)
5912 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5913 gcc_assert (omp_is_reference (var) && var == orig_var);
5915 else if (TREE_CODE (d) == ADDR_EXPR)
5917 if (orig_var == var)
5919 new_var = build_fold_addr_expr (new_var);
5920 ref = build_fold_addr_expr (ref);
5923 else
5925 gcc_assert (orig_var == var);
5926 if (omp_is_reference (var))
5927 ref = build_fold_addr_expr (ref);
5929 if (DECL_P (v))
/* The section length may itself be a privatized decl.  */
5931 tree t = maybe_lookup_decl (v, ctx);
5932 if (t)
5933 v = t;
5934 else
5935 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5936 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5938 if (!integer_zerop (bias))
5940 bias = fold_convert_loc (clause_loc, sizetype, bias);
5941 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5942 TREE_TYPE (new_var), new_var,
5943 unshare_expr (bias));
5944 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5945 TREE_TYPE (ref), ref, bias);
/* Materialize the two element pointers (private and shared) and
   build the merge loop: body label, merge, pointer bumps,
   counter increment, conditional branch, end label.  */
5947 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5948 ref = fold_convert_loc (clause_loc, ptype, ref);
5949 tree m = create_tmp_var (ptype);
5950 gimplify_assign (m, new_var, stmt_seqp);
5951 new_var = m;
5952 m = create_tmp_var (ptype);
5953 gimplify_assign (m, ref, stmt_seqp);
5954 ref = m;
5955 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5956 tree body = create_artificial_label (UNKNOWN_LOCATION);
5957 tree end = create_artificial_label (UNKNOWN_LOCATION);
5958 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5959 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5960 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5961 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
/* UDR: splice in the user-supplied combiner, binding omp_out /
   omp_in placeholders to the current elements.  */
5963 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5964 tree decl_placeholder
5965 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5966 SET_DECL_VALUE_EXPR (placeholder, out);
5967 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5968 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5969 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5970 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5971 gimple_seq_add_seq (&sub_seq,
5972 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5973 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5974 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5975 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5977 else
5979 x = build2 (code, TREE_TYPE (out), out, priv);
5980 out = unshare_expr (out);
5981 gimplify_assign (out, x, &sub_seq);
5983 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5984 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5985 gimple_seq_add_stmt (&sub_seq, g);
5986 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5987 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5988 gimple_seq_add_stmt (&sub_seq, g);
5989 g = gimple_build_assign (i, PLUS_EXPR, i,
5990 build_int_cst (TREE_TYPE (i), 1));
5991 gimple_seq_add_stmt (&sub_seq, g);
5992 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5993 gimple_seq_add_stmt (&sub_seq, g);
5994 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5996 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
/* Scalar UDR: splice in the user combiner with omp_out bound to
   the outer variable.  */
5998 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6000 if (omp_is_reference (var)
6001 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6002 TREE_TYPE (ref)))
6003 ref = build_fold_addr_expr_loc (clause_loc, ref);
6004 SET_DECL_VALUE_EXPR (placeholder, ref);
6005 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6006 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6007 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6008 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6009 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6011 else
6013 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6014 ref = build_outer_var_ref (var, ctx);
6015 gimplify_assign (ref, x, &sub_seq);
/* Multiple reductions: protect all accumulated merges with the
   global atomic lock.  */
6019 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6021 gimple_seq_add_stmt (stmt_seqp, stmt);
6023 gimple_seq_add_seq (stmt_seqp, sub_seq);
6025 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
6027 gimple_seq_add_stmt (stmt_seqp, stmt);
6031 /* Generate code to implement the COPYPRIVATE clauses. */
/* For each copyprivate variable: SLIST (sender side) gets an assignment
   of the value — or its address, for by-reference fields — into the
   broadcast record; RLIST (receiver side) gets the copy back out via the
   language assignment hook.  */
6033 static void
6034 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6035 omp_context *ctx)
6037 tree c;
6039 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6041 tree var, new_var, ref, x;
6042 bool by_ref;
6043 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6045 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6046 continue;
6048 var = OMP_CLAUSE_DECL (c);
6049 by_ref = use_pointer_for_field (var, NULL);
/* Sender side: store the value (or its address) into the record.  */
6051 ref = build_sender_ref (var, ctx);
6052 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6053 if (by_ref)
6055 x = build_fold_addr_expr_loc (clause_loc, new_var);
6056 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6058 gimplify_assign (ref, x, slist);
/* Receiver side: load from the record, dereferencing as needed.  */
6060 ref = build_receiver_ref (var, false, ctx);
6061 if (by_ref)
6063 ref = fold_convert_loc (clause_loc,
6064 build_pointer_type (TREE_TYPE (new_var)),
6065 ref);
6066 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6068 if (omp_is_reference (var))
6070 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6071 ref = build_simple_mem_ref_loc (clause_loc, ref);
6072 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
/* Use the language hook so C++ operator= etc. are honored.  */
6074 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6075 gimplify_and_add (x, rlist);
6080 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6081 and REDUCTION from the sender (aka parent) side. */
/* CLAUSES is the clause chain of CTX's statement.  ILIST receives the
   "in" assignments (parent -> child record) emitted before the region;
   OLIST receives the "out" assignments (child record -> parent) emitted
   after it.  */
6083 static void
6084 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
6085 omp_context *ctx)
6087 tree c, t;
6088 int ignored_looptemp = 0;
6089 bool is_taskloop = false;
6091 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
6092 by GOMP_taskloop. */
6093 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
6095 ignored_looptemp = 2;
6096 is_taskloop = true;
6099 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6101 tree val, ref, x, var;
6102 bool by_ref, do_in = false, do_out = false;
6103 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
/* First filter: which clause kinds need any sender-side code.  */
6105 switch (OMP_CLAUSE_CODE (c))
6107 case OMP_CLAUSE_PRIVATE:
6108 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6109 break;
6110 continue;
6111 case OMP_CLAUSE_FIRSTPRIVATE:
6112 case OMP_CLAUSE_COPYIN:
6113 case OMP_CLAUSE_LASTPRIVATE:
6114 case OMP_CLAUSE_IN_REDUCTION:
6115 case OMP_CLAUSE__REDUCTEMP_:
6116 break;
6117 case OMP_CLAUSE_REDUCTION:
6118 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
6119 continue;
6120 break;
6121 case OMP_CLAUSE_SHARED:
6122 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6123 break;
6124 continue;
6125 case OMP_CLAUSE__LOOPTEMP_:
6126 if (ignored_looptemp)
6128 ignored_looptemp--;
6129 continue;
6131 break;
6132 default:
6133 continue;
/* Peel array-section wrappers off reduction decls down to the base
   decl; variable-sized bases are handled elsewhere.  */
6136 val = OMP_CLAUSE_DECL (c);
6137 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6138 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
6139 && TREE_CODE (val) == MEM_REF)
6141 val = TREE_OPERAND (val, 0);
6142 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
6143 val = TREE_OPERAND (val, 0);
6144 if (TREE_CODE (val) == INDIRECT_REF
6145 || TREE_CODE (val) == ADDR_EXPR)
6146 val = TREE_OPERAND (val, 0);
6147 if (is_variable_sized (val))
6148 continue;
6151 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6152 outer taskloop region. */
6153 omp_context *ctx_for_o = ctx;
6154 if (is_taskloop
6155 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
6156 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6157 ctx_for_o = ctx->outer;
6159 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
/* Globals normally need no marshalling — they are visible in the
   child — except for some by-reference cases on tasks.  */
6161 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
6162 && is_global_var (var)
6163 && (val == OMP_CLAUSE_DECL (c)
6164 || !is_task_ctx (ctx)
6165 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
6166 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
6167 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
6168 != POINTER_TYPE)))))
6169 continue;
/* Member accesses go through their dummy var's value expression,
   remapped into the right context.  */
6171 t = omp_member_access_dummy_var (var);
6172 if (t)
6174 var = DECL_VALUE_EXPR (var);
6175 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
6176 if (o != t)
6177 var = unshare_and_remap (var, t, o);
6178 else
6179 var = unshare_expr (var);
6182 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
6184 /* Handle taskloop firstprivate/lastprivate, where the
6185 lastprivate on GIMPLE_OMP_TASK is represented as
6186 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6187 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
6188 x = omp_build_component_ref (ctx->sender_decl, f);
6189 if (use_pointer_for_field (val, ctx))
6190 var = build_fold_addr_expr (var);
6191 gimplify_assign (x, var, ilist);
6192 DECL_ABSTRACT_ORIGIN (f) = NULL;
6193 continue;
6196 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6197 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
6198 || val == OMP_CLAUSE_DECL (c))
6199 && is_variable_sized (val))
6200 continue;
6201 by_ref = use_pointer_for_field (val, NULL);
/* Second filter: decide copy direction(s) per clause kind.  */
6203 switch (OMP_CLAUSE_CODE (c))
6205 case OMP_CLAUSE_FIRSTPRIVATE:
6206 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
6207 && !by_ref
6208 && is_task_ctx (ctx))
6209 TREE_NO_WARNING (var) = 1;
6210 do_in = true;
6211 break;
6213 case OMP_CLAUSE_PRIVATE:
6214 case OMP_CLAUSE_COPYIN:
6215 case OMP_CLAUSE__LOOPTEMP_:
6216 case OMP_CLAUSE__REDUCTEMP_:
6217 do_in = true;
6218 break;
6220 case OMP_CLAUSE_LASTPRIVATE:
6221 if (by_ref || omp_is_reference (val))
6223 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6224 continue;
6225 do_in = true;
6227 else
6229 do_out = true;
6230 if (lang_hooks.decls.omp_private_outer_ref (val))
6231 do_in = true;
6233 break;
6235 case OMP_CLAUSE_REDUCTION:
6236 case OMP_CLAUSE_IN_REDUCTION:
6237 do_in = true;
6238 if (val == OMP_CLAUSE_DECL (c))
6240 if (is_task_ctx (ctx))
6241 by_ref = use_pointer_for_field (val, ctx);
6242 else
6243 do_out = !(by_ref || omp_is_reference (val));
6245 else
6246 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
6247 break;
6249 default:
6250 gcc_unreachable ();
/* Emit the actual record-field copies.  */
6253 if (do_in)
6255 ref = build_sender_ref (val, ctx);
6256 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
6257 gimplify_assign (ref, x, ilist);
6258 if (is_task_ctx (ctx))
6259 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
6262 if (do_out)
6264 ref = build_sender_ref (val, ctx);
6265 gimplify_assign (var, ref, olist);
6270 /* Generate code to implement SHARED from the sender (aka parent)
6271 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6272 list things that got automatically shared. */
/* Walk the fields of the region's (s)record type rather than the clause
   chain; each field's DECL_ABSTRACT_ORIGIN points back at the shared
   decl.  ILIST gets the stores into the sender record before the region;
   OLIST gets the loads back out afterwards.  */
6274 static void
6275 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
6277 tree var, ovar, nvar, t, f, x, record_type;
6279 if (ctx->record_type == NULL)
6280 return;
6282 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
6283 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
6285 ovar = DECL_ABSTRACT_ORIGIN (f);
6286 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
6287 continue;
/* Only fields whose decl got remapped (has a value expr) in this
   context actually need marshalling.  */
6289 nvar = maybe_lookup_decl (ovar, ctx);
6290 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
6291 continue;
6293 /* If CTX is a nested parallel directive. Find the immediately
6294 enclosing parallel or workshare construct that contains a
6295 mapping for OVAR. */
6296 var = lookup_decl_in_outer_ctx (ovar, ctx);
6298 t = omp_member_access_dummy_var (var);
6299 if (t)
6301 var = DECL_VALUE_EXPR (var);
6302 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
6303 if (o != t)
6304 var = unshare_and_remap (var, t, o);
6305 else
6306 var = unshare_expr (var);
6309 if (use_pointer_for_field (ovar, ctx))
/* Pass the address; no copy-back is needed since the child
   writes through the pointer directly.  */
6311 x = build_sender_ref (ovar, ctx);
6312 var = build_fold_addr_expr (var);
6313 gimplify_assign (x, var, ilist);
6315 else
6317 x = build_sender_ref (ovar, ctx);
6318 gimplify_assign (x, var, ilist);
6320 if (!TREE_READONLY (var)
6321 /* We don't need to receive a new reference to a result
6322 or parm decl. In fact we may not store to it as we will
6323 invalidate any pending RSO and generate wrong gimple
6324 during inlining. */
6325 && !((TREE_CODE (var) == RESULT_DECL
6326 || TREE_CODE (var) == PARM_DECL)
6327 && DECL_BY_REFERENCE (var)))
6329 x = build_sender_ref (ovar, ctx);
6330 gimplify_assign (var, x, olist);
6336 /* Emit an OpenACC head marker call, encapsulating the partitioning and
6337 other information that must be processed by the target compiler.
6338 Return the maximum number of dimensions the associated loop might
6339 be partitioned over. */
/* LOC is the source location for the marker; DDVAR is the data-dependency
   variable threaded through the markers; CLAUSES is the loop's clause
   chain; the IFN_UNIQUE call is appended to SEQ; CTX is the loop's
   context.  */
6341 static unsigned
6342 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
6343 gimple_seq *seq, omp_context *ctx)
6345 unsigned levels = 0;
6346 unsigned tag = 0;
6347 tree gang_static = NULL_TREE;
6348 auto_vec<tree, 5> args;
6350 args.quick_push (build_int_cst
6351 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
6352 args.quick_push (ddvar);
/* Accumulate partitioning flags (TAG) and count explicitly requested
   partitioned levels (LEVELS) from the loop clauses.  */
6353 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6355 switch (OMP_CLAUSE_CODE (c))
6357 case OMP_CLAUSE_GANG:
6358 tag |= OLF_DIM_GANG;
6359 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
6360 /* static:* is represented by -1, and we can ignore it, as
6361 scheduling is always static. */
6362 if (gang_static && integer_minus_onep (gang_static))
6363 gang_static = NULL_TREE;
6364 levels++;
6365 break;
6367 case OMP_CLAUSE_WORKER:
6368 tag |= OLF_DIM_WORKER;
6369 levels++;
6370 break;
6372 case OMP_CLAUSE_VECTOR:
6373 tag |= OLF_DIM_VECTOR;
6374 levels++;
6375 break;
6377 case OMP_CLAUSE_SEQ:
6378 tag |= OLF_SEQ;
6379 break;
6381 case OMP_CLAUSE_AUTO:
6382 tag |= OLF_AUTO;
6383 break;
6385 case OMP_CLAUSE_INDEPENDENT:
6386 tag |= OLF_INDEPENDENT;
6387 break;
6389 case OMP_CLAUSE_TILE:
6390 tag |= OLF_TILE;
6391 break;
6393 default:
6394 continue;
6398 if (gang_static)
6400 if (DECL_P (gang_static))
6401 gang_static = build_outer_var_ref (gang_static, ctx);
6402 tag |= OLF_GANG_STATIC;
6405 /* In a parallel region, loops are implicitly INDEPENDENT. */
6406 omp_context *tgt = enclosing_target_ctx (ctx);
6407 if (!tgt || is_oacc_parallel (tgt))
6408 tag |= OLF_INDEPENDENT;
6410 if (tag & OLF_TILE)
6411 /* Tiling could use all 3 levels. */
6412 levels = 3;
6413 else
6415 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
6416 Ensure at least one level, or 2 for possible auto
6417 partitioning */
6418 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
6419 << OLF_DIM_BASE) | OLF_SEQ));
6421 if (levels < 1u + maybe_auto)
6422 levels = 1u + maybe_auto;
/* Build the IFN_UNIQUE (OACC_HEAD_MARK) call; GANG_STATIC, when
   present, rides along as a trailing argument.  */
6425 args.quick_push (build_int_cst (integer_type_node, levels));
6426 args.quick_push (build_int_cst (integer_type_node, tag));
6427 if (gang_static)
6428 args.quick_push (gang_static);
6430 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
6431 gimple_set_location (call, loc);
6432 gimple_set_lhs (call, ddvar);
6433 gimple_seq_add_stmt (seq, call);
6435 return levels;
6438 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
6439 partitioning level of the enclosed region. */
6441 static void
6442 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
6443 tree tofollow, gimple_seq *seq)
6445 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
6446 : IFN_UNIQUE_OACC_TAIL_MARK);
6447 tree marker = build_int_cst (integer_type_node, marker_kind);
6448 int nargs = 2 + (tofollow != NULL_TREE);
6449 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
6450 marker, ddvar, tofollow);
6451 gimple_set_location (call, loc);
6452 gimple_set_lhs (call, ddvar);
6453 gimple_seq_add_stmt (seq, call);
6456 /* Generate the before and after OpenACC loop sequences. CLAUSES are
6457 the loop clauses, from which we extract reductions. Initialize
6458 HEAD and TAIL. */
6460 static void
6461 lower_oacc_head_tail (location_t loc, tree clauses,
6462 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
6464 bool inner = false;
/* Data-dependence variable threading the marker/fork/join calls.  */
6465 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
6466 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
/* COUNT is the number of partitioning levels the loop might use.  */
6468 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
6469 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
6470 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
6472 gcc_assert (count);
/* Emit one fork/join pair per level.  HEAD grows forward while the
   join sequences are prepended to TAIL, so the joins unwind in the
   opposite order of the forks.  */
6473 for (unsigned done = 1; count; count--, done++)
6475 gimple_seq fork_seq = NULL;
6476 gimple_seq join_seq = NULL;
/* The partitioning level operand; -1 here, filled in later.  */
6478 tree place = build_int_cst (integer_type_node, -1);
6479 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
6480 fork_kind, ddvar, place);
6481 gimple_set_location (fork, loc);
6482 gimple_set_lhs (fork, ddvar);
6484 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
6485 join_kind, ddvar, place);
6486 gimple_set_location (join, loc);
6487 gimple_set_lhs (join, ddvar);
6489 /* Mark the beginning of this level sequence. */
6490 if (inner)
6491 lower_oacc_loop_marker (loc, ddvar, true,
6492 build_int_cst (integer_type_node, count),
6493 &fork_seq);
6494 lower_oacc_loop_marker (loc, ddvar, false,
6495 build_int_cst (integer_type_node, done),
6496 &join_seq);
6498 lower_oacc_reductions (loc, clauses, place, inner,
6499 fork, join, &fork_seq, &join_seq, ctx);
6501 /* Append this level to head. */
6502 gimple_seq_add_seq (head, fork_seq);
6503 /* Prepend it to tail. */
6504 gimple_seq_add_seq (&join_seq, *tail);
6505 *tail = join_seq;
6507 inner = true;
6510 /* Mark the end of the sequence. */
6511 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
6512 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
6515 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
6516 catch handler and return it. This prevents programs from violating the
6517 structured block semantics with throws. */
6519 static gimple_seq
6520 maybe_catch_exception (gimple_seq body)
6522 gimple *g;
6523 tree decl;
6525 if (!flag_exceptions)
6526 return body;
6528 if (lang_hooks.eh_protect_cleanup_actions != NULL)
6529 decl = lang_hooks.eh_protect_cleanup_actions ();
6530 else
6531 decl = builtin_decl_explicit (BUILT_IN_TRAP);
6533 g = gimple_build_eh_must_not_throw (decl);
6534 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
6535 GIMPLE_TRY_CATCH);
6537 return gimple_seq_alloc_with_stmt (g);
6541 /* Routines to lower OMP directives into OMP-GIMPLE. */
6543 /* If ctx is a worksharing context inside of a cancellable parallel
6544 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
6545 and conditional branch to parallel's cancel_label to handle
6546 cancellation in the implicit barrier. */
6548 static void
6549 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
6550 gimple_seq *body)
6552 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
/* A nowait construct has no implicit barrier, so nothing to do.  */
6553 if (gimple_omp_return_nowait_p (omp_return))
6554 return;
/* Walk outward to the enclosing parallel.  Taskgroup contexts are
   transparent; any other enclosing construct stops the search.  */
6555 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
6556 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
6557 && outer->cancellable)
/* Attach an lhs of GOMP_cancel's return type to the OMP_RETURN and
   branch to the parallel's cancel label when it comes back nonzero.  */
6559 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
6560 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
6561 tree lhs = create_tmp_var (c_bool_type);
6562 gimple_omp_return_set_lhs (omp_return, lhs);
6563 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
6564 gimple *g = gimple_build_cond (NE_EXPR, lhs,
6565 fold_convert (c_bool_type,
6566 boolean_false_node),
6567 outer->cancel_label, fallthru_label);
6568 gimple_seq_add_stmt (body, g);
6569 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
6571 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
6572 return;
6575 /* Find the first task_reduction or reduction clause or return NULL
6576 if there are none. */
6578 static inline tree
6579 omp_task_reductions_find_first (tree clauses, enum tree_code code,
6580 enum omp_clause_code ccode)
6582 while (1)
6584 clauses = omp_find_clause (clauses, ccode);
6585 if (clauses == NULL_TREE)
6586 return NULL_TREE;
6587 if (ccode != OMP_CLAUSE_REDUCTION
6588 || code == OMP_TASKLOOP
6589 || OMP_CLAUSE_REDUCTION_TASK (clauses))
6590 return clauses;
6591 clauses = OMP_CLAUSE_CHAIN (clauses);
6595 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
6596 gimple_seq *, gimple_seq *);
6598 /* Lower the OpenMP sections directive in the current statement in GSI_P.
6599 CTX is the enclosing OMP context for the current statement. */
6601 static void
6602 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6604 tree block, control;
6605 gimple_stmt_iterator tgsi;
6606 gomp_sections *stmt;
6607 gimple *t;
6608 gbind *new_stmt, *bind;
6609 gimple_seq ilist, dlist, olist, tred_dlist = NULL, new_body;
6611 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
6613 push_gimplify_context ();
6615 dlist = NULL;
6616 ilist = NULL;
/* If there are task reductions, prepend a _REDUCTEMP_ clause and emit
   the register (into ILIST) and unregister (into TRED_DLIST) code.  */
6618 tree rclauses
6619 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
6620 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
6621 tree rtmp = NULL_TREE;
6622 if (rclauses)
6624 tree type = build_pointer_type (pointer_sized_int_node);
6625 tree temp = create_tmp_var (type);
6626 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
6627 OMP_CLAUSE_DECL (c) = temp;
6628 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
6629 gimple_omp_sections_set_clauses (stmt, c);
6630 lower_omp_task_reductions (ctx, OMP_SECTIONS,
6631 gimple_omp_sections_clauses (stmt),
6632 &ilist, &tred_dlist);
6633 rclauses = c;
6634 rtmp = make_ssa_name (type);
6635 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
6638 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
6639 &ilist, &dlist, ctx, NULL);
/* Lower each GIMPLE_OMP_SECTION of the body in turn; the last section
   additionally gets the lastprivate handling appended.  */
6641 new_body = gimple_omp_body (stmt);
6642 gimple_omp_set_body (stmt, NULL);
6643 tgsi = gsi_start (new_body);
6644 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
6646 omp_context *sctx;
6647 gimple *sec_start;
6649 sec_start = gsi_stmt (tgsi);
6650 sctx = maybe_lookup_ctx (sec_start);
6651 gcc_assert (sctx);
6653 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
6654 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
6655 GSI_CONTINUE_LINKING);
6656 gimple_omp_set_body (sec_start, NULL);
6658 if (gsi_one_before_end_p (tgsi))
6660 gimple_seq l = NULL;
6661 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
6662 &l, ctx);
6663 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
6664 gimple_omp_section_set_last (sec_start);
6667 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
6668 GSI_CONTINUE_LINKING);
6671 block = make_node (BLOCK);
6672 bind = gimple_build_bind (NULL, new_body, block);
6674 olist = NULL;
6675 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
/* Replace the directive with a fresh bind that will receive the fully
   lowered body assembled below.  */
6677 block = make_node (BLOCK);
6678 new_stmt = gimple_build_bind (NULL, NULL, block);
6679 gsi_replace (gsi_p, new_stmt, true);
6681 pop_gimplify_context (new_stmt);
6682 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6683 BLOCK_VARS (block) = gimple_bind_vars (bind);
6684 if (BLOCK_VARS (block))
6685 TREE_USED (block) = 1;
/* Assemble the body: clause init, the sections statement, the switch,
   the bound section bodies, the continue, reductions, cancel label and
   destructor code.  */
6687 new_body = NULL;
6688 gimple_seq_add_seq (&new_body, ilist);
6689 gimple_seq_add_stmt (&new_body, stmt);
6690 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
6691 gimple_seq_add_stmt (&new_body, bind);
6693 control = create_tmp_var (unsigned_type_node, ".section");
6694 t = gimple_build_omp_continue (control, control);
6695 gimple_omp_sections_set_control (stmt, control);
6696 gimple_seq_add_stmt (&new_body, t);
6698 gimple_seq_add_seq (&new_body, olist);
6699 if (ctx->cancellable)
6700 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
6701 gimple_seq_add_seq (&new_body, dlist);
6703 new_body = maybe_catch_exception (new_body);
6705 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
6706 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6707 t = gimple_build_omp_return (nowait);
6708 gimple_seq_add_stmt (&new_body, t);
6709 gimple_seq_add_seq (&new_body, tred_dlist);
6710 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
6712 if (rclauses)
6713 OMP_CLAUSE_DECL (rclauses) = rtmp;
6715 gimple_bind_set_body (new_stmt, new_body);
6719 /* A subroutine of lower_omp_single. Expand the simple form of
6720 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6722 if (GOMP_single_start ())
6723 BODY;
6724 [ GOMP_barrier (); ] -> unless 'nowait' is present.
6726 FIXME. It may be better to delay expanding the logic of this until
6727 pass_expand_omp. The expanded logic may make the job more difficult
6728 to a synchronization analysis pass. */
6730 static void
6731 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6733 location_t loc = gimple_location (single_stmt);
6734 tree tlabel = create_artificial_label (loc);
6735 tree flabel = create_artificial_label (loc);
6736 gimple *call, *cond;
6737 tree lhs, decl;
6739 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6740 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6741 call = gimple_build_call (decl, 0);
6742 gimple_call_set_lhs (call, lhs);
6743 gimple_seq_add_stmt (pre_p, call);
6745 cond = gimple_build_cond (EQ_EXPR, lhs,
6746 fold_convert_loc (loc, TREE_TYPE (lhs),
6747 boolean_true_node),
6748 tlabel, flabel);
6749 gimple_seq_add_stmt (pre_p, cond);
6750 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6751 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6752 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6756 /* A subroutine of lower_omp_single. Expand the simple form of
6757 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6759 #pragma omp single copyprivate (a, b, c)
6761 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6764 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6766 BODY;
6767 copyout.a = a;
6768 copyout.b = b;
6769 copyout.c = c;
6770 GOMP_single_copy_end (&copyout);
6772 else
6774 a = copyout_p->a;
6775 b = copyout_p->b;
6776 c = copyout_p->c;
6778 GOMP_barrier ();
6781 FIXME. It may be better to delay expanding the logic of this until
6782 pass_expand_omp. The expanded logic may make the job more difficult
6783 to a synchronization analysis pass. */
6785 static void
6786 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6787 omp_context *ctx)
6789 tree ptr_type, t, l0, l1, l2, bfn_decl;
6790 gimple_seq copyin_seq;
6791 location_t loc = gimple_location (single_stmt);
/* SENDER_DECL holds the copied-out values; RECEIVER_DECL is the
   pointer the non-executing threads get from GOMP_single_copy_start.  */
6793 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6795 ptr_type = build_pointer_type (ctx->record_type);
6796 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
/* L0: executing thread's body, L1: copy-in for the others, L2: join.  */
6798 l0 = create_artificial_label (loc);
6799 l1 = create_artificial_label (loc);
6800 l2 = create_artificial_label (loc);
6802 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6803 t = build_call_expr_loc (loc, bfn_decl, 0);
6804 t = fold_convert_loc (loc, ptr_type, t);
6805 gimplify_assign (ctx->receiver_decl, t, pre_p);
/* A NULL result identifies the thread that executes the body.  */
6807 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6808 build_int_cst (ptr_type, 0));
6809 t = build3 (COND_EXPR, void_type_node, t,
6810 build_and_jump (&l0), build_and_jump (&l1));
6811 gimplify_and_add (t, pre_p);
6813 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6815 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
/* Emit the copy-out stores into PRE_P; the matching copy-in loads for
   the other threads accumulate in COPYIN_SEQ.  */
6817 copyin_seq = NULL;
6818 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6819 &copyin_seq, ctx);
6821 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6822 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6823 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6824 gimplify_and_add (t, pre_p);
6826 t = build_and_jump (&l2);
6827 gimplify_and_add (t, pre_p);
6829 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6831 gimple_seq_add_seq (pre_p, copyin_seq);
6833 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6837 /* Expand code for an OpenMP single directive. */
6839 static void
6840 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6842 tree block;
6843 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6844 gbind *bind;
6845 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6847 push_gimplify_context ();
6849 block = make_node (BLOCK);
6850 bind = gimple_build_bind (NULL, NULL, block);
6851 gsi_replace (gsi_p, bind, true);
6852 bind_body = NULL;
6853 dlist = NULL;
6854 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6855 &bind_body, &dlist, ctx, NULL);
6856 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6858 gimple_seq_add_stmt (&bind_body, single_stmt);
/* A non-NULL record type means there is a copyprivate clause.  */
6860 if (ctx->record_type)
6861 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6862 else
6863 lower_omp_single_simple (single_stmt, &bind_body);
6865 gimple_omp_set_body (single_stmt, NULL);
6867 gimple_seq_add_seq (&bind_body, dlist);
6869 bind_body = maybe_catch_exception (bind_body);
6871 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6872 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6873 gimple *g = gimple_build_omp_return (nowait);
6874 gimple_seq_add_stmt (&bind_body_tail, g);
6875 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
/* Clobber the copy-out record once the region is done so its stack
   slot can be reused.  */
6876 if (ctx->record_type)
6878 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6879 tree clobber = build_constructor (ctx->record_type, NULL);
6880 TREE_THIS_VOLATILE (clobber) = 1;
6881 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6882 clobber), GSI_SAME_STMT);
6884 gimple_seq_add_seq (&bind_body, bind_body_tail);
6885 gimple_bind_set_body (bind, bind_body);
6887 pop_gimplify_context (bind);
6889 gimple_bind_append_vars (bind, ctx->block_vars);
6890 BLOCK_VARS (block) = ctx->block_vars;
6891 if (BLOCK_VARS (block))
6892 TREE_USED (block) = 1;
6896 /* Expand code for an OpenMP master directive. */
6898 static void
6899 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6901 tree block, lab = NULL, x, bfn_decl;
6902 gimple *stmt = gsi_stmt (*gsi_p);
6903 gbind *bind;
6904 location_t loc = gimple_location (stmt);
6905 gimple_seq tseq;
6907 push_gimplify_context ();
6909 block = make_node (BLOCK);
6910 bind = gimple_build_bind (NULL, NULL, block);
6911 gsi_replace (gsi_p, bind, true);
6912 gimple_bind_add_stmt (bind, stmt);
6914 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6915 x = build_call_expr_loc (loc, bfn_decl, 0);
6916 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6917 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6918 tseq = NULL;
6919 gimplify_and_add (x, &tseq);
6920 gimple_bind_add_seq (bind, tseq);
6922 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6923 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6924 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6925 gimple_omp_set_body (stmt, NULL);
6927 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6929 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6931 pop_gimplify_context (bind);
6933 gimple_bind_append_vars (bind, ctx->block_vars);
6934 BLOCK_VARS (block) = ctx->block_vars;
6937 /* Helper function for lower_omp_task_reductions. For a specific PASS
6938 find out the current clause it should be processed, or return false
6939 if all have been processed already. */
6941 static inline bool
6942 omp_task_reduction_iterate (int pass, enum tree_code code,
6943 enum omp_clause_code ccode, tree *c, tree *decl,
6944 tree *type, tree *next)
6946 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
/* Plain reductions only count as task reductions on taskloop or when
   the task modifier is present.  */
6948 if (ccode == OMP_CLAUSE_REDUCTION
6949 && code != OMP_TASKLOOP
6950 && !OMP_CLAUSE_REDUCTION_TASK (*c))
6951 continue;
6952 *decl = OMP_CLAUSE_DECL (*c);
6953 *type = TREE_TYPE (*decl);
/* MEM_REF decls (array sections) are handled in pass 1 only.  */
6954 if (TREE_CODE (*decl) == MEM_REF)
6956 if (pass != 1)
6957 continue;
6959 else
6961 if (omp_is_reference (*decl))
6962 *type = TREE_TYPE (*type);
/* Pass 0 handles constant-sized types, pass 1 variable-sized ones.  */
6963 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
6964 continue;
6966 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
6967 return true;
/* Nothing left; clear all outputs for the caller.  */
6969 *decl = NULL_TREE;
6970 *type = NULL_TREE;
6971 *next = NULL_TREE;
6972 return false;
6975 /* Lower task_reduction and reduction clauses (the latter unless CODE is
6976 OMP_TASKGROUP only with task modifier). Register mapping of those in
6977 START sequence and reducing them and unregister them in the END sequence. */
6979 static void
6980 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
6981 gimple_seq *start, gimple_seq *end)
6983 enum omp_clause_code ccode
6984 = (code == OMP_TASKGROUP
6985 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
6986 tree cancellable = NULL_TREE;
6987 clauses = omp_task_reductions_find_first (clauses, code, ccode);
6988 if (clauses == NULL_TREE)
6989 return;
6990 if (code == OMP_FOR || code == OMP_SECTIONS)
6992 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
6993 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
6994 && outer->cancellable)
6996 cancellable = error_mark_node;
6997 break;
6999 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7000 break;
7002 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7003 tree *last = &TYPE_FIELDS (record_type);
7004 unsigned cnt = 0;
7005 if (cancellable)
7007 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7008 ptr_type_node);
7009 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7010 integer_type_node);
7011 *last = field;
7012 DECL_CHAIN (field) = ifield;
7013 last = &DECL_CHAIN (ifield);
7014 DECL_CONTEXT (field) = record_type;
7015 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7016 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7017 DECL_CONTEXT (ifield) = record_type;
7018 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7019 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7021 for (int pass = 0; pass < 2; pass++)
7023 tree decl, type, next;
7024 for (tree c = clauses;
7025 omp_task_reduction_iterate (pass, code, ccode,
7026 &c, &decl, &type, &next); c = next)
7028 ++cnt;
7029 tree new_type = type;
7030 if (ctx->outer)
7031 new_type = remap_type (type, &ctx->outer->cb);
7032 tree field
7033 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7034 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7035 new_type);
7036 if (DECL_P (decl) && type == TREE_TYPE (decl))
7038 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7039 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7040 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7042 else
7043 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7044 DECL_CONTEXT (field) = record_type;
7045 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7046 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7047 *last = field;
7048 last = &DECL_CHAIN (field);
7049 tree bfield
7050 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7051 boolean_type_node);
7052 DECL_CONTEXT (bfield) = record_type;
7053 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7054 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7055 *last = bfield;
7056 last = &DECL_CHAIN (bfield);
7059 *last = NULL_TREE;
7060 layout_type (record_type);
7062 /* Build up an array which registers with the runtime all the reductions
7063 and deregisters them at the end. Format documented in libgomp/task.c. */
7064 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7065 tree avar = create_tmp_var_raw (atype);
7066 gimple_add_tmp_var (avar);
7067 TREE_ADDRESSABLE (avar) = 1;
7068 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7069 NULL_TREE, NULL_TREE);
7070 tree t = build_int_cst (pointer_sized_int_node, cnt);
7071 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7072 gimple_seq seq = NULL;
7073 tree sz = fold_convert (pointer_sized_int_node,
7074 TYPE_SIZE_UNIT (record_type));
7075 int cachesz = 64;
7076 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7077 build_int_cst (pointer_sized_int_node, cachesz - 1));
7078 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7079 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7080 ctx->task_reductions.create (1 + cnt);
7081 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7082 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7083 ? sz : NULL_TREE);
7084 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7085 gimple_seq_add_seq (start, seq);
7086 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7087 NULL_TREE, NULL_TREE);
7088 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7089 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7090 NULL_TREE, NULL_TREE);
7091 t = build_int_cst (pointer_sized_int_node,
7092 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7093 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7094 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7095 NULL_TREE, NULL_TREE);
7096 t = build_int_cst (pointer_sized_int_node, -1);
7097 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7098 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7099 NULL_TREE, NULL_TREE);
7100 t = build_int_cst (pointer_sized_int_node, 0);
7101 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7103 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7104 and for each task reduction checks a bool right after the private variable
7105 within that thread's chunk; if the bool is clear, it hasn't been
7106 initialized and thus isn't going to be reduced nor destructed, otherwise
7107 reduce and destruct it. */
7108 tree idx = create_tmp_var (size_type_node);
7109 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7110 tree num_thr_sz = create_tmp_var (size_type_node);
7111 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7112 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7113 tree lab3 = NULL_TREE;
7114 gimple *g;
7115 if (code == OMP_FOR || code == OMP_SECTIONS)
7117 /* For worksharing constructs, only perform it in the master thread,
7118 with the exception of cancelled implicit barriers - then only handle
7119 the current thread. */
7120 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7121 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7122 tree thr_num = create_tmp_var (integer_type_node);
7123 g = gimple_build_call (t, 0);
7124 gimple_call_set_lhs (g, thr_num);
7125 gimple_seq_add_stmt (end, g);
7126 if (cancellable)
7128 tree c;
7129 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7130 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7131 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7132 if (code == OMP_FOR)
7133 c = gimple_omp_for_clauses (ctx->stmt);
7134 else if (code == OMP_SECTIONS)
7135 c = gimple_omp_sections_clauses (ctx->stmt);
7136 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7137 cancellable = c;
7138 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7139 lab5, lab6);
7140 gimple_seq_add_stmt (end, g);
7141 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7142 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7143 gimple_seq_add_stmt (end, g);
7144 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7145 build_one_cst (TREE_TYPE (idx)));
7146 gimple_seq_add_stmt (end, g);
7147 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7148 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7150 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7151 gimple_seq_add_stmt (end, g);
7152 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7154 if (code != OMP_PARALLEL)
7156 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7157 tree num_thr = create_tmp_var (integer_type_node);
7158 g = gimple_build_call (t, 0);
7159 gimple_call_set_lhs (g, num_thr);
7160 gimple_seq_add_stmt (end, g);
7161 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7162 gimple_seq_add_stmt (end, g);
7163 if (cancellable)
7164 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7166 else
7168 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7169 OMP_CLAUSE__REDUCTEMP_);
7170 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7171 t = fold_convert (size_type_node, t);
7172 gimplify_assign (num_thr_sz, t, end);
7174 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7175 NULL_TREE, NULL_TREE);
7176 tree data = create_tmp_var (pointer_sized_int_node);
7177 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7178 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7179 tree ptr;
7180 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7181 ptr = create_tmp_var (build_pointer_type (record_type));
7182 else
7183 ptr = create_tmp_var (ptr_type_node);
7184 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7186 tree field = TYPE_FIELDS (record_type);
7187 cnt = 0;
7188 if (cancellable)
7189 field = DECL_CHAIN (DECL_CHAIN (field));
7190 for (int pass = 0; pass < 2; pass++)
7192 tree decl, type, next;
7193 for (tree c = clauses;
7194 omp_task_reduction_iterate (pass, code, ccode,
7195 &c, &decl, &type, &next); c = next)
7197 tree var = decl, ref;
7198 if (TREE_CODE (decl) == MEM_REF)
7200 var = TREE_OPERAND (var, 0);
7201 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7202 var = TREE_OPERAND (var, 0);
7203 tree v = var;
7204 if (TREE_CODE (var) == ADDR_EXPR)
7205 var = TREE_OPERAND (var, 0);
7206 else if (TREE_CODE (var) == INDIRECT_REF)
7207 var = TREE_OPERAND (var, 0);
7208 tree orig_var = var;
7209 if (is_variable_sized (var))
7211 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7212 var = DECL_VALUE_EXPR (var);
7213 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7214 var = TREE_OPERAND (var, 0);
7215 gcc_assert (DECL_P (var));
7217 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7218 if (orig_var != var)
7219 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7220 else if (TREE_CODE (v) == ADDR_EXPR)
7221 t = build_fold_addr_expr (t);
7222 else if (TREE_CODE (v) == INDIRECT_REF)
7223 t = build_fold_indirect_ref (t);
7224 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7226 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7227 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7228 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7230 if (!integer_zerop (TREE_OPERAND (decl, 1)))
7231 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7232 fold_convert (size_type_node,
7233 TREE_OPERAND (decl, 1)));
7235 else
7237 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7238 if (!omp_is_reference (decl))
7239 t = build_fold_addr_expr (t);
7241 t = fold_convert (pointer_sized_int_node, t);
7242 seq = NULL;
7243 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7244 gimple_seq_add_seq (start, seq);
7245 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7246 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7247 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7248 t = unshare_expr (byte_position (field));
7249 t = fold_convert (pointer_sized_int_node, t);
7250 ctx->task_reduction_map->put (c, cnt);
7251 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7252 ? t : NULL_TREE);
7253 seq = NULL;
7254 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7255 gimple_seq_add_seq (start, seq);
7256 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7257 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7258 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7260 tree bfield = DECL_CHAIN (field);
7261 tree cond;
7262 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7263 /* In parallel or worksharing all threads unconditionally
7264 initialize all their task reduction private variables. */
7265 cond = boolean_true_node;
7266 else if (TREE_TYPE (ptr) == ptr_type_node)
7268 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7269 unshare_expr (byte_position (bfield)));
7270 seq = NULL;
7271 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7272 gimple_seq_add_seq (end, seq);
7273 tree pbool = build_pointer_type (TREE_TYPE (bfield));
7274 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7275 build_int_cst (pbool, 0));
7277 else
7278 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7279 build_simple_mem_ref (ptr), bfield, NULL_TREE);
7280 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7281 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7282 tree condv = create_tmp_var (boolean_type_node);
7283 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7284 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7285 lab3, lab4);
7286 gimple_seq_add_stmt (end, g);
7287 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7288 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7290 /* If this reduction doesn't need destruction and parallel
7291 has been cancelled, there is nothing to do for this
7292 reduction, so jump around the merge operation. */
7293 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7294 g = gimple_build_cond (NE_EXPR, cancellable,
7295 build_zero_cst (TREE_TYPE (cancellable)),
7296 lab4, lab5);
7297 gimple_seq_add_stmt (end, g);
7298 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7301 tree new_var;
7302 if (TREE_TYPE (ptr) == ptr_type_node)
7304 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7305 unshare_expr (byte_position (field)));
7306 seq = NULL;
7307 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7308 gimple_seq_add_seq (end, seq);
7309 tree pbool = build_pointer_type (TREE_TYPE (field));
7310 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7311 build_int_cst (pbool, 0));
7313 else
7314 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7315 build_simple_mem_ref (ptr), field, NULL_TREE);
7317 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7318 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7319 ref = build_simple_mem_ref (ref);
7320 /* reduction(-:var) sums up the partial results, so it acts
7321 identically to reduction(+:var). */
7322 if (rcode == MINUS_EXPR)
7323 rcode = PLUS_EXPR;
7324 if (TREE_CODE (decl) == MEM_REF)
7326 tree type = TREE_TYPE (new_var);
7327 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7328 tree i = create_tmp_var (TREE_TYPE (v));
7329 tree ptype = build_pointer_type (TREE_TYPE (type));
7330 if (DECL_P (v))
7332 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7333 tree vv = create_tmp_var (TREE_TYPE (v));
7334 gimplify_assign (vv, v, start);
7335 v = vv;
7337 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7338 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7339 new_var = build_fold_addr_expr (new_var);
7340 new_var = fold_convert (ptype, new_var);
7341 ref = fold_convert (ptype, ref);
7342 tree m = create_tmp_var (ptype);
7343 gimplify_assign (m, new_var, end);
7344 new_var = m;
7345 m = create_tmp_var (ptype);
7346 gimplify_assign (m, ref, end);
7347 ref = m;
7348 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
7349 tree body = create_artificial_label (UNKNOWN_LOCATION);
7350 tree endl = create_artificial_label (UNKNOWN_LOCATION);
7351 gimple_seq_add_stmt (end, gimple_build_label (body));
7352 tree priv = build_simple_mem_ref (new_var);
7353 tree out = build_simple_mem_ref (ref);
7354 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7356 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7357 tree decl_placeholder
7358 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7359 tree lab6 = NULL_TREE;
7360 if (cancellable)
7362 /* If this reduction needs destruction and parallel
7363 has been cancelled, jump around the merge operation
7364 to the destruction. */
7365 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7366 lab6 = create_artificial_label (UNKNOWN_LOCATION);
7367 tree zero = build_zero_cst (TREE_TYPE (cancellable));
7368 g = gimple_build_cond (NE_EXPR, cancellable, zero,
7369 lab6, lab5);
7370 gimple_seq_add_stmt (end, g);
7371 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7373 SET_DECL_VALUE_EXPR (placeholder, out);
7374 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7375 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7376 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7377 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7378 gimple_seq_add_seq (end,
7379 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7380 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7381 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7383 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7384 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7386 if (cancellable)
7387 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7388 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
7389 if (x)
7391 gimple_seq tseq = NULL;
7392 gimplify_stmt (&x, &tseq);
7393 gimple_seq_add_seq (end, tseq);
7396 else
7398 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
7399 out = unshare_expr (out);
7400 gimplify_assign (out, x, end);
7402 gimple *g
7403 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7404 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7405 gimple_seq_add_stmt (end, g);
7406 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7407 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7408 gimple_seq_add_stmt (end, g);
7409 g = gimple_build_assign (i, PLUS_EXPR, i,
7410 build_int_cst (TREE_TYPE (i), 1));
7411 gimple_seq_add_stmt (end, g);
7412 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
7413 gimple_seq_add_stmt (end, g);
7414 gimple_seq_add_stmt (end, gimple_build_label (endl));
7416 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7418 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7419 tree oldv = NULL_TREE;
7420 tree lab6 = NULL_TREE;
7421 if (cancellable)
7423 /* If this reduction needs destruction and parallel
7424 has been cancelled, jump around the merge operation
7425 to the destruction. */
7426 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7427 lab6 = create_artificial_label (UNKNOWN_LOCATION);
7428 tree zero = build_zero_cst (TREE_TYPE (cancellable));
7429 g = gimple_build_cond (NE_EXPR, cancellable, zero,
7430 lab6, lab5);
7431 gimple_seq_add_stmt (end, g);
7432 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7434 if (omp_is_reference (decl)
7435 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7436 TREE_TYPE (ref)))
7437 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7438 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7439 tree refv = create_tmp_var (TREE_TYPE (ref));
7440 gimplify_assign (refv, ref, end);
7441 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
7442 SET_DECL_VALUE_EXPR (placeholder, ref);
7443 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7444 tree d = maybe_lookup_decl (decl, ctx);
7445 gcc_assert (d);
7446 if (DECL_HAS_VALUE_EXPR_P (d))
7447 oldv = DECL_VALUE_EXPR (d);
7448 if (omp_is_reference (var))
7450 tree v = fold_convert (TREE_TYPE (d),
7451 build_fold_addr_expr (new_var));
7452 SET_DECL_VALUE_EXPR (d, v);
7454 else
7455 SET_DECL_VALUE_EXPR (d, new_var);
7456 DECL_HAS_VALUE_EXPR_P (d) = 1;
7457 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7458 if (oldv)
7459 SET_DECL_VALUE_EXPR (d, oldv);
7460 else
7462 SET_DECL_VALUE_EXPR (d, NULL_TREE);
7463 DECL_HAS_VALUE_EXPR_P (d) = 0;
7465 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7466 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7467 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7468 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7469 if (cancellable)
7470 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7471 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
7472 if (x)
7474 gimple_seq tseq = NULL;
7475 gimplify_stmt (&x, &tseq);
7476 gimple_seq_add_seq (end, tseq);
7479 else
7481 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
7482 ref = unshare_expr (ref);
7483 gimplify_assign (ref, x, end);
7485 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7486 ++cnt;
7487 field = DECL_CHAIN (bfield);
7491 if (code == OMP_TASKGROUP)
7493 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
7494 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7495 gimple_seq_add_stmt (start, g);
7497 else
7499 tree c;
7500 if (code == OMP_FOR)
7501 c = gimple_omp_for_clauses (ctx->stmt);
7502 else if (code == OMP_SECTIONS)
7503 c = gimple_omp_sections_clauses (ctx->stmt);
7504 else
7505 c = gimple_omp_taskreg_clauses (ctx->stmt);
7506 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
7507 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
7508 build_fold_addr_expr (avar));
7509 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
7512 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
7513 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
7514 size_one_node));
7515 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
7516 gimple_seq_add_stmt (end, g);
7517 gimple_seq_add_stmt (end, gimple_build_label (lab2));
7518 if (code == OMP_FOR || code == OMP_SECTIONS)
7520 enum built_in_function bfn
7521 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
7522 t = builtin_decl_explicit (bfn);
7523 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
7524 tree arg;
7525 if (cancellable)
7527 arg = create_tmp_var (c_bool_type);
7528 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
7529 cancellable));
7531 else
7532 arg = build_int_cst (c_bool_type, 0);
7533 g = gimple_build_call (t, 1, arg);
7535 else
7537 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
7538 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7540 gimple_seq_add_stmt (end, g);
7541 t = build_constructor (atype, NULL);
7542 TREE_THIS_VOLATILE (t) = 1;
7543 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
7546 /* Expand code for an OpenMP taskgroup directive. */
/* Lowering replaces the GIMPLE_OMP_TASKGROUP statement at *GSI_P with a
   GIMPLE_BIND of the shape:
     GOMP_taskgroup_start ();
     <task reduction registration, if any>
     <lowered taskgroup body>
     OMP_return
     <task reduction teardown sequence (DSEQ)>
   CTX is the omp_context created for the taskgroup region.  */
7548 static void
7549 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7551 gimple *stmt = gsi_stmt (*gsi_p);
7552 gcall *x;
7553 gbind *bind;
/* Sequence for task-reduction teardown, filled in by
   lower_omp_task_reductions and appended after the OMP return below.  */
7554 gimple_seq dseq = NULL;
7555 tree block = make_node (BLOCK);
7557 bind = gimple_build_bind (NULL, NULL, block);
7558 gsi_replace (gsi_p, bind, true);
7559 gimple_bind_add_stmt (bind, stmt);
7561 push_gimplify_context ();
/* Emit the GOMP_taskgroup_start () runtime call first.  */
7563 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
7565 gimple_bind_add_stmt (bind, x);
/* Register any task_reduction clauses; setup code goes into the bind
   body, teardown into DSEQ.  */
7567 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
7568 gimple_omp_taskgroup_clauses (stmt),
7569 gimple_bind_body_ptr (bind), &dseq);
/* Lower the body and splice it into the bind, clearing it on the
   original statement.  */
7571 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7572 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7573 gimple_omp_set_body (stmt, NULL);
7575 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7576 gimple_bind_add_seq (bind, dseq);
7578 pop_gimplify_context (bind);
7580 gimple_bind_append_vars (bind, ctx->block_vars);
7581 BLOCK_VARS (block) = ctx->block_vars;
7585 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
/* ORD_STMT is an ordered construct carrying depend(sink:...) clauses inside
   a GIMPLE_OMP_FOR (found via CTX->outer).  Adjacent ordered-depend(sink)
   statements are first merged into ORD_STMT, then all sink vectors are
   folded into a single conservative dependence vector (GCD of the first
   dimension, lexicographic minimum of the rest).  If all clauses end up
   removed, ORD_STMT is replaced by a nop.  */
7587 static void
7588 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
7589 omp_context *ctx)
7591 struct omp_for_data fd;
/* Only meaningful directly inside an omp-for worksharing loop.  */
7592 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
7593 return;
7595 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
7596 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
7597 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
7598 if (!fd.ordered)
7599 return;
7601 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
7602 tree c = gimple_omp_ordered_clauses (ord_stmt);
7603 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7604 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
7606 /* Merge depend clauses from multiple adjacent
7607 #pragma omp ordered depend(sink:...) constructs
7608 into one #pragma omp ordered depend(sink:...), so that
7609 we can optimize them together. */
7610 gimple_stmt_iterator gsi = *gsi_p;
7611 gsi_next (&gsi);
7612 while (!gsi_end_p (gsi))
7614 gimple *stmt = gsi_stmt (gsi);
/* Skip over debug stmts and nops between the ordered constructs.  */
7615 if (is_gimple_debug (stmt)
7616 || gimple_code (stmt) == GIMPLE_NOP)
7618 gsi_next (&gsi);
7619 continue;
7621 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
7622 break;
7623 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
7624 c = gimple_omp_ordered_clauses (ord_stmt2);
/* Only sibling ordered stmts that are themselves depend(sink) can be
   merged; anything else terminates the scan.  */
7625 if (c == NULL_TREE
7626 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
7627 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
7628 break;
/* Append the sibling's clauses to ORD_STMT's chain and delete the
   now-redundant sibling statement.  */
7629 while (*list_p)
7630 list_p = &OMP_CLAUSE_CHAIN (*list_p);
7631 *list_p = c;
7632 gsi_remove (&gsi, true);
7636 /* Canonicalize sink dependence clauses into one folded clause if
7637 possible.
7639 The basic algorithm is to create a sink vector whose first
7640 element is the GCD of all the first elements, and whose remaining
7641 elements are the minimum of the subsequent columns.
7643 We ignore dependence vectors whose first element is zero because
7644 such dependencies are known to be executed by the same thread.
7646 We take into account the direction of the loop, so a minimum
7647 becomes a maximum if the loop is iterating forwards. We also
7648 ignore sink clauses where the loop direction is unknown, or where
7649 the offsets are clearly invalid because they are not a multiple
7650 of the loop increment.
7652 For example:
7654 #pragma omp for ordered(2)
7655 for (i=0; i < N; ++i)
7656 for (j=0; j < M; ++j)
7658 #pragma omp ordered \
7659 depend(sink:i-8,j-2) \
7660 depend(sink:i,j-1) \ // Completely ignored because i+0.
7661 depend(sink:i-4,j-3) \
7662 depend(sink:i-6,j-4)
7663 #pragma omp ordered depend(source)
7666 Folded clause is:
7668 depend(sink:-gcd(8,4,6),-min(2,3,4))
7669 -or-
7670 depend(sink:-2,-2)
7673 /* FIXME: Computing GCD's where the first element is zero is
7674 non-trivial in the presence of collapsed loops. Do this later. */
7675 if (fd.collapse > 1)
7676 return;
/* folded_deps[0 .. len-1] holds the candidate folded vector so far;
   folded_deps[len .. 2*len-2] holds the current clause's offsets for
   dimensions 1 .. len-1 (the first dimension needs no scratch slot).  */
7678 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
7680 /* wide_int is not a POD so it must be default-constructed. */
7681 for (unsigned i = 0; i != 2 * len - 1; ++i)
7682 new (static_cast<void*>(folded_deps + i)) wide_int ();
/* The clause whose vector is the current folding candidate, or NULL
   before the first usable clause is seen.  */
7684 tree folded_dep = NULL_TREE;
7685 /* TRUE if the first dimension's offset is negative. */
7686 bool neg_offset_p = false;
7688 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
7689 unsigned int i;
7690 while ((c = *list_p) != NULL)
7692 bool remove = false;
7694 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
7695 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
7696 goto next_ordered_clause;
7698 tree vec;
/* Walk the TREE_LIST of (offset, var) pairs, one per loop dimension.  */
7699 for (vec = OMP_CLAUSE_DECL (c), i = 0;
7700 vec && TREE_CODE (vec) == TREE_LIST;
7701 vec = TREE_CHAIN (vec), ++i)
7703 gcc_assert (i < len);
7705 /* omp_extract_for_data has canonicalized the condition. */
7706 gcc_assert (fd.loops[i].cond_code == LT_EXPR
7707 || fd.loops[i].cond_code == GT_EXPR);
7708 bool forward = fd.loops[i].cond_code == LT_EXPR;
7709 bool maybe_lexically_later = true;
7711 /* While the committee makes up its mind, bail if we have any
7712 non-constant steps. */
7713 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
7714 goto lower_omp_ordered_ret;
7716 tree itype = TREE_TYPE (TREE_VALUE (vec));
7717 if (POINTER_TYPE_P (itype))
7718 itype = sizetype;
7719 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
7720 TYPE_PRECISION (itype),
7721 TYPE_SIGN (itype));
7723 /* Ignore invalid offsets that are not multiples of the step. */
7724 if (!wi::multiple_of_p (wi::abs (offset),
7725 wi::abs (wi::to_wide (fd.loops[i].step)),
7726 UNSIGNED))
7728 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7729 "ignoring sink clause with offset that is not "
7730 "a multiple of the loop step");
7731 remove = true;
7732 goto next_ordered_clause;
7735 /* Calculate the first dimension. The first dimension of
7736 the folded dependency vector is the GCD of the first
7737 elements, while ignoring any first elements whose offset
7738 is 0. */
7739 if (i == 0)
7741 /* Ignore dependence vectors whose first dimension is 0. */
7742 if (offset == 0)
7744 remove = true;
7745 goto next_ordered_clause;
7747 else
7749 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
7751 error_at (OMP_CLAUSE_LOCATION (c),
7752 "first offset must be in opposite direction "
7753 "of loop iterations");
7754 goto lower_omp_ordered_ret;
7756 if (forward)
7757 offset = -offset;
7758 neg_offset_p = forward;
7759 /* Initialize the first time around. */
7760 if (folded_dep == NULL_TREE)
7762 folded_dep = c;
7763 folded_deps[0] = offset;
7765 else
7766 folded_deps[0] = wi::gcd (folded_deps[0],
7767 offset, UNSIGNED);
7770 /* Calculate minimum for the remaining dimensions. */
7771 else
7773 folded_deps[len + i - 1] = offset;
7774 if (folded_dep == c)
7775 folded_deps[i] = offset;
7776 else if (maybe_lexically_later
7777 && !wi::eq_p (folded_deps[i], offset))
7779 if (forward ^ wi::gts_p (folded_deps[i], offset))
/* The current clause is lexically earlier; adopt it as the new
   folding candidate, copying its scratch offsets over.  */
7781 unsigned int j;
7782 folded_dep = c;
7783 for (j = 1; j <= i; j++)
7784 folded_deps[j] = folded_deps[len + j - 1];
7786 else
7787 maybe_lexically_later = false;
7791 gcc_assert (i == len);
/* Every processed sink clause is unlinked here; the surviving folded
   clause is re-chained onto the statement at the end.  */
7793 remove = true;
7795 next_ordered_clause:
7796 if (remove)
7797 *list_p = OMP_CLAUSE_CHAIN (c);
7798 else
7799 list_p = &OMP_CLAUSE_CHAIN (c);
7802 if (folded_dep)
/* Undo the direction normalization applied to the first dimension.  */
7804 if (neg_offset_p)
7805 folded_deps[0] = -folded_deps[0];
7807 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
7808 if (POINTER_TYPE_P (itype))
7809 itype = sizetype;
7811 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
7812 = wide_int_to_tree (itype, folded_deps[0]);
7813 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
7814 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
7817 lower_omp_ordered_ret:
7819 /* Ordered without clauses is #pragma omp ordered threads, while we want
7820 a nop instead if we remove all clauses. */
7821 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
7822 gsi_replace (gsi_p, gimple_build_nop (), true);
7826 /* Expand code for an OpenMP ordered directive. */
/* Replaces the GIMPLE_OMP_ORDERED at *GSI_P with a GIMPLE_BIND wrapping the
   body in start/end runtime (or internal-fn) calls.  Three shapes exist:
   plain (GOMP_ordered_start/end), simd (IFN_GOMP_SIMD_ORDERED_START/END),
   and possibly-SIMT simd, where the body is additionally looped over SIMT
   lanes in lane order.  Ordered with depend clauses is handled elsewhere
   during expansion and left untouched here.  */
7828 static void
7829 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7831 tree block;
7832 gimple *stmt = gsi_stmt (*gsi_p), *g;
7833 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
7834 gcall *x;
7835 gbind *bind;
7836 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7837 OMP_CLAUSE_SIMD);
7838 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
7839 loop. */
7840 bool maybe_simt
7841 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
7842 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7843 OMP_CLAUSE_THREADS);
7845 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7846 OMP_CLAUSE_DEPEND))
7848 /* FIXME: This needs to be moved to the expansion to verify various
7849 conditions only testable on cfg with dominators computed, and also
7850 all the depend clauses to be merged still might need to be available
7851 for the runtime checks. */
7852 if (0)
7853 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
7854 return;
7857 push_gimplify_context ();
7859 block = make_node (BLOCK);
7860 bind = gimple_build_bind (NULL, NULL, block);
7861 gsi_replace (gsi_p, bind, true);
7862 gimple_bind_add_stmt (bind, stmt);
/* Open the ordered region: internal fn for simd, runtime call otherwise.
   THREADS is passed through as an integer flag to the internal fn.  */
7864 if (simd)
7866 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
7867 build_int_cst (NULL_TREE, threads));
7868 cfun->has_simduid_loops = true;
7870 else
7871 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
7873 gimple_bind_add_stmt (bind, x);
7875 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
/* For SIMT targets, iterate the body over lanes: start COUNTER at this
   lane's index and only execute the body when IFN_GOMP_SIMT_ORDERED_PRED
   says it is this lane's turn.  */
7876 if (maybe_simt)
7878 counter = create_tmp_var (integer_type_node);
7879 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
7880 gimple_call_set_lhs (g, counter);
7881 gimple_bind_add_stmt (bind, g);
7883 body = create_artificial_label (UNKNOWN_LOCATION);
7884 test = create_artificial_label (UNKNOWN_LOCATION);
7885 gimple_bind_add_stmt (bind, gimple_build_label (body));
7887 tree simt_pred = create_tmp_var (integer_type_node);
7888 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
7889 gimple_call_set_lhs (g, simt_pred);
7890 gimple_bind_add_stmt (bind, g);
7892 tree t = create_artificial_label (UNKNOWN_LOCATION);
7893 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
7894 gimple_bind_add_stmt (bind, g);
7896 gimple_bind_add_stmt (bind, gimple_build_label (t));
7898 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7899 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7900 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7901 gimple_omp_set_body (stmt, NULL);
/* SIMT loop tail: decrement COUNTER and loop back while any lane still
   has a non-negative counter (voted across the warp).  */
7903 if (maybe_simt)
7905 gimple_bind_add_stmt (bind, gimple_build_label (test));
7906 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
7907 gimple_bind_add_stmt (bind, g);
7909 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
7910 tree nonneg = create_tmp_var (integer_type_node);
7911 gimple_seq tseq = NULL;
7912 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
7913 gimple_bind_add_seq (bind, tseq);
7915 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
7916 gimple_call_set_lhs (g, nonneg);
7917 gimple_bind_add_stmt (bind, g);
7919 tree end = create_artificial_label (UNKNOWN_LOCATION);
7920 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
7921 gimple_bind_add_stmt (bind, g);
7923 gimple_bind_add_stmt (bind, gimple_build_label (end));
/* Close the ordered region, mirroring the open above.  */
7925 if (simd)
7926 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
7927 build_int_cst (NULL_TREE, threads));
7928 else
7929 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
7931 gimple_bind_add_stmt (bind, x);
7933 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7935 pop_gimplify_context (bind);
7937 gimple_bind_append_vars (bind, ctx->block_vars);
7938 BLOCK_VARS (block) = gimple_bind_vars (bind);
7942 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
7943 substitution of a couple of function calls. But in the NAMED case,
7944 requires that languages coordinate a symbol name. It is therefore
7945 best put here in common code. */
/* Map from critical-section name to its lazily created public mutex
   variable, shared across all named critical sections in the TU.  */
7947 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
/* Replace the GIMPLE_OMP_CRITICAL at *GSI_P with a bind of:
     <lock call> <lowered body> <unlock call> OMP_return
   Named sections use GOMP_critical_name_start/end on a per-name mutex;
   unnamed ones use GOMP_critical_start/end.  */
7949 static void
7950 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7952 tree block;
7953 tree name, lock, unlock;
7954 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
7955 gbind *bind;
7956 location_t loc = gimple_location (stmt);
7957 gimple_seq tbody;
7959 name = gimple_omp_critical_name (stmt);
7960 if (name)
7962 tree decl;
7964 if (!critical_name_mutexes)
7965 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
7967 tree *n = critical_name_mutexes->get (name);
7968 if (n == NULL)
/* First use of this name: create a TREE_PUBLIC/DECL_COMMON mutex var
   ".gomp_critical_user_<name>" so all TUs using the same name share
   one lock.  */
7970 char *new_str;
7972 decl = create_tmp_var_raw (ptr_type_node);
7974 new_str = ACONCAT ((".gomp_critical_user_",
7975 IDENTIFIER_POINTER (name), NULL));
7976 DECL_NAME (decl) = get_identifier (new_str);
7977 TREE_PUBLIC (decl) = 1;
7978 TREE_STATIC (decl) = 1;
7979 DECL_COMMON (decl) = 1;
7980 DECL_ARTIFICIAL (decl) = 1;
7981 DECL_IGNORED_P (decl) = 1;
7983 varpool_node::finalize_decl (decl);
7985 critical_name_mutexes->put (name, decl);
7987 else
7988 decl = *n;
7990 /* If '#pragma omp critical' is inside offloaded region or
7991 inside function marked as offloadable, the symbol must be
7992 marked as offloadable too. */
7993 omp_context *octx;
7994 if (cgraph_node::get (current_function_decl)->offloadable)
7995 varpool_node::get_create (decl)->offloadable = 1;
7996 else
7997 for (octx = ctx->outer; octx; octx = octx->outer)
7998 if (is_gimple_omp_offloaded (octx->stmt))
8000 varpool_node::get_create (decl)->offloadable = 1;
8001 break;
8004 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
8005 lock = build_call_expr_loc (loc, lock, 1,
8006 build_fold_addr_expr_loc (loc, decl));
8008 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
8009 unlock = build_call_expr_loc (loc, unlock, 1,
8010 build_fold_addr_expr_loc (loc, decl));
8012 else
/* Unnamed critical: global lock, no argument.  */
8014 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
8015 lock = build_call_expr_loc (loc, lock, 0);
8017 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
8018 unlock = build_call_expr_loc (loc, unlock, 0);
8021 push_gimplify_context ();
8023 block = make_node (BLOCK);
8024 bind = gimple_build_bind (NULL, NULL, block);
8025 gsi_replace (gsi_p, bind, true);
8026 gimple_bind_add_stmt (bind, stmt);
8028 tbody = gimple_bind_body (bind);
8029 gimplify_and_add (lock, &tbody);
8030 gimple_bind_set_body (bind, tbody);
8032 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8033 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8034 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8035 gimple_omp_set_body (stmt, NULL);
8037 tbody = gimple_bind_body (bind);
8038 gimplify_and_add (unlock, &tbody);
8039 gimple_bind_set_body (bind, tbody);
8041 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8043 pop_gimplify_context (bind);
8044 gimple_bind_append_vars (bind, ctx->block_vars);
8045 BLOCK_VARS (block) = gimple_bind_vars (bind);
8048 /* A subroutine of lower_omp_for. Generate code to emit the predicate
8049 for a lastprivate clause. Given a loop control predicate of (V
8050 cond N2), we gate the clause on (!(V cond N2)). The lowered form
8051 is appended to *DLIST, iterator initialization is appended to
8052 *BODY_P. */
8054 static void
8055 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
8056 gimple_seq *dlist, struct omp_context *ctx)
8058 tree clauses, cond, vinit;
8059 enum tree_code cond_code;
8060 gimple_seq stmts;
/* Negate the loop condition: the lastprivate copy-out should run only
   after the loop has terminated.  */
8062 cond_code = fd->loop.cond_code;
8063 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
8065 /* When possible, use a strict equality expression. This can let VRP
8066 type optimizations deduce the value and remove a copy. */
8067 if (tree_fits_shwi_p (fd->loop.step))
8069 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
8070 if (step == 1 || step == -1)
8071 cond_code = EQ_EXPR;
8074 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
8075 || gimple_omp_for_grid_phony (fd->for_stmt))
8076 cond = omp_grid_lastprivate_predicate (fd);
8077 else
8079 tree n2 = fd->loop.n2;
/* For a collapsed combined loop with a non-constant bound, the real
   end value lives in a _looptemp_ of an enclosing construct; dig it
   out so the predicate tests against the correct N2.  */
8080 if (fd->collapse > 1
8081 && TREE_CODE (n2) != INTEGER_CST
8082 && gimple_omp_for_combined_into_p (fd->for_stmt))
8084 struct omp_context *taskreg_ctx = NULL;
8085 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
8087 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
8088 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
8089 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
8091 if (gimple_omp_for_combined_into_p (gfor))
8093 gcc_assert (ctx->outer->outer
8094 && is_parallel_ctx (ctx->outer->outer));
8095 taskreg_ctx = ctx->outer->outer;
8097 else
8099 struct omp_for_data outer_fd;
8100 omp_extract_for_data (gfor, &outer_fd, NULL);
8101 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
8104 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
8105 taskreg_ctx = ctx->outer->outer;
8107 else if (is_taskreg_ctx (ctx->outer))
8108 taskreg_ctx = ctx->outer;
8109 if (taskreg_ctx)
/* Skip the per-dimension _looptemp_ clauses, then the one after
   them (if present) carries the end value for this loop.  */
8111 int i;
8112 tree taskreg_clauses
8113 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
8114 tree innerc = omp_find_clause (taskreg_clauses,
8115 OMP_CLAUSE__LOOPTEMP_);
8116 gcc_assert (innerc);
8117 for (i = 0; i < fd->collapse; i++)
8119 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8120 OMP_CLAUSE__LOOPTEMP_);
8121 gcc_assert (innerc);
8123 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8124 OMP_CLAUSE__LOOPTEMP_);
8125 if (innerc)
8126 n2 = fold_convert (TREE_TYPE (n2),
8127 lookup_decl (OMP_CLAUSE_DECL (innerc),
8128 taskreg_ctx));
8131 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
8134 clauses = gimple_omp_for_clauses (fd->for_stmt);
8135 stmts = NULL;
/* Emit the guarded lastprivate copy-out ahead of whatever is already
   in *DLIST.  */
8136 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
8137 if (!gimple_seq_empty_p (stmts))
8139 gimple_seq_add_seq (&stmts, *dlist);
8140 *dlist = stmts;
8142 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
8143 vinit = fd->loop.n1;
8144 if (cond_code == EQ_EXPR
8145 && tree_fits_shwi_p (fd->loop.n2)
8146 && ! integer_zerop (fd->loop.n2))
8147 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
8148 else
8149 vinit = unshare_expr (vinit);
8151 /* Initialize the iterator variable, so that threads that don't execute
8152 any iterations don't execute the lastprivate clauses by accident. */
8153 gimplify_assign (fd->loop.v, vinit, body_p);
8158 /* Lower code for an OMP loop directive. */
8160 static void
8161 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8163 tree *rhs_p, block;
8164 struct omp_for_data fd, *fdp = NULL;
8165 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
8166 gbind *new_stmt;
8167 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
8168 gimple_seq cnt_list = NULL;
8169 gimple_seq oacc_head = NULL, oacc_tail = NULL;
8170 size_t i;
8172 push_gimplify_context ();
8174 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
8176 block = make_node (BLOCK);
8177 new_stmt = gimple_build_bind (NULL, NULL, block);
8178 /* Replace at gsi right away, so that 'stmt' is no member
8179 of a sequence anymore as we're going to add to a different
8180 one below. */
8181 gsi_replace (gsi_p, new_stmt, true);
8183 /* Move declaration of temporaries in the loop body before we make
8184 it go away. */
8185 omp_for_body = gimple_omp_body (stmt);
8186 if (!gimple_seq_empty_p (omp_for_body)
8187 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
8189 gbind *inner_bind
8190 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
8191 tree vars = gimple_bind_vars (inner_bind);
8192 gimple_bind_append_vars (new_stmt, vars);
8193 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
8194 keep them on the inner_bind and it's block. */
8195 gimple_bind_set_vars (inner_bind, NULL_TREE);
8196 if (gimple_bind_block (inner_bind))
8197 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
8200 if (gimple_omp_for_combined_into_p (stmt))
8202 omp_extract_for_data (stmt, &fd, NULL);
8203 fdp = &fd;
8205 /* We need two temporaries with fd.loop.v type (istart/iend)
8206 and then (fd.collapse - 1) temporaries with the same
8207 type for count2 ... countN-1 vars if not constant. */
8208 size_t count = 2;
8209 tree type = fd.iter_type;
8210 if (fd.collapse > 1
8211 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8212 count += fd.collapse - 1;
8213 bool taskreg_for
8214 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
8215 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
8216 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
8217 tree simtc = NULL;
8218 tree clauses = *pc;
8219 if (taskreg_for)
8220 outerc
8221 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
8222 OMP_CLAUSE__LOOPTEMP_);
8223 if (ctx->simt_stmt)
8224 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
8225 OMP_CLAUSE__LOOPTEMP_);
8226 for (i = 0; i < count; i++)
8228 tree temp;
8229 if (taskreg_for)
8231 gcc_assert (outerc);
8232 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8233 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
8234 OMP_CLAUSE__LOOPTEMP_);
8236 else
8238 /* If there are 2 adjacent SIMD stmts, one with _simt_
8239 clause, another without, make sure they have the same
8240 decls in _looptemp_ clauses, because the outer stmt
8241 they are combined into will look up just one inner_stmt. */
8242 if (ctx->simt_stmt)
8243 temp = OMP_CLAUSE_DECL (simtc);
8244 else
8245 temp = create_tmp_var (type);
8246 insert_decl_map (&ctx->outer->cb, temp, temp);
8248 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8249 OMP_CLAUSE_DECL (*pc) = temp;
8250 pc = &OMP_CLAUSE_CHAIN (*pc);
8251 if (ctx->simt_stmt)
8252 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
8253 OMP_CLAUSE__LOOPTEMP_);
8255 *pc = clauses;
8258 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
8259 dlist = NULL;
8260 body = NULL;
8261 tree rclauses
8262 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
8263 OMP_CLAUSE_REDUCTION);
8264 tree rtmp = NULL_TREE;
8265 if (rclauses)
8267 tree type = build_pointer_type (pointer_sized_int_node);
8268 tree temp = create_tmp_var (type);
8269 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8270 OMP_CLAUSE_DECL (c) = temp;
8271 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
8272 gimple_omp_for_set_clauses (stmt, c);
8273 lower_omp_task_reductions (ctx, OMP_FOR,
8274 gimple_omp_for_clauses (stmt),
8275 &tred_ilist, &tred_dlist);
8276 rclauses = c;
8277 rtmp = make_ssa_name (type);
8278 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
8281 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8282 fdp);
8283 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
8284 gimple_omp_for_pre_body (stmt));
8286 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8288 /* Lower the header expressions. At this point, we can assume that
8289 the header is of the form:
8291 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8293 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8294 using the .omp_data_s mapping, if needed. */
8295 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
8297 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
8298 if (!is_gimple_min_invariant (*rhs_p))
8299 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8300 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8301 recompute_tree_invariant_for_addr_expr (*rhs_p);
8303 rhs_p = gimple_omp_for_final_ptr (stmt, i);
8304 if (!is_gimple_min_invariant (*rhs_p))
8305 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8306 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8307 recompute_tree_invariant_for_addr_expr (*rhs_p);
8309 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
8310 if (!is_gimple_min_invariant (*rhs_p))
8311 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8313 if (rclauses)
8314 gimple_seq_add_seq (&tred_ilist, cnt_list);
8315 else
8316 gimple_seq_add_seq (&body, cnt_list);
8318 /* Once lowered, extract the bounds and clauses. */
8319 omp_extract_for_data (stmt, &fd, NULL);
8321 if (is_gimple_omp_oacc (ctx->stmt)
8322 && !ctx_in_oacc_kernels_region (ctx))
8323 lower_oacc_head_tail (gimple_location (stmt),
8324 gimple_omp_for_clauses (stmt),
8325 &oacc_head, &oacc_tail, ctx);
8327 /* Add OpenACC partitioning and reduction markers just before the loop. */
8328 if (oacc_head)
8329 gimple_seq_add_seq (&body, oacc_head);
8331 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
8333 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
8334 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
8335 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8336 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8338 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
8339 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
8340 OMP_CLAUSE_LINEAR_STEP (c)
8341 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
8342 ctx);
8345 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
8346 && gimple_omp_for_grid_phony (stmt));
8347 if (!phony_loop)
8348 gimple_seq_add_stmt (&body, stmt);
8349 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
8351 if (!phony_loop)
8352 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8353 fd.loop.v));
8355 /* After the loop, add exit clauses. */
8356 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
8358 if (ctx->cancellable)
8359 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
8361 gimple_seq_add_seq (&body, dlist);
8363 if (rclauses)
8365 gimple_seq_add_seq (&tred_ilist, body);
8366 body = tred_ilist;
8369 body = maybe_catch_exception (body);
8371 if (!phony_loop)
8373 /* Region exit marker goes at the end of the loop body. */
8374 gimple *g = gimple_build_omp_return (fd.have_nowait);
8375 gimple_seq_add_stmt (&body, g);
8377 gimple_seq_add_seq (&body, tred_dlist);
8379 maybe_add_implicit_barrier_cancel (ctx, g, &body);
8381 if (rclauses)
8382 OMP_CLAUSE_DECL (rclauses) = rtmp;
8385 /* Add OpenACC joining and reduction markers just after the loop. */
8386 if (oacc_tail)
8387 gimple_seq_add_seq (&body, oacc_tail);
8389 pop_gimplify_context (new_stmt);
8391 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8392 maybe_remove_omp_member_access_dummy_vars (new_stmt);
8393 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
8394 if (BLOCK_VARS (block))
8395 TREE_USED (block) = 1;
8397 gimple_bind_set_body (new_stmt, body);
8398 gimple_omp_set_body (stmt, NULL);
8399 gimple_omp_for_set_pre_body (stmt, NULL);
8402 /* Callback for walk_stmts. Check if the current statement only contains
8403 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
8405 static tree
8406 check_combined_parallel (gimple_stmt_iterator *gsi_p,
8407 bool *handled_ops_p,
8408 struct walk_stmt_info *wi)
8410 int *info = (int *) wi->info;
8411 gimple *stmt = gsi_stmt (*gsi_p);
8413 *handled_ops_p = true;
8414 switch (gimple_code (stmt))
8416 WALK_SUBSTMTS;
8418 case GIMPLE_DEBUG:
8419 break;
8420 case GIMPLE_OMP_FOR:
8421 case GIMPLE_OMP_SECTIONS:
8422 *info = *info == 0 ? 1 : -1;
8423 break;
8424 default:
8425 *info = -1;
8426 break;
8428 return NULL;
/* State used while building a task copy function: pairs the generic
   tree-inline remapping machinery with the OMP context of the task
   being lowered.  */
8431 struct omp_taskcopy_context
8433 /* This field must be at the beginning, as we do "inheritance": Some
8434 callback functions for tree-inline.c (e.g., omp_copy_decl)
8435 receive a copy_body_data pointer that is up-casted to an
8436 omp_context pointer. */
8437 copy_body_data cb;
/* Lowering context of the task whose copyfn is being constructed.  */
8438 omp_context *ctx;
8441 static tree
8442 task_copyfn_copy_decl (tree var, copy_body_data *cb)
8444 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
8446 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
8447 return create_tmp_var (TREE_TYPE (var));
8449 return var;
8452 static tree
8453 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
8455 tree name, new_fields = NULL, type, f;
8457 type = lang_hooks.types.make_type (RECORD_TYPE);
8458 name = DECL_NAME (TYPE_NAME (orig_type));
8459 name = build_decl (gimple_location (tcctx->ctx->stmt),
8460 TYPE_DECL, name, type);
8461 TYPE_NAME (type) = name;
8463 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
8465 tree new_f = copy_node (f);
8466 DECL_CONTEXT (new_f) = type;
8467 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
8468 TREE_CHAIN (new_f) = new_fields;
8469 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
8470 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
8471 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
8472 &tcctx->cb, NULL);
8473 new_fields = new_f;
8474 tcctx->cb.decl_map->put (f, new_f);
8476 TYPE_FIELDS (type) = nreverse (new_fields);
8477 layout_type (type);
8478 return type;
/* Build the body of the task copy function for TASK_STMT: the function
   the runtime calls to copy-construct firstprivate and related data
   from the sender record (reached through the second argument, SARG)
   into the task's own record (reached through the first argument, ARG).
   CTX is the lowering context of the task region.  Works in three
   passes over the task's clauses: temporaries for variably-sized
   types, then non-VLA copies, then VLA firstprivates.  */
8483 static void
8484 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
8486 struct function *child_cfun;
8487 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
8488 tree record_type, srecord_type, bind, list;
8489 bool record_needs_remap = false, srecord_needs_remap = false;
8490 splay_tree_node n;
8491 struct omp_taskcopy_context tcctx;
8492 location_t loc = gimple_location (task_stmt);
/* Counts leading _looptemp_ clauses; the first two are special (see
   the OMP_CLAUSE__LOOPTEMP_ case below).  */
8493 size_t looptempno = 0;
8495 child_fn = gimple_omp_task_copy_fn (task_stmt);
8496 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
8497 gcc_assert (child_cfun->cfg == NULL);
8498 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
8500 /* Reset DECL_CONTEXT on function arguments. */
8501 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
8502 DECL_CONTEXT (t) = child_fn;
8504 /* Populate the function. */
8505 push_gimplify_context ();
8506 push_cfun (child_cfun);
8508 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
8509 TREE_SIDE_EFFECTS (bind) = 1;
8510 list = NULL;
8511 DECL_SAVED_TREE (child_fn) = bind;
8512 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
8514 /* Remap src and dst argument types if needed. */
8515 record_type = ctx->record_type;
8516 srecord_type = ctx->srecord_type;
/* Remapping is only necessary when a field has a variably modified
   type (e.g. VLA) referencing decls of the source function.  */
8517 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8518 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
8520 record_needs_remap = true;
8521 break;
8523 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
8524 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
8526 srecord_needs_remap = true;
8527 break;
8530 if (record_needs_remap || srecord_needs_remap)
/* Set up a copy_body_data for tree-inline remapping; decl_map being
   non-NULL is used below as the "remapping active" flag.  */
8532 memset (&tcctx, '\0', sizeof (tcctx));
8533 tcctx.cb.src_fn = ctx->cb.src_fn;
8534 tcctx.cb.dst_fn = child_fn;
8535 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
8536 gcc_checking_assert (tcctx.cb.src_node);
8537 tcctx.cb.dst_node = tcctx.cb.src_node;
8538 tcctx.cb.src_cfun = ctx->cb.src_cfun;
8539 tcctx.cb.copy_decl = task_copyfn_copy_decl;
8540 tcctx.cb.eh_lp_nr = 0;
8541 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
8542 tcctx.cb.decl_map = new hash_map<tree, tree>;
8543 tcctx.ctx = ctx;
8545 if (record_needs_remap)
8546 record_type = task_copyfn_remap_type (&tcctx, record_type);
8547 if (srecord_needs_remap)
8548 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
8550 else
8551 tcctx.cb.decl_map = NULL;
/* ARG points at the task's record, SARG at the sender record.  */
8553 arg = DECL_ARGUMENTS (child_fn);
8554 TREE_TYPE (arg) = build_pointer_type (record_type);
8555 sarg = DECL_CHAIN (arg);
8556 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
8558 /* First pass: initialize temporaries used in record_type and srecord_type
8559 sizes and field offsets. */
8560 if (tcctx.cb.decl_map)
8561 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8562 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8564 tree *p;
8566 decl = OMP_CLAUSE_DECL (c);
8567 p = tcctx.cb.decl_map->get (decl);
8568 if (p == NULL)
8569 continue;
8570 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8571 sf = (tree) n->value;
8572 sf = *tcctx.cb.decl_map->get (sf);
8573 src = build_simple_mem_ref_loc (loc, sarg);
8574 src = omp_build_component_ref (src, sf);
8575 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
8576 append_to_statement_list (t, &list);
8579 /* Second pass: copy shared var pointers and copy construct non-VLA
8580 firstprivate vars. */
8581 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8582 switch (OMP_CLAUSE_CODE (c))
8584 splay_tree_key key;
8585 case OMP_CLAUSE_SHARED:
8586 decl = OMP_CLAUSE_DECL (c);
8587 key = (splay_tree_key) decl;
/* shared-firstprivate vars are keyed by &DECL_UID rather than by
   the decl itself.  */
8588 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8589 key = (splay_tree_key) &DECL_UID (decl);
8590 n = splay_tree_lookup (ctx->field_map, key);
8591 if (n == NULL)
8592 break;
8593 f = (tree) n->value;
8594 if (tcctx.cb.decl_map)
8595 f = *tcctx.cb.decl_map->get (f);
8596 n = splay_tree_lookup (ctx->sfield_map, key);
8597 sf = (tree) n->value;
8598 if (tcctx.cb.decl_map)
8599 sf = *tcctx.cb.decl_map->get (sf);
8600 src = build_simple_mem_ref_loc (loc, sarg);
8601 src = omp_build_component_ref (src, sf);
8602 dst = build_simple_mem_ref_loc (loc, arg);
8603 dst = omp_build_component_ref (dst, f);
8604 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8605 append_to_statement_list (t, &list);
8606 break;
8607 case OMP_CLAUSE_REDUCTION:
8608 case OMP_CLAUSE_IN_REDUCTION:
/* The clause decl may be wrapped in MEM_REF / POINTER_PLUS_EXPR /
   INDIRECT_REF / ADDR_EXPR; strip down to the base decl before the
   map lookups.  */
8609 decl = OMP_CLAUSE_DECL (c);
8610 if (TREE_CODE (decl) == MEM_REF)
8612 decl = TREE_OPERAND (decl, 0);
8613 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8614 decl = TREE_OPERAND (decl, 0);
8615 if (TREE_CODE (decl) == INDIRECT_REF
8616 || TREE_CODE (decl) == ADDR_EXPR)
8617 decl = TREE_OPERAND (decl, 0);
8619 key = (splay_tree_key) decl;
8620 n = splay_tree_lookup (ctx->field_map, key);
8621 if (n == NULL)
8622 break;
8623 f = (tree) n->value;
8624 if (tcctx.cb.decl_map)
8625 f = *tcctx.cb.decl_map->get (f);
8626 n = splay_tree_lookup (ctx->sfield_map, key);
8627 sf = (tree) n->value;
8628 if (tcctx.cb.decl_map)
8629 sf = *tcctx.cb.decl_map->get (sf);
8630 src = build_simple_mem_ref_loc (loc, sarg);
8631 src = omp_build_component_ref (src, sf);
8632 if (decl != OMP_CLAUSE_DECL (c)
8633 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8634 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8635 src = build_simple_mem_ref_loc (loc, src);
8636 dst = build_simple_mem_ref_loc (loc, arg);
8637 dst = omp_build_component_ref (dst, f);
8638 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8639 append_to_statement_list (t, &list);
8640 break;
8641 case OMP_CLAUSE__LOOPTEMP_:
8642 /* Fields for first two _looptemp_ clauses are initialized by
8643 GOMP_taskloop*, the rest are handled like firstprivate. */
8644 if (looptempno < 2)
8646 looptempno++;
8647 break;
8649 /* FALLTHRU */
8650 case OMP_CLAUSE__REDUCTEMP_:
8651 case OMP_CLAUSE_FIRSTPRIVATE:
8652 decl = OMP_CLAUSE_DECL (c);
/* VLAs are handled in the last pass below.  */
8653 if (is_variable_sized (decl))
8654 break;
8655 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8656 if (n == NULL)
8657 break;
8658 f = (tree) n->value;
8659 if (tcctx.cb.decl_map)
8660 f = *tcctx.cb.decl_map->get (f);
8661 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8662 if (n != NULL)
8664 sf = (tree) n->value;
8665 if (tcctx.cb.decl_map)
8666 sf = *tcctx.cb.decl_map->get (sf);
8667 src = build_simple_mem_ref_loc (loc, sarg);
8668 src = omp_build_component_ref (src, sf);
8669 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
8670 src = build_simple_mem_ref_loc (loc, src);
8672 else
/* No sender field: read the variable directly.  */
8673 src = decl;
8674 dst = build_simple_mem_ref_loc (loc, arg);
8675 dst = omp_build_component_ref (dst, f);
/* Firstprivate uses the language hook so C++ copy constructors
   run; the internal clauses are plain assignments.  */
8676 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
8677 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8678 else
8679 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
8680 append_to_statement_list (t, &list);
8681 break;
8682 case OMP_CLAUSE_PRIVATE:
8683 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8684 break;
8685 decl = OMP_CLAUSE_DECL (c);
8686 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8687 f = (tree) n->value;
8688 if (tcctx.cb.decl_map)
8689 f = *tcctx.cb.decl_map->get (f);
8690 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8691 if (n != NULL)
8693 sf = (tree) n->value;
8694 if (tcctx.cb.decl_map)
8695 sf = *tcctx.cb.decl_map->get (sf);
8696 src = build_simple_mem_ref_loc (loc, sarg);
8697 src = omp_build_component_ref (src, sf);
8698 if (use_pointer_for_field (decl, NULL))
8699 src = build_simple_mem_ref_loc (loc, src);
8701 else
8702 src = decl;
8703 dst = build_simple_mem_ref_loc (loc, arg);
8704 dst = omp_build_component_ref (dst, f);
8705 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8706 append_to_statement_list (t, &list);
8707 break;
8708 default:
8709 break;
8712 /* Last pass: handle VLA firstprivates. */
8713 if (tcctx.cb.decl_map)
8714 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8715 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8717 tree ind, ptr, df;
8719 decl = OMP_CLAUSE_DECL (c);
8720 if (!is_variable_sized (decl))
8721 continue;
8722 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8723 if (n == NULL)
8724 continue;
8725 f = (tree) n->value;
8726 f = *tcctx.cb.decl_map->get (f);
/* A VLA's DECL_VALUE_EXPR is an INDIRECT_REF of a pointer decl;
   the data travels via that pointer's field.  */
8727 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
8728 ind = DECL_VALUE_EXPR (decl);
8729 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
8730 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
8731 n = splay_tree_lookup (ctx->sfield_map,
8732 (splay_tree_key) TREE_OPERAND (ind, 0));
8733 sf = (tree) n->value;
8734 sf = *tcctx.cb.decl_map->get (sf);
8735 src = build_simple_mem_ref_loc (loc, sarg);
8736 src = omp_build_component_ref (src, sf);
8737 src = build_simple_mem_ref_loc (loc, src);
8738 dst = build_simple_mem_ref_loc (loc, arg);
8739 dst = omp_build_component_ref (dst, f);
8740 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
8741 append_to_statement_list (t, &list);
/* Point the task-side pointer field at the freshly copied data.  */
8742 n = splay_tree_lookup (ctx->field_map,
8743 (splay_tree_key) TREE_OPERAND (ind, 0));
8744 df = (tree) n->value;
8745 df = *tcctx.cb.decl_map->get (df);
8746 ptr = build_simple_mem_ref_loc (loc, arg);
8747 ptr = omp_build_component_ref (ptr, df);
8748 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
8749 build_fold_addr_expr_loc (loc, dst));
8750 append_to_statement_list (t, &list);
8753 t = build1 (RETURN_EXPR, void_type_node, NULL);
8754 append_to_statement_list (t, &list);
8756 if (tcctx.cb.decl_map)
8757 delete tcctx.cb.decl_map;
8758 pop_gimplify_context (NULL);
8759 BIND_EXPR_BODY (bind) = list;
8760 pop_cfun ();
/* Lower the OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into the
   pointer-array representation handed to the runtime.  Statements that
   build and fill the array are appended to *ISEQ; a clobber of the
   array (it is dead after the construct) is appended to *OSEQ.  The
   depend clauses in *PCLAUSES are superseded by one new
   OMP_CLAUSE_DEPEND_LAST clause whose decl is the array's address.  */
8763 static void
8764 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
8766 tree c, clauses;
8767 gimple *g;
/* cnt[0] = out/inout, cnt[1] = mutexinoutset, cnt[2] = in,
   cnt[3] = depobj.  IDX is the number of leading header slots in the
   array: 2 for the classic layout, 5 when mutexinoutset or depobj
   entries force the extended layout.  */
8768 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
8770 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
8771 gcc_assert (clauses);
/* Count the dependences of each kind.  */
8772 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8773 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8774 switch (OMP_CLAUSE_DEPEND_KIND (c))
8776 case OMP_CLAUSE_DEPEND_LAST:
8777 /* Lowering already done at gimplification. */
8778 return;
8779 case OMP_CLAUSE_DEPEND_IN:
8780 cnt[2]++;
8781 break;
8782 case OMP_CLAUSE_DEPEND_OUT:
8783 case OMP_CLAUSE_DEPEND_INOUT:
8784 cnt[0]++;
8785 break;
8786 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8787 cnt[1]++;
8788 break;
8789 case OMP_CLAUSE_DEPEND_DEPOBJ:
8790 cnt[3]++;
8791 break;
8792 case OMP_CLAUSE_DEPEND_SOURCE:
8793 case OMP_CLAUSE_DEPEND_SINK:
8794 /* FALLTHRU */
8795 default:
8796 gcc_unreachable ();
/* mutexinoutset/depobj dependences require the extended 5-slot
   header.  */
8798 if (cnt[1] || cnt[3])
8799 idx = 5;
8800 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
8801 tree type = build_array_type_nelts (ptr_type_node, total + idx);
8802 tree array = create_tmp_var (type);
8803 TREE_ADDRESSABLE (array) = 1;
8804 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8805 NULL_TREE);
/* Extended layout: slot 0 holds 0 as a marker and the total count
   moves to slot 1; classic layout stores the total in slot 0.  */
8806 if (idx == 5)
8808 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
8809 gimple_seq_add_stmt (iseq, g);
8810 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8811 NULL_TREE);
8813 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
8814 gimple_seq_add_stmt (iseq, g);
/* Store the per-kind counts: only the out/inout count in the classic
   layout, the first three counts in the extended one.  */
8815 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
8817 r = build4 (ARRAY_REF, ptr_type_node, array,
8818 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
8819 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
8820 gimple_seq_add_stmt (iseq, g);
/* Fill in the dependence addresses, grouped by kind in cnt[] order.  */
8822 for (i = 0; i < 4; i++)
8824 if (cnt[i] == 0)
8825 continue;
8826 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8827 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
8828 continue;
8829 else
8831 switch (OMP_CLAUSE_DEPEND_KIND (c))
8833 case OMP_CLAUSE_DEPEND_IN:
8834 if (i != 2)
8835 continue;
8836 break;
8837 case OMP_CLAUSE_DEPEND_OUT:
8838 case OMP_CLAUSE_DEPEND_INOUT:
8839 if (i != 0)
8840 continue;
8841 break;
8842 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8843 if (i != 1)
8844 continue;
8845 break;
8846 case OMP_CLAUSE_DEPEND_DEPOBJ:
8847 if (i != 3)
8848 continue;
8849 break;
8850 default:
8851 gcc_unreachable ();
8853 tree t = OMP_CLAUSE_DECL (c);
8854 t = fold_convert (ptr_type_node, t);
8855 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
8856 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
8857 NULL_TREE, NULL_TREE);
8858 g = gimple_build_assign (r, t);
8859 gimple_seq_add_stmt (iseq, g);
/* Replace the depend clauses with a single clause holding the address
   of the lowered array.  */
8862 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8863 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8864 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8865 OMP_CLAUSE_CHAIN (c) = *pclauses;
8866 *pclauses = c;
/* The array is dead once the construct finishes; emit a clobber.  */
8867 tree clobber = build_constructor (type, NULL);
8868 TREE_THIS_VOLATILE (clobber) = 1;
8869 g = gimple_build_assign (array, clobber);
8870 gimple_seq_add_stmt (oseq, g);
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  The
   statement in GSI_P is replaced by a GIMPLE_BIND that sequences the
   sender-side setup, the directive itself, and the teardown code.  */
8876 static void
8877 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8879 tree clauses;
8880 tree child_fn, t;
8881 gimple *stmt = gsi_stmt (*gsi_p);
8882 gbind *par_bind, *bind, *dep_bind = NULL;
8883 gimple_seq par_body;
8884 location_t loc = gimple_location (stmt);
8886 clauses = gimple_omp_taskreg_clauses (stmt);
/* A taskwait-style task has no body to lower.  */
8887 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8888 && gimple_omp_task_taskwait_p (stmt))
8890 par_bind = NULL;
8891 par_body = NULL;
8893 else
8895 par_bind
8896 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
8897 par_body = gimple_bind_body (par_bind);
8899 child_fn = ctx->cb.dst_fn;
/* If a not-yet-combined parallel's body consists of exactly one
   work-sharing construct, mark it combined so the combined runtime
   entry points can be used.  */
8900 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
8901 && !gimple_omp_parallel_combined_p (stmt))
8903 struct walk_stmt_info wi;
8904 int ws_num = 0;
8906 memset (&wi, 0, sizeof (wi));
8907 wi.info = &ws_num;
8908 wi.val_only = true;
8909 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
8910 if (ws_num == 1)
8911 gimple_omp_parallel_set_combined_p (stmt, true);
/* Task depend clauses must be lowered into the runtime's array form
   before the task itself.  */
8913 gimple_seq dep_ilist = NULL;
8914 gimple_seq dep_olist = NULL;
8915 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8916 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
8918 push_gimplify_context ();
8919 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
8920 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
8921 &dep_ilist, &dep_olist);
/* For a taskwait-style task just wrap the statement with the depend
   sequences (if any) and return; there is nothing else to lower.  */
8924 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8925 && gimple_omp_task_taskwait_p (stmt))
8927 if (dep_bind)
8929 gsi_replace (gsi_p, dep_bind, true);
8930 gimple_bind_add_seq (dep_bind, dep_ilist);
8931 gimple_bind_add_stmt (dep_bind, stmt);
8932 gimple_bind_add_seq (dep_bind, dep_olist);
8933 pop_gimplify_context (dep_bind);
8935 return;
/* A separate sender record implies the task needs a copy function.  */
8938 if (ctx->srecord_type)
8939 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
/* Task reductions (taskloop reduction / parallel _reductemp_) get
   their own setup/teardown sequences around everything else.  */
8941 gimple_seq tskred_ilist = NULL;
8942 gimple_seq tskred_olist = NULL;
8943 if ((is_task_ctx (ctx)
8944 && gimple_omp_task_taskloop_p (ctx->stmt)
8945 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
8946 OMP_CLAUSE_REDUCTION))
8947 || (is_parallel_ctx (ctx)
8948 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
8949 OMP_CLAUSE__REDUCTEMP_)))
8951 if (dep_bind == NULL)
8953 push_gimplify_context ();
8954 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
8956 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
8957 : OMP_PARALLEL,
8958 gimple_omp_taskreg_clauses (ctx->stmt),
8959 &tskred_ilist, &tskred_olist);
8962 push_gimplify_context ();
8964 gimple_seq par_olist = NULL;
8965 gimple_seq par_ilist = NULL;
8966 gimple_seq par_rlist = NULL;
8967 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
8968 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
8969 if (phony_construct && ctx->record_type)
8971 gcc_checking_assert (!ctx->receiver_decl);
8972 ctx->receiver_decl = create_tmp_var
8973 (build_reference_type (ctx->record_type), ".omp_rec");
8975 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
8976 lower_omp (&par_body, ctx);
8977 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
8978 lower_reduction_clauses (clauses, &par_rlist, ctx);
8980 /* Declare all the variables created by mapping and the variables
8981 declared in the scope of the parallel body. */
8982 record_vars_into (ctx->block_vars, child_fn);
8983 maybe_remove_omp_member_access_dummy_vars (par_bind);
8984 record_vars_into (gimple_bind_vars (par_bind), child_fn);
/* The sender record (.omp_data_o) carries the shared data into the
   child function.  */
8986 if (ctx->record_type)
8988 ctx->sender_decl
8989 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
8990 : ctx->record_type, ".omp_data_o");
8991 DECL_NAMELESS (ctx->sender_decl) = 1;
8992 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
8993 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
8996 gimple_seq olist = NULL;
8997 gimple_seq ilist = NULL;
8998 lower_send_clauses (clauses, &ilist, &olist, ctx);
8999 lower_send_shared_vars (&ilist, &olist, ctx);
/* The sender record dies with the construct; clobber it afterwards.  */
9001 if (ctx->record_type)
9003 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9004 TREE_THIS_VOLATILE (clobber) = 1;
9005 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9006 clobber));
9009 /* Once all the expansions are done, sequence all the different
9010 fragments inside gimple_omp_body. */
9012 gimple_seq new_body = NULL;
9014 if (ctx->record_type)
9016 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9017 /* fixup_child_record_type might have changed receiver_decl's type. */
9018 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9019 gimple_seq_add_stmt (&new_body,
9020 gimple_build_assign (ctx->receiver_decl, t));
9023 gimple_seq_add_seq (&new_body, par_ilist);
9024 gimple_seq_add_seq (&new_body, par_body);
9025 gimple_seq_add_seq (&new_body, par_rlist);
9026 if (ctx->cancellable)
9027 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
9028 gimple_seq_add_seq (&new_body, par_olist);
9029 new_body = maybe_catch_exception (new_body);
9030 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
9031 gimple_seq_add_stmt (&new_body,
9032 gimple_build_omp_continue (integer_zero_node,
9033 integer_zero_node));
9034 if (!phony_construct)
9036 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9037 gimple_omp_set_body (stmt, new_body);
9040 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
9041 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9042 else
9043 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
9044 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9045 gimple_bind_add_seq (bind, ilist);
9046 if (!phony_construct)
9047 gimple_bind_add_stmt (bind, stmt);
9048 else
9049 gimple_bind_add_seq (bind, new_body);
9050 gimple_bind_add_seq (bind, olist);
9052 pop_gimplify_context (NULL);
/* Nest the main bind inside the depend/taskred wrappers when present.  */
9054 if (dep_bind)
9056 gimple_bind_add_seq (dep_bind, dep_ilist);
9057 gimple_bind_add_seq (dep_bind, tskred_ilist);
9058 gimple_bind_add_stmt (dep_bind, bind);
9059 gimple_bind_add_seq (dep_bind, tskred_olist);
9060 gimple_bind_add_seq (dep_bind, dep_olist);
9061 pop_gimplify_context (dep_bind);
9065 /* Lower the GIMPLE_OMP_TARGET in the current statement
9066 in GSI_P. CTX holds context information for the directive. */
9068 static void
9069 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9071 tree clauses;
9072 tree child_fn, t, c;
9073 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
9074 gbind *tgt_bind, *bind, *dep_bind = NULL;
9075 gimple_seq tgt_body, olist, ilist, fplist, new_body;
9076 location_t loc = gimple_location (stmt);
9077 bool offloaded, data_region;
9078 unsigned int map_cnt = 0;
9080 offloaded = is_gimple_omp_offloaded (stmt);
9081 switch (gimple_omp_target_kind (stmt))
9083 case GF_OMP_TARGET_KIND_REGION:
9084 case GF_OMP_TARGET_KIND_UPDATE:
9085 case GF_OMP_TARGET_KIND_ENTER_DATA:
9086 case GF_OMP_TARGET_KIND_EXIT_DATA:
9087 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
9088 case GF_OMP_TARGET_KIND_OACC_KERNELS:
9089 case GF_OMP_TARGET_KIND_OACC_UPDATE:
9090 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
9091 case GF_OMP_TARGET_KIND_OACC_DECLARE:
9092 data_region = false;
9093 break;
9094 case GF_OMP_TARGET_KIND_DATA:
9095 case GF_OMP_TARGET_KIND_OACC_DATA:
9096 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
9097 data_region = true;
9098 break;
9099 default:
9100 gcc_unreachable ();
9103 clauses = gimple_omp_target_clauses (stmt);
9105 gimple_seq dep_ilist = NULL;
9106 gimple_seq dep_olist = NULL;
9107 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
9109 push_gimplify_context ();
9110 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9111 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
9112 &dep_ilist, &dep_olist);
9115 tgt_bind = NULL;
9116 tgt_body = NULL;
9117 if (offloaded)
9119 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
9120 tgt_body = gimple_bind_body (tgt_bind);
9122 else if (data_region)
9123 tgt_body = gimple_omp_body (stmt);
9124 child_fn = ctx->cb.dst_fn;
9126 push_gimplify_context ();
9127 fplist = NULL;
9129 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9130 switch (OMP_CLAUSE_CODE (c))
9132 tree var, x;
9134 default:
9135 break;
9136 case OMP_CLAUSE_MAP:
9137 #if CHECKING_P
9138 /* First check what we're prepared to handle in the following. */
9139 switch (OMP_CLAUSE_MAP_KIND (c))
9141 case GOMP_MAP_ALLOC:
9142 case GOMP_MAP_TO:
9143 case GOMP_MAP_FROM:
9144 case GOMP_MAP_TOFROM:
9145 case GOMP_MAP_POINTER:
9146 case GOMP_MAP_TO_PSET:
9147 case GOMP_MAP_DELETE:
9148 case GOMP_MAP_RELEASE:
9149 case GOMP_MAP_ALWAYS_TO:
9150 case GOMP_MAP_ALWAYS_FROM:
9151 case GOMP_MAP_ALWAYS_TOFROM:
9152 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9153 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9154 case GOMP_MAP_STRUCT:
9155 case GOMP_MAP_ALWAYS_POINTER:
9156 break;
9157 case GOMP_MAP_FORCE_ALLOC:
9158 case GOMP_MAP_FORCE_TO:
9159 case GOMP_MAP_FORCE_FROM:
9160 case GOMP_MAP_FORCE_TOFROM:
9161 case GOMP_MAP_FORCE_PRESENT:
9162 case GOMP_MAP_FORCE_DEVICEPTR:
9163 case GOMP_MAP_DEVICE_RESIDENT:
9164 case GOMP_MAP_LINK:
9165 gcc_assert (is_gimple_omp_oacc (stmt));
9166 break;
9167 default:
9168 gcc_unreachable ();
9170 #endif
9171 /* FALLTHRU */
9172 case OMP_CLAUSE_TO:
9173 case OMP_CLAUSE_FROM:
9174 oacc_firstprivate:
9175 var = OMP_CLAUSE_DECL (c);
9176 if (!DECL_P (var))
9178 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9179 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9180 && (OMP_CLAUSE_MAP_KIND (c)
9181 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
9182 map_cnt++;
9183 continue;
9186 if (DECL_SIZE (var)
9187 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9189 tree var2 = DECL_VALUE_EXPR (var);
9190 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9191 var2 = TREE_OPERAND (var2, 0);
9192 gcc_assert (DECL_P (var2));
9193 var = var2;
9196 if (offloaded
9197 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9198 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9199 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9201 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9203 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
9204 && varpool_node::get_create (var)->offloadable)
9205 continue;
9207 tree type = build_pointer_type (TREE_TYPE (var));
9208 tree new_var = lookup_decl (var, ctx);
9209 x = create_tmp_var_raw (type, get_name (new_var));
9210 gimple_add_tmp_var (x);
9211 x = build_simple_mem_ref (x);
9212 SET_DECL_VALUE_EXPR (new_var, x);
9213 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9215 continue;
9218 if (!maybe_lookup_field (var, ctx))
9219 continue;
9221 /* Don't remap oacc parallel reduction variables, because the
9222 intermediate result must be local to each gang. */
9223 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9224 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
9226 x = build_receiver_ref (var, true, ctx);
9227 tree new_var = lookup_decl (var, ctx);
9229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9230 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9231 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9232 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9233 x = build_simple_mem_ref (x);
9234 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9236 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9237 if (omp_is_reference (new_var)
9238 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
9240 /* Create a local object to hold the instance
9241 value. */
9242 tree type = TREE_TYPE (TREE_TYPE (new_var));
9243 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
9244 tree inst = create_tmp_var (type, id);
9245 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
9246 x = build_fold_addr_expr (inst);
9248 gimplify_assign (new_var, x, &fplist);
9250 else if (DECL_P (new_var))
9252 SET_DECL_VALUE_EXPR (new_var, x);
9253 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9255 else
9256 gcc_unreachable ();
9258 map_cnt++;
9259 break;
9261 case OMP_CLAUSE_FIRSTPRIVATE:
9262 if (is_oacc_parallel (ctx))
9263 goto oacc_firstprivate;
9264 map_cnt++;
9265 var = OMP_CLAUSE_DECL (c);
9266 if (!omp_is_reference (var)
9267 && !is_gimple_reg_type (TREE_TYPE (var)))
9269 tree new_var = lookup_decl (var, ctx);
9270 if (is_variable_sized (var))
9272 tree pvar = DECL_VALUE_EXPR (var);
9273 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9274 pvar = TREE_OPERAND (pvar, 0);
9275 gcc_assert (DECL_P (pvar));
9276 tree new_pvar = lookup_decl (pvar, ctx);
9277 x = build_fold_indirect_ref (new_pvar);
9278 TREE_THIS_NOTRAP (x) = 1;
9280 else
9281 x = build_receiver_ref (var, true, ctx);
9282 SET_DECL_VALUE_EXPR (new_var, x);
9283 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9285 break;
9287 case OMP_CLAUSE_PRIVATE:
9288 if (is_gimple_omp_oacc (ctx->stmt))
9289 break;
9290 var = OMP_CLAUSE_DECL (c);
9291 if (is_variable_sized (var))
9293 tree new_var = lookup_decl (var, ctx);
9294 tree pvar = DECL_VALUE_EXPR (var);
9295 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9296 pvar = TREE_OPERAND (pvar, 0);
9297 gcc_assert (DECL_P (pvar));
9298 tree new_pvar = lookup_decl (pvar, ctx);
9299 x = build_fold_indirect_ref (new_pvar);
9300 TREE_THIS_NOTRAP (x) = 1;
9301 SET_DECL_VALUE_EXPR (new_var, x);
9302 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9304 break;
9306 case OMP_CLAUSE_USE_DEVICE_PTR:
9307 case OMP_CLAUSE_IS_DEVICE_PTR:
9308 var = OMP_CLAUSE_DECL (c);
9309 map_cnt++;
9310 if (is_variable_sized (var))
9312 tree new_var = lookup_decl (var, ctx);
9313 tree pvar = DECL_VALUE_EXPR (var);
9314 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9315 pvar = TREE_OPERAND (pvar, 0);
9316 gcc_assert (DECL_P (pvar));
9317 tree new_pvar = lookup_decl (pvar, ctx);
9318 x = build_fold_indirect_ref (new_pvar);
9319 TREE_THIS_NOTRAP (x) = 1;
9320 SET_DECL_VALUE_EXPR (new_var, x);
9321 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9323 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9325 tree new_var = lookup_decl (var, ctx);
9326 tree type = build_pointer_type (TREE_TYPE (var));
9327 x = create_tmp_var_raw (type, get_name (new_var));
9328 gimple_add_tmp_var (x);
9329 x = build_simple_mem_ref (x);
9330 SET_DECL_VALUE_EXPR (new_var, x);
9331 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9333 else
9335 tree new_var = lookup_decl (var, ctx);
9336 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
9337 gimple_add_tmp_var (x);
9338 SET_DECL_VALUE_EXPR (new_var, x);
9339 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9341 break;
9344 if (offloaded)
9346 target_nesting_level++;
9347 lower_omp (&tgt_body, ctx);
9348 target_nesting_level--;
9350 else if (data_region)
9351 lower_omp (&tgt_body, ctx);
9353 if (offloaded)
9355 /* Declare all the variables created by mapping and the variables
9356 declared in the scope of the target body. */
9357 record_vars_into (ctx->block_vars, child_fn);
9358 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
9359 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9362 olist = NULL;
9363 ilist = NULL;
9364 if (ctx->record_type)
9366 ctx->sender_decl
9367 = create_tmp_var (ctx->record_type, ".omp_data_arr");
9368 DECL_NAMELESS (ctx->sender_decl) = 1;
9369 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9370 t = make_tree_vec (3);
9371 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9372 TREE_VEC_ELT (t, 1)
9373 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9374 ".omp_data_sizes");
9375 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9376 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9377 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9378 tree tkind_type = short_unsigned_type_node;
9379 int talign_shift = 8;
9380 TREE_VEC_ELT (t, 2)
9381 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
9382 ".omp_data_kinds");
9383 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9384 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9385 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9386 gimple_omp_target_set_data_arg (stmt, t);
9388 vec<constructor_elt, va_gc> *vsize;
9389 vec<constructor_elt, va_gc> *vkind;
9390 vec_alloc (vsize, map_cnt);
9391 vec_alloc (vkind, map_cnt);
9392 unsigned int map_idx = 0;
9394 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9395 switch (OMP_CLAUSE_CODE (c))
9397 tree ovar, nc, s, purpose, var, x, type;
9398 unsigned int talign;
9400 default:
9401 break;
9403 case OMP_CLAUSE_MAP:
9404 case OMP_CLAUSE_TO:
9405 case OMP_CLAUSE_FROM:
9406 oacc_firstprivate_map:
9407 nc = c;
9408 ovar = OMP_CLAUSE_DECL (c);
9409 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9410 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9411 || (OMP_CLAUSE_MAP_KIND (c)
9412 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
9413 break;
9414 if (!DECL_P (ovar))
9416 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9417 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9419 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9420 == get_base_address (ovar));
9421 nc = OMP_CLAUSE_CHAIN (c);
9422 ovar = OMP_CLAUSE_DECL (nc);
9424 else
9426 tree x = build_sender_ref (ovar, ctx);
9427 tree v
9428 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9429 gimplify_assign (x, v, &ilist);
9430 nc = NULL_TREE;
9433 else
9435 if (DECL_SIZE (ovar)
9436 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9438 tree ovar2 = DECL_VALUE_EXPR (ovar);
9439 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9440 ovar2 = TREE_OPERAND (ovar2, 0);
9441 gcc_assert (DECL_P (ovar2));
9442 ovar = ovar2;
9444 if (!maybe_lookup_field (ovar, ctx))
9445 continue;
9448 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9449 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9450 talign = DECL_ALIGN_UNIT (ovar);
9451 if (nc)
9453 var = lookup_decl_in_outer_ctx (ovar, ctx);
9454 x = build_sender_ref (ovar, ctx);
9456 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9457 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9458 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9459 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9461 gcc_assert (offloaded);
9462 tree avar
9463 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
9464 mark_addressable (avar);
9465 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9466 talign = DECL_ALIGN_UNIT (avar);
9467 avar = build_fold_addr_expr (avar);
9468 gimplify_assign (x, avar, &ilist);
9470 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9472 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9473 if (!omp_is_reference (var))
9475 if (is_gimple_reg (var)
9476 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9477 TREE_NO_WARNING (var) = 1;
9478 var = build_fold_addr_expr (var);
9480 else
9481 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9482 gimplify_assign (x, var, &ilist);
9484 else if (is_gimple_reg (var))
9486 gcc_assert (offloaded);
9487 tree avar = create_tmp_var (TREE_TYPE (var));
9488 mark_addressable (avar);
9489 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
9490 if (GOMP_MAP_COPY_TO_P (map_kind)
9491 || map_kind == GOMP_MAP_POINTER
9492 || map_kind == GOMP_MAP_TO_PSET
9493 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9495 /* If we need to initialize a temporary
9496 with VAR because it is not addressable, and
9497 the variable hasn't been initialized yet, then
9498 we'll get a warning for the store to avar.
9499 Don't warn in that case, the mapping might
9500 be implicit. */
9501 TREE_NO_WARNING (var) = 1;
9502 gimplify_assign (avar, var, &ilist);
9504 avar = build_fold_addr_expr (avar);
9505 gimplify_assign (x, avar, &ilist);
9506 if ((GOMP_MAP_COPY_FROM_P (map_kind)
9507 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9508 && !TYPE_READONLY (TREE_TYPE (var)))
9510 x = unshare_expr (x);
9511 x = build_simple_mem_ref (x);
9512 gimplify_assign (var, x, &olist);
9515 else
9517 var = build_fold_addr_expr (var);
9518 gimplify_assign (x, var, &ilist);
9521 s = NULL_TREE;
9522 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9524 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9525 s = TREE_TYPE (ovar);
9526 if (TREE_CODE (s) == REFERENCE_TYPE)
9527 s = TREE_TYPE (s);
9528 s = TYPE_SIZE_UNIT (s);
9530 else
9531 s = OMP_CLAUSE_SIZE (c);
9532 if (s == NULL_TREE)
9533 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9534 s = fold_convert (size_type_node, s);
9535 purpose = size_int (map_idx++);
9536 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9537 if (TREE_CODE (s) != INTEGER_CST)
9538 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9540 unsigned HOST_WIDE_INT tkind, tkind_zero;
9541 switch (OMP_CLAUSE_CODE (c))
9543 case OMP_CLAUSE_MAP:
9544 tkind = OMP_CLAUSE_MAP_KIND (c);
9545 tkind_zero = tkind;
9546 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
9547 switch (tkind)
9549 case GOMP_MAP_ALLOC:
9550 case GOMP_MAP_TO:
9551 case GOMP_MAP_FROM:
9552 case GOMP_MAP_TOFROM:
9553 case GOMP_MAP_ALWAYS_TO:
9554 case GOMP_MAP_ALWAYS_FROM:
9555 case GOMP_MAP_ALWAYS_TOFROM:
9556 case GOMP_MAP_RELEASE:
9557 case GOMP_MAP_FORCE_TO:
9558 case GOMP_MAP_FORCE_FROM:
9559 case GOMP_MAP_FORCE_TOFROM:
9560 case GOMP_MAP_FORCE_PRESENT:
9561 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
9562 break;
9563 case GOMP_MAP_DELETE:
9564 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
9565 default:
9566 break;
9568 if (tkind_zero != tkind)
9570 if (integer_zerop (s))
9571 tkind = tkind_zero;
9572 else if (integer_nonzerop (s))
9573 tkind_zero = tkind;
9575 break;
9576 case OMP_CLAUSE_FIRSTPRIVATE:
9577 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9578 tkind = GOMP_MAP_TO;
9579 tkind_zero = tkind;
9580 break;
9581 case OMP_CLAUSE_TO:
9582 tkind = GOMP_MAP_TO;
9583 tkind_zero = tkind;
9584 break;
9585 case OMP_CLAUSE_FROM:
9586 tkind = GOMP_MAP_FROM;
9587 tkind_zero = tkind;
9588 break;
9589 default:
9590 gcc_unreachable ();
9592 gcc_checking_assert (tkind
9593 < (HOST_WIDE_INT_C (1U) << talign_shift));
9594 gcc_checking_assert (tkind_zero
9595 < (HOST_WIDE_INT_C (1U) << talign_shift));
9596 talign = ceil_log2 (talign);
9597 tkind |= talign << talign_shift;
9598 tkind_zero |= talign << talign_shift;
9599 gcc_checking_assert (tkind
9600 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9601 gcc_checking_assert (tkind_zero
9602 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9603 if (tkind == tkind_zero)
9604 x = build_int_cstu (tkind_type, tkind);
9605 else
9607 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
9608 x = build3 (COND_EXPR, tkind_type,
9609 fold_build2 (EQ_EXPR, boolean_type_node,
9610 unshare_expr (s), size_zero_node),
9611 build_int_cstu (tkind_type, tkind_zero),
9612 build_int_cstu (tkind_type, tkind));
9614 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
9615 if (nc && nc != c)
9616 c = nc;
9617 break;
9619 case OMP_CLAUSE_FIRSTPRIVATE:
9620 if (is_oacc_parallel (ctx))
9621 goto oacc_firstprivate_map;
9622 ovar = OMP_CLAUSE_DECL (c);
9623 if (omp_is_reference (ovar))
9624 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9625 else
9626 talign = DECL_ALIGN_UNIT (ovar);
9627 var = lookup_decl_in_outer_ctx (ovar, ctx);
9628 x = build_sender_ref (ovar, ctx);
9629 tkind = GOMP_MAP_FIRSTPRIVATE;
9630 type = TREE_TYPE (ovar);
9631 if (omp_is_reference (ovar))
9632 type = TREE_TYPE (type);
9633 if ((INTEGRAL_TYPE_P (type)
9634 && TYPE_PRECISION (type) <= POINTER_SIZE)
9635 || TREE_CODE (type) == POINTER_TYPE)
9637 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9638 tree t = var;
9639 if (omp_is_reference (var))
9640 t = build_simple_mem_ref (var);
9641 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9642 TREE_NO_WARNING (var) = 1;
9643 if (TREE_CODE (type) != POINTER_TYPE)
9644 t = fold_convert (pointer_sized_int_node, t);
9645 t = fold_convert (TREE_TYPE (x), t);
9646 gimplify_assign (x, t, &ilist);
9648 else if (omp_is_reference (var))
9649 gimplify_assign (x, var, &ilist);
9650 else if (is_gimple_reg (var))
9652 tree avar = create_tmp_var (TREE_TYPE (var));
9653 mark_addressable (avar);
9654 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9655 TREE_NO_WARNING (var) = 1;
9656 gimplify_assign (avar, var, &ilist);
9657 avar = build_fold_addr_expr (avar);
9658 gimplify_assign (x, avar, &ilist);
9660 else
9662 var = build_fold_addr_expr (var);
9663 gimplify_assign (x, var, &ilist);
9665 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
9666 s = size_int (0);
9667 else if (omp_is_reference (ovar))
9668 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9669 else
9670 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9671 s = fold_convert (size_type_node, s);
9672 purpose = size_int (map_idx++);
9673 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9674 if (TREE_CODE (s) != INTEGER_CST)
9675 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9677 gcc_checking_assert (tkind
9678 < (HOST_WIDE_INT_C (1U) << talign_shift));
9679 talign = ceil_log2 (talign);
9680 tkind |= talign << talign_shift;
9681 gcc_checking_assert (tkind
9682 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9683 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9684 build_int_cstu (tkind_type, tkind));
9685 break;
9687 case OMP_CLAUSE_USE_DEVICE_PTR:
9688 case OMP_CLAUSE_IS_DEVICE_PTR:
9689 ovar = OMP_CLAUSE_DECL (c);
9690 var = lookup_decl_in_outer_ctx (ovar, ctx);
9691 x = build_sender_ref (ovar, ctx);
9692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9693 tkind = GOMP_MAP_USE_DEVICE_PTR;
9694 else
9695 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9696 type = TREE_TYPE (ovar);
9697 if (TREE_CODE (type) == ARRAY_TYPE)
9698 var = build_fold_addr_expr (var);
9699 else
9701 if (omp_is_reference (ovar))
9703 type = TREE_TYPE (type);
9704 if (TREE_CODE (type) != ARRAY_TYPE)
9705 var = build_simple_mem_ref (var);
9706 var = fold_convert (TREE_TYPE (x), var);
9709 gimplify_assign (x, var, &ilist);
9710 s = size_int (0);
9711 purpose = size_int (map_idx++);
9712 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9713 gcc_checking_assert (tkind
9714 < (HOST_WIDE_INT_C (1U) << talign_shift));
9715 gcc_checking_assert (tkind
9716 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9717 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9718 build_int_cstu (tkind_type, tkind));
9719 break;
9722 gcc_assert (map_idx == map_cnt);
9724 DECL_INITIAL (TREE_VEC_ELT (t, 1))
9725 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
9726 DECL_INITIAL (TREE_VEC_ELT (t, 2))
9727 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
9728 for (int i = 1; i <= 2; i++)
9729 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
9731 gimple_seq initlist = NULL;
9732 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
9733 TREE_VEC_ELT (t, i)),
9734 &initlist, true, NULL_TREE);
9735 gimple_seq_add_seq (&ilist, initlist);
9737 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
9738 NULL);
9739 TREE_THIS_VOLATILE (clobber) = 1;
9740 gimple_seq_add_stmt (&olist,
9741 gimple_build_assign (TREE_VEC_ELT (t, i),
9742 clobber));
9745 tree clobber = build_constructor (ctx->record_type, NULL);
9746 TREE_THIS_VOLATILE (clobber) = 1;
9747 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9748 clobber));
9751 /* Once all the expansions are done, sequence all the different
9752 fragments inside gimple_omp_body. */
9754 new_body = NULL;
9756 if (offloaded
9757 && ctx->record_type)
9759 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9760 /* fixup_child_record_type might have changed receiver_decl's type. */
9761 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9762 gimple_seq_add_stmt (&new_body,
9763 gimple_build_assign (ctx->receiver_decl, t));
9765 gimple_seq_add_seq (&new_body, fplist);
9767 if (offloaded || data_region)
9769 tree prev = NULL_TREE;
9770 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9771 switch (OMP_CLAUSE_CODE (c))
9773 tree var, x;
9774 default:
9775 break;
9776 case OMP_CLAUSE_FIRSTPRIVATE:
9777 if (is_gimple_omp_oacc (ctx->stmt))
9778 break;
9779 var = OMP_CLAUSE_DECL (c);
9780 if (omp_is_reference (var)
9781 || is_gimple_reg_type (TREE_TYPE (var)))
9783 tree new_var = lookup_decl (var, ctx);
9784 tree type;
9785 type = TREE_TYPE (var);
9786 if (omp_is_reference (var))
9787 type = TREE_TYPE (type);
9788 if ((INTEGRAL_TYPE_P (type)
9789 && TYPE_PRECISION (type) <= POINTER_SIZE)
9790 || TREE_CODE (type) == POINTER_TYPE)
9792 x = build_receiver_ref (var, false, ctx);
9793 if (TREE_CODE (type) != POINTER_TYPE)
9794 x = fold_convert (pointer_sized_int_node, x);
9795 x = fold_convert (type, x);
9796 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9797 fb_rvalue);
9798 if (omp_is_reference (var))
9800 tree v = create_tmp_var_raw (type, get_name (var));
9801 gimple_add_tmp_var (v);
9802 TREE_ADDRESSABLE (v) = 1;
9803 gimple_seq_add_stmt (&new_body,
9804 gimple_build_assign (v, x));
9805 x = build_fold_addr_expr (v);
9807 gimple_seq_add_stmt (&new_body,
9808 gimple_build_assign (new_var, x));
9810 else
9812 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
9813 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9814 fb_rvalue);
9815 gimple_seq_add_stmt (&new_body,
9816 gimple_build_assign (new_var, x));
9819 else if (is_variable_sized (var))
9821 tree pvar = DECL_VALUE_EXPR (var);
9822 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9823 pvar = TREE_OPERAND (pvar, 0);
9824 gcc_assert (DECL_P (pvar));
9825 tree new_var = lookup_decl (pvar, ctx);
9826 x = build_receiver_ref (var, false, ctx);
9827 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9828 gimple_seq_add_stmt (&new_body,
9829 gimple_build_assign (new_var, x));
9831 break;
9832 case OMP_CLAUSE_PRIVATE:
9833 if (is_gimple_omp_oacc (ctx->stmt))
9834 break;
9835 var = OMP_CLAUSE_DECL (c);
9836 if (omp_is_reference (var))
9838 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9839 tree new_var = lookup_decl (var, ctx);
9840 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
9841 if (TREE_CONSTANT (x))
9843 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
9844 get_name (var));
9845 gimple_add_tmp_var (x);
9846 TREE_ADDRESSABLE (x) = 1;
9847 x = build_fold_addr_expr_loc (clause_loc, x);
9849 else
9850 break;
9852 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
9853 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9854 gimple_seq_add_stmt (&new_body,
9855 gimple_build_assign (new_var, x));
9857 break;
9858 case OMP_CLAUSE_USE_DEVICE_PTR:
9859 case OMP_CLAUSE_IS_DEVICE_PTR:
9860 var = OMP_CLAUSE_DECL (c);
9861 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9862 x = build_sender_ref (var, ctx);
9863 else
9864 x = build_receiver_ref (var, false, ctx);
9865 if (is_variable_sized (var))
9867 tree pvar = DECL_VALUE_EXPR (var);
9868 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9869 pvar = TREE_OPERAND (pvar, 0);
9870 gcc_assert (DECL_P (pvar));
9871 tree new_var = lookup_decl (pvar, ctx);
9872 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9873 gimple_seq_add_stmt (&new_body,
9874 gimple_build_assign (new_var, x));
9876 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9878 tree new_var = lookup_decl (var, ctx);
9879 new_var = DECL_VALUE_EXPR (new_var);
9880 gcc_assert (TREE_CODE (new_var) == MEM_REF);
9881 new_var = TREE_OPERAND (new_var, 0);
9882 gcc_assert (DECL_P (new_var));
9883 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9884 gimple_seq_add_stmt (&new_body,
9885 gimple_build_assign (new_var, x));
9887 else
9889 tree type = TREE_TYPE (var);
9890 tree new_var = lookup_decl (var, ctx);
9891 if (omp_is_reference (var))
9893 type = TREE_TYPE (type);
9894 if (TREE_CODE (type) != ARRAY_TYPE)
9896 tree v = create_tmp_var_raw (type, get_name (var));
9897 gimple_add_tmp_var (v);
9898 TREE_ADDRESSABLE (v) = 1;
9899 x = fold_convert (type, x);
9900 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9901 fb_rvalue);
9902 gimple_seq_add_stmt (&new_body,
9903 gimple_build_assign (v, x));
9904 x = build_fold_addr_expr (v);
9907 new_var = DECL_VALUE_EXPR (new_var);
9908 x = fold_convert (TREE_TYPE (new_var), x);
9909 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9910 gimple_seq_add_stmt (&new_body,
9911 gimple_build_assign (new_var, x));
9913 break;
9915 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
9916 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
9917 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
9918 or references to VLAs. */
9919 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9920 switch (OMP_CLAUSE_CODE (c))
9922 tree var;
9923 default:
9924 break;
9925 case OMP_CLAUSE_MAP:
9926 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9927 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9929 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9930 poly_int64 offset = 0;
9931 gcc_assert (prev);
9932 var = OMP_CLAUSE_DECL (c);
9933 if (DECL_P (var)
9934 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
9935 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
9936 ctx))
9937 && varpool_node::get_create (var)->offloadable)
9938 break;
9939 if (TREE_CODE (var) == INDIRECT_REF
9940 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
9941 var = TREE_OPERAND (var, 0);
9942 if (TREE_CODE (var) == COMPONENT_REF)
9944 var = get_addr_base_and_unit_offset (var, &offset);
9945 gcc_assert (var != NULL_TREE && DECL_P (var));
9947 else if (DECL_SIZE (var)
9948 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9950 tree var2 = DECL_VALUE_EXPR (var);
9951 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9952 var2 = TREE_OPERAND (var2, 0);
9953 gcc_assert (DECL_P (var2));
9954 var = var2;
9956 tree new_var = lookup_decl (var, ctx), x;
9957 tree type = TREE_TYPE (new_var);
9958 bool is_ref;
9959 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
9960 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9961 == COMPONENT_REF))
9963 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
9964 is_ref = true;
9965 new_var = build2 (MEM_REF, type,
9966 build_fold_addr_expr (new_var),
9967 build_int_cst (build_pointer_type (type),
9968 offset));
9970 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
9972 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
9973 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
9974 new_var = build2 (MEM_REF, type,
9975 build_fold_addr_expr (new_var),
9976 build_int_cst (build_pointer_type (type),
9977 offset));
9979 else
9980 is_ref = omp_is_reference (var);
9981 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9982 is_ref = false;
9983 bool ref_to_array = false;
9984 if (is_ref)
9986 type = TREE_TYPE (type);
9987 if (TREE_CODE (type) == ARRAY_TYPE)
9989 type = build_pointer_type (type);
9990 ref_to_array = true;
9993 else if (TREE_CODE (type) == ARRAY_TYPE)
9995 tree decl2 = DECL_VALUE_EXPR (new_var);
9996 gcc_assert (TREE_CODE (decl2) == MEM_REF);
9997 decl2 = TREE_OPERAND (decl2, 0);
9998 gcc_assert (DECL_P (decl2));
9999 new_var = decl2;
10000 type = TREE_TYPE (new_var);
10002 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
10003 x = fold_convert_loc (clause_loc, type, x);
10004 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
10006 tree bias = OMP_CLAUSE_SIZE (c);
10007 if (DECL_P (bias))
10008 bias = lookup_decl (bias, ctx);
10009 bias = fold_convert_loc (clause_loc, sizetype, bias);
10010 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
10011 bias);
10012 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
10013 TREE_TYPE (x), x, bias);
10015 if (ref_to_array)
10016 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10017 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10018 if (is_ref && !ref_to_array)
10020 tree t = create_tmp_var_raw (type, get_name (var));
10021 gimple_add_tmp_var (t);
10022 TREE_ADDRESSABLE (t) = 1;
10023 gimple_seq_add_stmt (&new_body,
10024 gimple_build_assign (t, x));
10025 x = build_fold_addr_expr_loc (clause_loc, t);
10027 gimple_seq_add_stmt (&new_body,
10028 gimple_build_assign (new_var, x));
10029 prev = NULL_TREE;
10031 else if (OMP_CLAUSE_CHAIN (c)
10032 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
10033 == OMP_CLAUSE_MAP
10034 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10035 == GOMP_MAP_FIRSTPRIVATE_POINTER
10036 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10037 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10038 prev = c;
10039 break;
10040 case OMP_CLAUSE_PRIVATE:
10041 var = OMP_CLAUSE_DECL (c);
10042 if (is_variable_sized (var))
10044 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10045 tree new_var = lookup_decl (var, ctx);
10046 tree pvar = DECL_VALUE_EXPR (var);
10047 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10048 pvar = TREE_OPERAND (pvar, 0);
10049 gcc_assert (DECL_P (pvar));
10050 tree new_pvar = lookup_decl (pvar, ctx);
10051 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10052 tree al = size_int (DECL_ALIGN (var));
10053 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
10054 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10055 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
10056 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10057 gimple_seq_add_stmt (&new_body,
10058 gimple_build_assign (new_pvar, x));
10060 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
10062 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10063 tree new_var = lookup_decl (var, ctx);
10064 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10065 if (TREE_CONSTANT (x))
10066 break;
10067 else
10069 tree atmp
10070 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10071 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
10072 tree al = size_int (TYPE_ALIGN (rtype));
10073 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10076 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10077 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10078 gimple_seq_add_stmt (&new_body,
10079 gimple_build_assign (new_var, x));
10081 break;
10084 gimple_seq fork_seq = NULL;
10085 gimple_seq join_seq = NULL;
10087 if (is_oacc_parallel (ctx))
10089 /* If there are reductions on the offloaded region itself, treat
10090 them as a dummy GANG loop. */
10091 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
10093 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
10094 false, NULL, NULL, &fork_seq, &join_seq, ctx);
10097 gimple_seq_add_seq (&new_body, fork_seq);
10098 gimple_seq_add_seq (&new_body, tgt_body);
10099 gimple_seq_add_seq (&new_body, join_seq);
10101 if (offloaded)
10102 new_body = maybe_catch_exception (new_body);
10104 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10105 gimple_omp_set_body (stmt, new_body);
10108 bind = gimple_build_bind (NULL, NULL,
10109 tgt_bind ? gimple_bind_block (tgt_bind)
10110 : NULL_TREE);
10111 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10112 gimple_bind_add_seq (bind, ilist);
10113 gimple_bind_add_stmt (bind, stmt);
10114 gimple_bind_add_seq (bind, olist);
10116 pop_gimplify_context (NULL);
10118 if (dep_bind)
10120 gimple_bind_add_seq (dep_bind, dep_ilist);
10121 gimple_bind_add_stmt (dep_bind, bind);
10122 gimple_bind_add_seq (dep_bind, dep_olist);
10123 pop_gimplify_context (dep_bind);
10127 /* Expand code for an OpenMP teams directive. */
10129 static void
10130 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10132 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
10133 push_gimplify_context ();
10135 tree block = make_node (BLOCK);
10136 gbind *bind = gimple_build_bind (NULL, NULL, block);
10137 gsi_replace (gsi_p, bind, true);
10138 gimple_seq bind_body = NULL;
10139 gimple_seq dlist = NULL;
10140 gimple_seq olist = NULL;
10142 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10143 OMP_CLAUSE_NUM_TEAMS);
10144 if (num_teams == NULL_TREE)
10145 num_teams = build_int_cst (unsigned_type_node, 0);
10146 else
10148 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
10149 num_teams = fold_convert (unsigned_type_node, num_teams);
10150 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
10152 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10153 OMP_CLAUSE_THREAD_LIMIT);
10154 if (thread_limit == NULL_TREE)
10155 thread_limit = build_int_cst (unsigned_type_node, 0);
10156 else
10158 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
10159 thread_limit = fold_convert (unsigned_type_node, thread_limit);
10160 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
10161 fb_rvalue);
10164 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
10165 &bind_body, &dlist, ctx, NULL);
10166 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
10167 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
10168 if (!gimple_omp_teams_grid_phony (teams_stmt))
10170 gimple_seq_add_stmt (&bind_body, teams_stmt);
10171 location_t loc = gimple_location (teams_stmt);
10172 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
10173 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
10174 gimple_set_location (call, loc);
10175 gimple_seq_add_stmt (&bind_body, call);
10178 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
10179 gimple_omp_set_body (teams_stmt, NULL);
10180 gimple_seq_add_seq (&bind_body, olist);
10181 gimple_seq_add_seq (&bind_body, dlist);
10182 if (!gimple_omp_teams_grid_phony (teams_stmt))
10183 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
10184 gimple_bind_set_body (bind, bind_body);
10186 pop_gimplify_context (bind);
10188 gimple_bind_append_vars (bind, ctx->block_vars);
10189 BLOCK_VARS (block) = ctx->block_vars;
10190 if (BLOCK_VARS (block))
10191 TREE_USED (block) = 1;
10194 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
10196 static void
10197 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10199 gimple *stmt = gsi_stmt (*gsi_p);
10200 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10201 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
10202 gimple_build_omp_return (false));
10206 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
10207 regimplified. If DATA is non-NULL, lower_omp_1 is outside
10208 of OMP context, but with task_shared_vars set. */
10210 static tree
10211 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
10212 void *data)
10214 tree t = *tp;
10216 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
10217 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
10218 return t;
10220 if (task_shared_vars
10221 && DECL_P (t)
10222 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
10223 return t;
10225 /* If a global variable has been privatized, TREE_CONSTANT on
10226 ADDR_EXPR might be wrong. */
10227 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
10228 recompute_tree_invariant_for_addr_expr (t);
10230 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
10231 return NULL_TREE;
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* OMP context of the statement being regimplified; used to look up
     replacement decls for omp_member_access_dummy_var vars.  */
  omp_context *ctx;
  /* Flat vector of (saved DECL_VALUE_EXPR, decl) pairs pushed by
     lower_omp_regimplify_operands_p, so that the caller can restore
     the original DECL_VALUE_EXPRs afterwards.  */
  vec<tree> *decls;
};
10243 /* Helper function for lower_omp_regimplify_operands. Find
10244 omp_member_access_dummy_var vars and adjust temporarily their
10245 DECL_VALUE_EXPRs if needed. */
10247 static tree
10248 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
10249 void *data)
10251 tree t = omp_member_access_dummy_var (*tp);
10252 if (t)
10254 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10255 lower_omp_regimplify_operands_data *ldata
10256 = (lower_omp_regimplify_operands_data *) wi->info;
10257 tree o = maybe_lookup_decl (t, ldata->ctx);
10258 if (o != t)
10260 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
10261 ldata->decls->safe_push (*tp);
10262 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
10263 SET_DECL_VALUE_EXPR (*tp, v);
10266 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
10267 return NULL_TREE;
10270 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
10271 of omp_member_access_dummy_var vars during regimplification. */
10273 static void
10274 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
10275 gimple_stmt_iterator *gsi_p)
10277 auto_vec<tree, 10> decls;
10278 if (ctx)
10280 struct walk_stmt_info wi;
10281 memset (&wi, '\0', sizeof (wi));
10282 struct lower_omp_regimplify_operands_data data;
10283 data.ctx = ctx;
10284 data.decls = &decls;
10285 wi.info = &data;
10286 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
10288 gimple_regimplify_operands (stmt, gsi_p);
10289 while (!decls.is_empty ())
10291 tree t = decls.pop ();
10292 tree v = decls.pop ();
10293 SET_DECL_VALUE_EXPR (t, v);
10297 static void
10298 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10300 gimple *stmt = gsi_stmt (*gsi_p);
10301 struct walk_stmt_info wi;
10302 gcall *call_stmt;
10304 if (gimple_has_location (stmt))
10305 input_location = gimple_location (stmt);
10307 if (task_shared_vars)
10308 memset (&wi, '\0', sizeof (wi));
10310 /* If we have issued syntax errors, avoid doing any heavy lifting.
10311 Just replace the OMP directives with a NOP to avoid
10312 confusing RTL expansion. */
10313 if (seen_error () && is_gimple_omp (stmt))
10315 gsi_replace (gsi_p, gimple_build_nop (), true);
10316 return;
10319 switch (gimple_code (stmt))
10321 case GIMPLE_COND:
10323 gcond *cond_stmt = as_a <gcond *> (stmt);
10324 if ((ctx || task_shared_vars)
10325 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
10326 lower_omp_regimplify_p,
10327 ctx ? NULL : &wi, NULL)
10328 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
10329 lower_omp_regimplify_p,
10330 ctx ? NULL : &wi, NULL)))
10331 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
10333 break;
10334 case GIMPLE_CATCH:
10335 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
10336 break;
10337 case GIMPLE_EH_FILTER:
10338 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
10339 break;
10340 case GIMPLE_TRY:
10341 lower_omp (gimple_try_eval_ptr (stmt), ctx);
10342 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
10343 break;
10344 case GIMPLE_TRANSACTION:
10345 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
10346 ctx);
10347 break;
10348 case GIMPLE_BIND:
10349 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
10350 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
10351 break;
10352 case GIMPLE_OMP_PARALLEL:
10353 case GIMPLE_OMP_TASK:
10354 ctx = maybe_lookup_ctx (stmt);
10355 gcc_assert (ctx);
10356 if (ctx->cancellable)
10357 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10358 lower_omp_taskreg (gsi_p, ctx);
10359 break;
10360 case GIMPLE_OMP_FOR:
10361 ctx = maybe_lookup_ctx (stmt);
10362 gcc_assert (ctx);
10363 if (ctx->cancellable)
10364 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10365 lower_omp_for (gsi_p, ctx);
10366 break;
10367 case GIMPLE_OMP_SECTIONS:
10368 ctx = maybe_lookup_ctx (stmt);
10369 gcc_assert (ctx);
10370 if (ctx->cancellable)
10371 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10372 lower_omp_sections (gsi_p, ctx);
10373 break;
10374 case GIMPLE_OMP_SINGLE:
10375 ctx = maybe_lookup_ctx (stmt);
10376 gcc_assert (ctx);
10377 lower_omp_single (gsi_p, ctx);
10378 break;
10379 case GIMPLE_OMP_MASTER:
10380 ctx = maybe_lookup_ctx (stmt);
10381 gcc_assert (ctx);
10382 lower_omp_master (gsi_p, ctx);
10383 break;
10384 case GIMPLE_OMP_TASKGROUP:
10385 ctx = maybe_lookup_ctx (stmt);
10386 gcc_assert (ctx);
10387 lower_omp_taskgroup (gsi_p, ctx);
10388 break;
10389 case GIMPLE_OMP_ORDERED:
10390 ctx = maybe_lookup_ctx (stmt);
10391 gcc_assert (ctx);
10392 lower_omp_ordered (gsi_p, ctx);
10393 break;
10394 case GIMPLE_OMP_CRITICAL:
10395 ctx = maybe_lookup_ctx (stmt);
10396 gcc_assert (ctx);
10397 lower_omp_critical (gsi_p, ctx);
10398 break;
10399 case GIMPLE_OMP_ATOMIC_LOAD:
10400 if ((ctx || task_shared_vars)
10401 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
10402 as_a <gomp_atomic_load *> (stmt)),
10403 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
10404 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
10405 break;
10406 case GIMPLE_OMP_TARGET:
10407 ctx = maybe_lookup_ctx (stmt);
10408 gcc_assert (ctx);
10409 lower_omp_target (gsi_p, ctx);
10410 break;
10411 case GIMPLE_OMP_TEAMS:
10412 ctx = maybe_lookup_ctx (stmt);
10413 gcc_assert (ctx);
10414 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
10415 lower_omp_taskreg (gsi_p, ctx);
10416 else
10417 lower_omp_teams (gsi_p, ctx);
10418 break;
10419 case GIMPLE_OMP_GRID_BODY:
10420 ctx = maybe_lookup_ctx (stmt);
10421 gcc_assert (ctx);
10422 lower_omp_grid_body (gsi_p, ctx);
10423 break;
10424 case GIMPLE_CALL:
10425 tree fndecl;
10426 call_stmt = as_a <gcall *> (stmt);
10427 fndecl = gimple_call_fndecl (call_stmt);
10428 if (fndecl
10429 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10430 switch (DECL_FUNCTION_CODE (fndecl))
10432 case BUILT_IN_GOMP_BARRIER:
10433 if (ctx == NULL)
10434 break;
10435 /* FALLTHRU */
10436 case BUILT_IN_GOMP_CANCEL:
10437 case BUILT_IN_GOMP_CANCELLATION_POINT:
10438 omp_context *cctx;
10439 cctx = ctx;
10440 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
10441 cctx = cctx->outer;
10442 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
10443 if (!cctx->cancellable)
10445 if (DECL_FUNCTION_CODE (fndecl)
10446 == BUILT_IN_GOMP_CANCELLATION_POINT)
10448 stmt = gimple_build_nop ();
10449 gsi_replace (gsi_p, stmt, false);
10451 break;
10453 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
10455 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
10456 gimple_call_set_fndecl (call_stmt, fndecl);
10457 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
10459 tree lhs;
10460 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
10461 gimple_call_set_lhs (call_stmt, lhs);
10462 tree fallthru_label;
10463 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
10464 gimple *g;
10465 g = gimple_build_label (fallthru_label);
10466 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10467 g = gimple_build_cond (NE_EXPR, lhs,
10468 fold_convert (TREE_TYPE (lhs),
10469 boolean_false_node),
10470 cctx->cancel_label, fallthru_label);
10471 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10472 break;
10473 default:
10474 break;
10476 /* FALLTHRU */
10477 default:
10478 if ((ctx || task_shared_vars)
10479 && walk_gimple_op (stmt, lower_omp_regimplify_p,
10480 ctx ? NULL : &wi))
10482 /* Just remove clobbers, this should happen only if we have
10483 "privatized" local addressable variables in SIMD regions,
10484 the clobber isn't needed in that case and gimplifying address
10485 of the ARRAY_REF into a pointer and creating MEM_REF based
10486 clobber would create worse code than we get with the clobber
10487 dropped. */
10488 if (gimple_clobber_p (stmt))
10490 gsi_replace (gsi_p, gimple_build_nop (), true);
10491 break;
10493 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
10495 break;
10499 static void
10500 lower_omp (gimple_seq *body, omp_context *ctx)
10502 location_t saved_location = input_location;
10503 gimple_stmt_iterator gsi;
10504 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10505 lower_omp_1 (&gsi, ctx);
10506 /* During gimplification, we haven't folded statments inside offloading
10507 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
10508 if (target_nesting_level || taskreg_nesting_level)
10509 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10510 fold_stmt (&gsi);
10511 input_location = saved_location;
10514 /* Main entry point. */
10516 static unsigned int
10517 execute_lower_omp (void)
10519 gimple_seq body;
10520 int i;
10521 omp_context *ctx;
10523 /* This pass always runs, to provide PROP_gimple_lomp.
10524 But often, there is nothing to do. */
10525 if (flag_openacc == 0 && flag_openmp == 0
10526 && flag_openmp_simd == 0)
10527 return 0;
10529 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
10530 delete_omp_context);
10532 body = gimple_body (current_function_decl);
10534 if (hsa_gen_requested_p ())
10535 omp_grid_gridify_all_targets (&body);
10537 scan_omp (&body, NULL);
10538 gcc_assert (taskreg_nesting_level == 0);
10539 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
10540 finish_taskreg_scan (ctx);
10541 taskreg_contexts.release ();
10543 if (all_contexts->root)
10545 if (task_shared_vars)
10546 push_gimplify_context ();
10547 lower_omp (&body, NULL);
10548 if (task_shared_vars)
10549 pop_gimplify_context (NULL);
10552 if (all_contexts)
10554 splay_tree_delete (all_contexts);
10555 all_contexts = NULL;
10557 BITMAP_FREE (task_shared_vars);
10559 /* If current function is a method, remove artificial dummy VAR_DECL created
10560 for non-static data member privatization, they aren't needed for
10561 debuginfo nor anything else, have been already replaced everywhere in the
10562 IL and cause problems with LTO. */
10563 if (DECL_ARGUMENTS (current_function_decl)
10564 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
10565 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
10566 == POINTER_TYPE))
10567 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
10568 return 0;
10571 namespace {
10573 const pass_data pass_data_lower_omp =
10575 GIMPLE_PASS, /* type */
10576 "omplower", /* name */
10577 OPTGROUP_OMP, /* optinfo_flags */
10578 TV_NONE, /* tv_id */
10579 PROP_gimple_any, /* properties_required */
10580 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
10581 0, /* properties_destroyed */
10582 0, /* todo_flags_start */
10583 0, /* todo_flags_finish */
10586 class pass_lower_omp : public gimple_opt_pass
10588 public:
10589 pass_lower_omp (gcc::context *ctxt)
10590 : gimple_opt_pass (pass_data_lower_omp, ctxt)
10593 /* opt_pass methods: */
10594 virtual unsigned int execute (function *) { return execute_lower_omp (); }
10596 }; // class pass_lower_omp
10598 } // anon namespace
10600 gimple_opt_pass *
10601 make_pass_lower_omp (gcc::context *ctxt)
10603 return new pass_lower_omp (ctxt);
10606 /* The following is a utility to diagnose structured block violations.
10607 It is not part of the "omplower" pass, as that's invoked too late. It
10608 should be invoked by the respective front ends after gimplification. */
10610 static splay_tree all_labels;
10612 /* Check for mismatched contexts and generate an error if needed. Return
10613 true if an error is detected. */
10615 static bool
10616 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
10617 gimple *branch_ctx, gimple *label_ctx)
10619 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
10620 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
10622 if (label_ctx == branch_ctx)
10623 return false;
10625 const char* kind = NULL;
10627 if (flag_openacc)
10629 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
10630 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
10632 gcc_checking_assert (kind == NULL);
10633 kind = "OpenACC";
10636 if (kind == NULL)
10638 gcc_checking_assert (flag_openmp || flag_openmp_simd);
10639 kind = "OpenMP";
10642 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
10643 so we could traverse it and issue a correct "exit" or "enter" error
10644 message upon a structured block violation.
10646 We built the context by building a list with tree_cons'ing, but there is
10647 no easy counterpart in gimple tuples. It seems like far too much work
10648 for issuing exit/enter error messages. If someone really misses the
10649 distinct error message... patches welcome. */
10651 #if 0
10652 /* Try to avoid confusing the user by producing and error message
10653 with correct "exit" or "enter" verbiage. We prefer "exit"
10654 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
10655 if (branch_ctx == NULL)
10656 exit_p = false;
10657 else
10659 while (label_ctx)
10661 if (TREE_VALUE (label_ctx) == branch_ctx)
10663 exit_p = false;
10664 break;
10666 label_ctx = TREE_CHAIN (label_ctx);
10670 if (exit_p)
10671 error ("invalid exit from %s structured block", kind);
10672 else
10673 error ("invalid entry to %s structured block", kind);
10674 #endif
10676 /* If it's obvious we have an invalid entry, be specific about the error. */
10677 if (branch_ctx == NULL)
10678 error ("invalid entry to %s structured block", kind);
10679 else
10681 /* Otherwise, be vague and lazy, but efficient. */
10682 error ("invalid branch to/from %s structured block", kind);
10685 gsi_replace (gsi_p, gimple_build_nop (), false);
10686 return true;
10689 /* Pass 1: Create a minimal tree of structured blocks, and record
10690 where each label is found. */
10692 static tree
10693 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10694 struct walk_stmt_info *wi)
10696 gimple *context = (gimple *) wi->info;
10697 gimple *inner_context;
10698 gimple *stmt = gsi_stmt (*gsi_p);
10700 *handled_ops_p = true;
10702 switch (gimple_code (stmt))
10704 WALK_SUBSTMTS;
10706 case GIMPLE_OMP_PARALLEL:
10707 case GIMPLE_OMP_TASK:
10708 case GIMPLE_OMP_SECTIONS:
10709 case GIMPLE_OMP_SINGLE:
10710 case GIMPLE_OMP_SECTION:
10711 case GIMPLE_OMP_MASTER:
10712 case GIMPLE_OMP_ORDERED:
10713 case GIMPLE_OMP_CRITICAL:
10714 case GIMPLE_OMP_TARGET:
10715 case GIMPLE_OMP_TEAMS:
10716 case GIMPLE_OMP_TASKGROUP:
10717 /* The minimal context here is just the current OMP construct. */
10718 inner_context = stmt;
10719 wi->info = inner_context;
10720 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10721 wi->info = context;
10722 break;
10724 case GIMPLE_OMP_FOR:
10725 inner_context = stmt;
10726 wi->info = inner_context;
10727 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10728 walk them. */
10729 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10730 diagnose_sb_1, NULL, wi);
10731 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10732 wi->info = context;
10733 break;
10735 case GIMPLE_LABEL:
10736 splay_tree_insert (all_labels,
10737 (splay_tree_key) gimple_label_label (
10738 as_a <glabel *> (stmt)),
10739 (splay_tree_value) context);
10740 break;
10742 default:
10743 break;
10746 return NULL_TREE;
10749 /* Pass 2: Check each branch and see if its context differs from that of
10750 the destination label's context. */
10752 static tree
10753 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10754 struct walk_stmt_info *wi)
10756 gimple *context = (gimple *) wi->info;
10757 splay_tree_node n;
10758 gimple *stmt = gsi_stmt (*gsi_p);
10760 *handled_ops_p = true;
10762 switch (gimple_code (stmt))
10764 WALK_SUBSTMTS;
10766 case GIMPLE_OMP_PARALLEL:
10767 case GIMPLE_OMP_TASK:
10768 case GIMPLE_OMP_SECTIONS:
10769 case GIMPLE_OMP_SINGLE:
10770 case GIMPLE_OMP_SECTION:
10771 case GIMPLE_OMP_MASTER:
10772 case GIMPLE_OMP_ORDERED:
10773 case GIMPLE_OMP_CRITICAL:
10774 case GIMPLE_OMP_TARGET:
10775 case GIMPLE_OMP_TEAMS:
10776 case GIMPLE_OMP_TASKGROUP:
10777 wi->info = stmt;
10778 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10779 wi->info = context;
10780 break;
10782 case GIMPLE_OMP_FOR:
10783 wi->info = stmt;
10784 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10785 walk them. */
10786 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
10787 diagnose_sb_2, NULL, wi);
10788 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10789 wi->info = context;
10790 break;
10792 case GIMPLE_COND:
10794 gcond *cond_stmt = as_a <gcond *> (stmt);
10795 tree lab = gimple_cond_true_label (cond_stmt);
10796 if (lab)
10798 n = splay_tree_lookup (all_labels,
10799 (splay_tree_key) lab);
10800 diagnose_sb_0 (gsi_p, context,
10801 n ? (gimple *) n->value : NULL);
10803 lab = gimple_cond_false_label (cond_stmt);
10804 if (lab)
10806 n = splay_tree_lookup (all_labels,
10807 (splay_tree_key) lab);
10808 diagnose_sb_0 (gsi_p, context,
10809 n ? (gimple *) n->value : NULL);
10812 break;
10814 case GIMPLE_GOTO:
10816 tree lab = gimple_goto_dest (stmt);
10817 if (TREE_CODE (lab) != LABEL_DECL)
10818 break;
10820 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10821 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
10823 break;
10825 case GIMPLE_SWITCH:
10827 gswitch *switch_stmt = as_a <gswitch *> (stmt);
10828 unsigned int i;
10829 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
10831 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
10832 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10833 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
10834 break;
10837 break;
10839 case GIMPLE_RETURN:
10840 diagnose_sb_0 (gsi_p, context, NULL);
10841 break;
10843 default:
10844 break;
10847 return NULL_TREE;
10850 static unsigned int
10851 diagnose_omp_structured_block_errors (void)
10853 struct walk_stmt_info wi;
10854 gimple_seq body = gimple_body (current_function_decl);
10856 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
10858 memset (&wi, 0, sizeof (wi));
10859 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
10861 memset (&wi, 0, sizeof (wi));
10862 wi.want_locations = true;
10863 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
10865 gimple_set_body (current_function_decl, body);
10867 splay_tree_delete (all_labels);
10868 all_labels = NULL;
10870 return 0;
10873 namespace {
10875 const pass_data pass_data_diagnose_omp_blocks =
10877 GIMPLE_PASS, /* type */
10878 "*diagnose_omp_blocks", /* name */
10879 OPTGROUP_OMP, /* optinfo_flags */
10880 TV_NONE, /* tv_id */
10881 PROP_gimple_any, /* properties_required */
10882 0, /* properties_provided */
10883 0, /* properties_destroyed */
10884 0, /* todo_flags_start */
10885 0, /* todo_flags_finish */
10888 class pass_diagnose_omp_blocks : public gimple_opt_pass
10890 public:
10891 pass_diagnose_omp_blocks (gcc::context *ctxt)
10892 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
10895 /* opt_pass methods: */
10896 virtual bool gate (function *)
10898 return flag_openacc || flag_openmp || flag_openmp_simd;
10900 virtual unsigned int execute (function *)
10902 return diagnose_omp_structured_block_errors ();
10905 }; // class pass_diagnose_omp_blocks
10907 } // anon namespace
10909 gimple_opt_pass *
10910 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
10912 return new pass_diagnose_omp_blocks (ctxt);
10916 #include "gt-omp-low.h"