[official-gcc.git] / gcc / omp-low.c
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
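
/* As a deliberately simplified sketch (the names foo._omp_fn.0 and
   .omp_data_s below are illustrative; outlining is completed later by
   pass_expand_omp), a region such as

       #pragma omp parallel shared (x)
       use (&x);

   is rewritten so that the body lives in a child function receiving
   all communicated state through a single pointer, roughly

       struct .omp_data_s { int *x; };      /+ or int x, if copy-in/out +/

       static void foo._omp_fn.0 (void *.omp_data_i)
       {
         struct .omp_data_s *d = (struct .omp_data_s *) .omp_data_i;
         use (d->x);
       }

   while the directive itself is replaced by code that fills in an
   .omp_data_s instance and hands foo._omp_fn.0 to the runtime.
   Whether a field holds the value or its address is decided by
   use_pointer_for_field below.  */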
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* And a hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
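
/* A minimal example of how the maps above cooperate, assuming a shared
   variable X that is passed by reference: scan_sharing_clauses installs
   a FIELD_DECL for X into RECORD_TYPE and FIELD_MAP; lowering then emits
   .omp_data_o.x = &x on the sending side (via build_sender_ref) and
   rebuilds uses of X as *.omp_data_i->x on the receiving side (via
   build_receiver_ref).  */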
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
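
/* WALK_SUBSTMTS is spelled out inside callers' switches over gimple
   codes; omp_find_combined_for below is one real user.  Schematically:

       switch (gimple_code (stmt))
         {
         WALK_SUBSTMTS;
         case GIMPLE_OMP_FOR:
           ... handle the interesting statement ...
         default:
           break;
         }
*/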
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
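
/* For illustration (C++ only, details simplified): in

       struct S { int m; void f (); };
       void S::f () { #pragma omp parallel private (m) ... }

   the front end represents the privatized member M by an artificial
   VAR_DECL whose DECL_VALUE_EXPR is this->m; the loop above peels the
   COMPONENT_REF (and any casts or offsets) back to the "this"
   PARM_DECL.  */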
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
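
/* Usage sketch: with X being the value expr this->m, FROM the "this"
   PARM_DECL and TO its replacement in an outer context,
   unshare_and_remap (X, FROM, TO) yields a fresh to->m tree;
   build_outer_var_ref below uses exactly this to remap member access
   dummy vars.  */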
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" forms allow for the variable not to
   have been entered; the plain forms assert that the variable must
   have been entered.  */
static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
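
/* Two concrete outcomes of the function above, as a sketch: a plain
   "int x" shared on a non-nested parallel returns false, so X's value
   is copied into and out of the communication record; once X is
   TREE_ADDRESSABLE, or the sharing construct is a task, it returns
   true and only &x travels through the record.  */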
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because task needs
     to take its address.  But we don't need to take the address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
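
/* MASK bits, as implied by the assertions and callers above and below
   (a reading aid, not part of the interface): bit 1 installs the field
   into RECORD_TYPE/FIELD_MAP, bit 2 into SRECORD_TYPE/SFIELD_MAP, bit 4
   turns an array into a pointer-to-pointer field, and bit 8 keys the
   maps by &DECL_UID (VAR) rather than by VAR itself.  E.g. the common
   install_var_field (decl, by_ref, 3, ctx) creates matching fields in
   both records.  */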
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;
  ctx->cb.adjust_array_error_bounds = true;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
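
/* E.g. the first parallel outlined from foo becomes foo._omp_fn.0, the
   next foo._omp_fn.1, and a task copy function foo._omp_cpyfn.N; the
   numbering shown is illustrative and is assigned by
   clone_function_name_numbered.  */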
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
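
/* The decl built above corresponds, schematically, to

       static void foo._omp_fn.N (void *.omp_data_i);

   for a body function, and to

       static void foo._omp_cpyfn.N (void *.omp_data_o,
				     void *.omp_data_i);

   for a task copy function, matching the build_function_type_list
   calls at the top of this function (names illustrative).  */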
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
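
/* Illustrative case: for a combined

       #pragma omp parallel for collapse (2) lastprivate (k)
       for (i = 0; i < n; i++)
	 for (j = 0; j < m; j++)
	   ...

   with non-constant bounds, this adds four _looptemp_ clauses: istart0
   and iend0, one temporary for the inner loop count, and one for the
   total iteration count, so the expanded parallel can communicate them
   to the loop expansion.  */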
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
1914 /* Scan an OpenMP task directive. */
1916 static void
1917 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1919 omp_context *ctx;
1920 tree name, t;
1921 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1923 /* Ignore task directives with empty bodies, unless they have a depend
1924 clause. */
1925 if (optimize > 0
1926 && gimple_omp_body (stmt)
1927 && empty_body_p (gimple_omp_body (stmt))
1928 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1930 gsi_replace (gsi, gimple_build_nop (), false);
1931 return;
1934 if (gimple_omp_task_taskloop_p (stmt))
1935 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1937 ctx = new_omp_context (stmt, outer_ctx);
1939 if (gimple_omp_task_taskwait_p (stmt))
1941 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1942 return;
1945 taskreg_contexts.safe_push (ctx);
1946 if (taskreg_nesting_level > 1)
1947 ctx->is_nested = true;
1948 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1949 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1950 name = create_tmp_var_name (".omp_data_s");
1951 name = build_decl (gimple_location (stmt),
1952 TYPE_DECL, name, ctx->record_type);
1953 DECL_ARTIFICIAL (name) = 1;
1954 DECL_NAMELESS (name) = 1;
1955 TYPE_NAME (ctx->record_type) = name;
1956 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1957 create_omp_child_function (ctx, false);
1958 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1960 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1962 if (ctx->srecord_type)
1964 name = create_tmp_var_name (".omp_data_a");
1965 name = build_decl (gimple_location (stmt),
1966 TYPE_DECL, name, ctx->srecord_type);
1967 DECL_ARTIFICIAL (name) = 1;
1968 DECL_NAMELESS (name) = 1;
1969 TYPE_NAME (ctx->srecord_type) = name;
1970 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1971 create_omp_child_function (ctx, true);
1974 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1976 if (TYPE_FIELDS (ctx->record_type) == NULL)
1978 ctx->record_type = ctx->receiver_decl = NULL;
1979 t = build_int_cst (long_integer_type_node, 0);
1980 gimple_omp_task_set_arg_size (stmt, t);
1981 t = build_int_cst (long_integer_type_node, 1);
1982 gimple_omp_task_set_arg_align (stmt, t);
1986 /* Helper function for finish_taskreg_scan, called through walk_tree.
1987 If maybe_lookup_decl_in_outer_ctx returns a different decl for some
1988 tree, replace it in the expression. */
1990 static tree
1991 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1993 if (VAR_P (*tp))
1995 omp_context *ctx = (omp_context *) data;
1996 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1997 if (t != *tp)
1999 if (DECL_HAS_VALUE_EXPR_P (t))
2000 t = unshare_expr (DECL_VALUE_EXPR (t));
2001 *tp = t;
2003 *walk_subtrees = 0;
2005 else if (IS_TYPE_OR_DECL_P (*tp))
2006 *walk_subtrees = 0;
2007 return NULL_TREE;
2010 /* If any decls have been made addressable during scan_omp,
2011 adjust their fields if needed, and lay out the record types
2012 of parallel/task constructs. */
2014 static void
2015 finish_taskreg_scan (omp_context *ctx)
2017 if (ctx->record_type == NULL_TREE)
2018 return;
2020 /* If any task_shared_vars were needed, verify for all
2021 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2022 statements whether use_pointer_for_field has changed
2023 because of that. If it did, update the field types now. */
2024 if (task_shared_vars)
2026 tree c;
2028 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2029 c; c = OMP_CLAUSE_CHAIN (c))
2030 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2031 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2033 tree decl = OMP_CLAUSE_DECL (c);
2035 /* Global variables don't need to be copied,
2036 the receiver side will use them directly. */
2037 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2038 continue;
2039 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2040 || !use_pointer_for_field (decl, ctx))
2041 continue;
2042 tree field = lookup_field (decl, ctx);
2043 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2044 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2045 continue;
2046 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2047 TREE_THIS_VOLATILE (field) = 0;
2048 DECL_USER_ALIGN (field) = 0;
2049 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2050 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2051 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2052 if (ctx->srecord_type)
2054 tree sfield = lookup_sfield (decl, ctx);
2055 TREE_TYPE (sfield) = TREE_TYPE (field);
2056 TREE_THIS_VOLATILE (sfield) = 0;
2057 DECL_USER_ALIGN (sfield) = 0;
2058 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2059 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2060 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2065 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2067 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2068 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2069 if (c)
2071 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2072 expects to find it at the start of data. */
2073 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2074 tree *p = &TYPE_FIELDS (ctx->record_type);
2075 while (*p)
2076 if (*p == f)
2078 *p = DECL_CHAIN (*p);
2079 break;
2081 else
2082 p = &DECL_CHAIN (*p);
2083 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2084 TYPE_FIELDS (ctx->record_type) = f;
2086 layout_type (ctx->record_type);
2087 fixup_child_record_type (ctx);
2089 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2091 layout_type (ctx->record_type);
2092 fixup_child_record_type (ctx);
2094 else
2096 location_t loc = gimple_location (ctx->stmt);
2097 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2098 /* Move VLA fields to the end. */
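	 /* (A field of non-constant size would make the offset of every
	    field after it non-constant as well, so keep all fixed-size
	    fields in front; illustrative rationale.)  */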
2099 p = &TYPE_FIELDS (ctx->record_type);
2100 while (*p)
2101 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2102 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2104 *q = *p;
2105 *p = TREE_CHAIN (*p);
2106 TREE_CHAIN (*q) = NULL_TREE;
2107 q = &TREE_CHAIN (*q);
2109 else
2110 p = &DECL_CHAIN (*p);
2111 *p = vla_fields;
2112 if (gimple_omp_task_taskloop_p (ctx->stmt))
2114 /* Move the fields corresponding to the first and second _looptemp_
2115 clauses to the front. They are filled by GOMP_taskloop
2116 and thus need to be in specific positions. */
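	  /* After the reordering below, the record thus starts with
	     { looptemp1; looptemp2; [reductemp;] <other fields>;
	       <VLA fields> } -- an illustrative sketch of the layout.  */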
2117 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2118 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2119 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2120 OMP_CLAUSE__LOOPTEMP_);
2121 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2122 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2123 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2124 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2125 p = &TYPE_FIELDS (ctx->record_type);
2126 while (*p)
2127 if (*p == f1 || *p == f2 || *p == f3)
2128 *p = DECL_CHAIN (*p);
2129 else
2130 p = &DECL_CHAIN (*p);
2131 DECL_CHAIN (f1) = f2;
2132 if (c3)
2134 DECL_CHAIN (f2) = f3;
2135 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2137 else
2138 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2139 TYPE_FIELDS (ctx->record_type) = f1;
2140 if (ctx->srecord_type)
2142 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2143 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2144 if (c3)
2145 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2146 p = &TYPE_FIELDS (ctx->srecord_type);
2147 while (*p)
2148 if (*p == f1 || *p == f2 || *p == f3)
2149 *p = DECL_CHAIN (*p);
2150 else
2151 p = &DECL_CHAIN (*p);
2152 DECL_CHAIN (f1) = f2;
2153 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2154 if (c3)
2156 DECL_CHAIN (f2) = f3;
2157 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2159 else
2160 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2161 TYPE_FIELDS (ctx->srecord_type) = f1;
2164 layout_type (ctx->record_type);
2165 fixup_child_record_type (ctx);
2166 if (ctx->srecord_type)
2167 layout_type (ctx->srecord_type);
2168 tree t = fold_convert_loc (loc, long_integer_type_node,
2169 TYPE_SIZE_UNIT (ctx->record_type));
2170 if (TREE_CODE (t) != INTEGER_CST)
2172 t = unshare_expr (t);
2173 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2175 gimple_omp_task_set_arg_size (ctx->stmt, t);
2176 t = build_int_cst (long_integer_type_node,
2177 TYPE_ALIGN_UNIT (ctx->record_type));
2178 gimple_omp_task_set_arg_align (ctx->stmt, t);
2182 /* Find the enclosing offload context. */
2184 static omp_context *
2185 enclosing_target_ctx (omp_context *ctx)
2187 for (; ctx; ctx = ctx->outer)
2188 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2189 break;
2191 return ctx;
2194 /* Return true if ctx is part of an oacc kernels region. */
2196 static bool
2197 ctx_in_oacc_kernels_region (omp_context *ctx)
2199 for (;ctx != NULL; ctx = ctx->outer)
2201 gimple *stmt = ctx->stmt;
2202 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2203 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2204 return true;
2207 return false;
2210 /* Check the parallelism clauses inside a kernels region.
2211 Until kernels handling moves to use the same loop indirection
2212 scheme as parallel, we need to do this checking early. */
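/* Illustrative example: inside a kernels region

     #pragma acc loop gang
     for (...)
       #pragma acc loop gang
       for (...)

   the inner loop's gang bit is already set in OUTER_MASK, so the check
   below reports that the inner loop uses the same OpenACC parallelism
   as the containing loop.  */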
2214 static unsigned
2215 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2217 bool checking = true;
2218 unsigned outer_mask = 0;
2219 unsigned this_mask = 0;
2220 bool has_seq = false, has_auto = false;
2222 if (ctx->outer)
2223 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2224 if (!stmt)
2226 checking = false;
2227 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2228 return outer_mask;
2229 stmt = as_a <gomp_for *> (ctx->stmt);
2232 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2234 switch (OMP_CLAUSE_CODE (c))
2236 case OMP_CLAUSE_GANG:
2237 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2238 break;
2239 case OMP_CLAUSE_WORKER:
2240 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2241 break;
2242 case OMP_CLAUSE_VECTOR:
2243 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2244 break;
2245 case OMP_CLAUSE_SEQ:
2246 has_seq = true;
2247 break;
2248 case OMP_CLAUSE_AUTO:
2249 has_auto = true;
2250 break;
2251 default:
2252 break;
2256 if (checking)
2258 if (has_seq && (this_mask || has_auto))
2259 error_at (gimple_location (stmt), "%<seq%> overrides other"
2260 " OpenACC loop specifiers");
2261 else if (has_auto && this_mask)
2262 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2263 " OpenACC loop specifiers");
2265 if (this_mask & outer_mask)
2266 error_at (gimple_location (stmt), "inner loop uses same"
2267 " OpenACC parallelism as containing loop");
2270 return outer_mask | this_mask;
2273 /* Scan a GIMPLE_OMP_FOR. */
2275 static omp_context *
2276 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2278 omp_context *ctx;
2279 size_t i;
2280 tree clauses = gimple_omp_for_clauses (stmt);
2282 ctx = new_omp_context (stmt, outer_ctx);
2284 if (is_gimple_omp_oacc (stmt))
2286 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2288 if (!tgt || is_oacc_parallel (tgt))
2289 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2291 char const *check = NULL;
2293 switch (OMP_CLAUSE_CODE (c))
2295 case OMP_CLAUSE_GANG:
2296 check = "gang";
2297 break;
2299 case OMP_CLAUSE_WORKER:
2300 check = "worker";
2301 break;
2303 case OMP_CLAUSE_VECTOR:
2304 check = "vector";
2305 break;
2307 default:
2308 break;
2311 if (check && OMP_CLAUSE_OPERAND (c, 0))
2312 error_at (gimple_location (stmt),
2313 "argument not permitted on %qs clause in"
2314 " OpenACC %<parallel%>", check);
2317 if (tgt && is_oacc_kernels (tgt))
2319 /* Strip out reductions, as they are not handled yet. */
2320 tree *prev_ptr = &clauses;
2322 while (tree probe = *prev_ptr)
2324 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2326 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2327 *prev_ptr = *next_ptr;
2328 else
2329 prev_ptr = next_ptr;
2332 gimple_omp_for_set_clauses (stmt, clauses);
2333 check_oacc_kernel_gwv (stmt, ctx);
2337 scan_sharing_clauses (clauses, ctx);
2339 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2340 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2342 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2343 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2344 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2345 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2347 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2348 return ctx;
2351 /* Duplicate #pragma omp simd, one copy for SIMT, another one for SIMD. */
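/* The replacement bind built below has, roughly, the following shape
   (an illustrative sketch, not literal GIMPLE):

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, with a _simt_ clause prepended>
	   goto lab3;
     lab2: <the original loop>
     lab3: ;  */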
2353 static void
2354 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2355 omp_context *outer_ctx)
2357 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2358 gsi_replace (gsi, bind, false);
2359 gimple_seq seq = NULL;
2360 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2361 tree cond = create_tmp_var_raw (integer_type_node);
2362 DECL_CONTEXT (cond) = current_function_decl;
2363 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2364 gimple_bind_set_vars (bind, cond);
2365 gimple_call_set_lhs (g, cond);
2366 gimple_seq_add_stmt (&seq, g);
2367 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2368 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2369 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2370 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2371 gimple_seq_add_stmt (&seq, g);
2372 g = gimple_build_label (lab1);
2373 gimple_seq_add_stmt (&seq, g);
2374 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2375 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2376 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2377 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2378 gimple_omp_for_set_clauses (new_stmt, clause);
2379 gimple_seq_add_stmt (&seq, new_stmt);
2380 g = gimple_build_goto (lab3);
2381 gimple_seq_add_stmt (&seq, g);
2382 g = gimple_build_label (lab2);
2383 gimple_seq_add_stmt (&seq, g);
2384 gimple_seq_add_stmt (&seq, stmt);
2385 g = gimple_build_label (lab3);
2386 gimple_seq_add_stmt (&seq, g);
2387 gimple_bind_set_body (bind, seq);
2388 update_stmt (bind);
2389 scan_omp_for (new_stmt, outer_ctx);
2390 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2393 /* Scan an OpenMP sections directive. */
2395 static void
2396 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2398 omp_context *ctx;
2400 ctx = new_omp_context (stmt, outer_ctx);
2401 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2402 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2405 /* Scan an OpenMP single directive. */
2407 static void
2408 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2410 omp_context *ctx;
2411 tree name;
2413 ctx = new_omp_context (stmt, outer_ctx);
2414 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2415 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2416 name = create_tmp_var_name (".omp_copy_s");
2417 name = build_decl (gimple_location (stmt),
2418 TYPE_DECL, name, ctx->record_type);
2419 TYPE_NAME (ctx->record_type) = name;
2421 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2422 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2424 if (TYPE_FIELDS (ctx->record_type) == NULL)
2425 ctx->record_type = NULL;
2426 else
2427 layout_type (ctx->record_type);
2430 /* Scan a GIMPLE_OMP_TARGET. */
2432 static void
2433 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2435 omp_context *ctx;
2436 tree name;
2437 bool offloaded = is_gimple_omp_offloaded (stmt);
2438 tree clauses = gimple_omp_target_clauses (stmt);
2440 ctx = new_omp_context (stmt, outer_ctx);
2441 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2442 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2443 name = create_tmp_var_name (".omp_data_t");
2444 name = build_decl (gimple_location (stmt),
2445 TYPE_DECL, name, ctx->record_type);
2446 DECL_ARTIFICIAL (name) = 1;
2447 DECL_NAMELESS (name) = 1;
2448 TYPE_NAME (ctx->record_type) = name;
2449 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2451 if (offloaded)
2453 create_omp_child_function (ctx, false);
2454 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2457 scan_sharing_clauses (clauses, ctx);
2458 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2460 if (TYPE_FIELDS (ctx->record_type) == NULL)
2461 ctx->record_type = ctx->receiver_decl = NULL;
2462 else
2464 TYPE_FIELDS (ctx->record_type)
2465 = nreverse (TYPE_FIELDS (ctx->record_type));
2466 if (flag_checking)
2468 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2469 for (tree field = TYPE_FIELDS (ctx->record_type);
2470 field;
2471 field = DECL_CHAIN (field))
2472 gcc_assert (DECL_ALIGN (field) == align);
2474 layout_type (ctx->record_type);
2475 if (offloaded)
2476 fixup_child_record_type (ctx);
2480 /* Scan an OpenMP teams directive. */
2482 static void
2483 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2485 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2487 if (!gimple_omp_teams_host (stmt))
2489 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2490 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2491 return;
2493 taskreg_contexts.safe_push (ctx);
2494 gcc_assert (taskreg_nesting_level == 1);
2495 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2496 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2497 tree name = create_tmp_var_name (".omp_data_s");
2498 name = build_decl (gimple_location (stmt),
2499 TYPE_DECL, name, ctx->record_type);
2500 DECL_ARTIFICIAL (name) = 1;
2501 DECL_NAMELESS (name) = 1;
2502 TYPE_NAME (ctx->record_type) = name;
2503 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2504 create_omp_child_function (ctx, false);
2505 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2507 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2508 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2510 if (TYPE_FIELDS (ctx->record_type) == NULL)
2511 ctx->record_type = ctx->receiver_decl = NULL;
2514 /* Check nesting restrictions. */
2515 static bool
2516 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2518 tree c;
2520 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2521 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2522 the original copy of its contents. */
2523 return true;
2525 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2526 inside an OpenACC CTX. */
2527 if (!(is_gimple_omp (stmt)
2528 && is_gimple_omp_oacc (stmt))
2529 /* Except for atomic codes that we share with OpenMP. */
2530 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2531 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2533 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2535 error_at (gimple_location (stmt),
2536 "non-OpenACC construct inside of OpenACC routine");
2537 return false;
2539 else
2540 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2541 if (is_gimple_omp (octx->stmt)
2542 && is_gimple_omp_oacc (octx->stmt))
2544 error_at (gimple_location (stmt),
2545 "non-OpenACC construct inside of OpenACC region");
2546 return false;
2550 if (ctx != NULL)
2552 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2553 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2555 c = NULL_TREE;
2556 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2558 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2559 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2561 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2562 && (ctx->outer == NULL
2563 || !gimple_omp_for_combined_into_p (ctx->stmt)
2564 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2565 || (gimple_omp_for_kind (ctx->outer->stmt)
2566 != GF_OMP_FOR_KIND_FOR)
2567 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2569 error_at (gimple_location (stmt),
2570 "%<ordered simd threads%> must be closely "
2571 "nested inside of %<for simd%> region");
2572 return false;
2574 return true;
2577 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2578 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
2579 return true;
2580 error_at (gimple_location (stmt),
2581 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2582 " or %<#pragma omp atomic%> may not be nested inside"
2583 " %<simd%> region");
2584 return false;
2586 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2588 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2589 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2590 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2591 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2593 error_at (gimple_location (stmt),
2594 "only %<distribute%> or %<parallel%> regions are "
2595 "allowed to be strictly nested inside %<teams%> "
2596 "region");
2597 return false;
2601 switch (gimple_code (stmt))
2603 case GIMPLE_OMP_FOR:
2604 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2605 return true;
2606 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2608 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2610 error_at (gimple_location (stmt),
2611 "%<distribute%> region must be strictly nested "
2612 "inside %<teams%> construct");
2613 return false;
2615 return true;
2617 /* We split taskloop into a task and a taskloop nested within it. */
2618 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2619 return true;
2620 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2622 bool ok = false;
2624 if (ctx)
2625 switch (gimple_code (ctx->stmt))
2627 case GIMPLE_OMP_FOR:
2628 ok = (gimple_omp_for_kind (ctx->stmt)
2629 == GF_OMP_FOR_KIND_OACC_LOOP);
2630 break;
2632 case GIMPLE_OMP_TARGET:
2633 switch (gimple_omp_target_kind (ctx->stmt))
2635 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2636 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2637 ok = true;
2638 break;
2640 default:
2641 break;
2644 default:
2645 break;
2647 else if (oacc_get_fn_attrib (current_function_decl))
2648 ok = true;
2649 if (!ok)
2651 error_at (gimple_location (stmt),
2652 "OpenACC loop directive must be associated with"
2653 " an OpenACC compute region");
2654 return false;
2657 /* FALLTHRU */
2658 case GIMPLE_CALL:
2659 if (is_gimple_call (stmt)
2660 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2661 == BUILT_IN_GOMP_CANCEL
2662 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2663 == BUILT_IN_GOMP_CANCELLATION_POINT))
2665 const char *bad = NULL;
2666 const char *kind = NULL;
2667 const char *construct
2668 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2669 == BUILT_IN_GOMP_CANCEL)
2670 ? "#pragma omp cancel"
2671 : "#pragma omp cancellation point";
2672 if (ctx == NULL)
2674 error_at (gimple_location (stmt), "orphaned %qs construct",
2675 construct);
2676 return false;
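	  /* The first call argument selects the construct being cancelled:
	     1 = parallel, 2 = for, 4 = sections, 8 = taskgroup, matching
	     the case labels below (noted here for illustration).  */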
2678 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2679 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2680 : 0)
2682 case 1:
2683 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2684 bad = "#pragma omp parallel";
2685 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2686 == BUILT_IN_GOMP_CANCEL
2687 && !integer_zerop (gimple_call_arg (stmt, 1)))
2688 ctx->cancellable = true;
2689 kind = "parallel";
2690 break;
2691 case 2:
2692 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2693 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2694 bad = "#pragma omp for";
2695 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2696 == BUILT_IN_GOMP_CANCEL
2697 && !integer_zerop (gimple_call_arg (stmt, 1)))
2699 ctx->cancellable = true;
2700 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2701 OMP_CLAUSE_NOWAIT))
2702 warning_at (gimple_location (stmt), 0,
2703 "%<#pragma omp cancel for%> inside "
2704 "%<nowait%> for construct");
2705 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2706 OMP_CLAUSE_ORDERED))
2707 warning_at (gimple_location (stmt), 0,
2708 "%<#pragma omp cancel for%> inside "
2709 "%<ordered%> for construct");
2711 kind = "for";
2712 break;
2713 case 4:
2714 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2715 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2716 bad = "#pragma omp sections";
2717 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2718 == BUILT_IN_GOMP_CANCEL
2719 && !integer_zerop (gimple_call_arg (stmt, 1)))
2721 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2723 ctx->cancellable = true;
2724 if (omp_find_clause (gimple_omp_sections_clauses
2725 (ctx->stmt),
2726 OMP_CLAUSE_NOWAIT))
2727 warning_at (gimple_location (stmt), 0,
2728 "%<#pragma omp cancel sections%> inside "
2729 "%<nowait%> sections construct");
2731 else
2733 gcc_assert (ctx->outer
2734 && gimple_code (ctx->outer->stmt)
2735 == GIMPLE_OMP_SECTIONS);
2736 ctx->outer->cancellable = true;
2737 if (omp_find_clause (gimple_omp_sections_clauses
2738 (ctx->outer->stmt),
2739 OMP_CLAUSE_NOWAIT))
2740 warning_at (gimple_location (stmt), 0,
2741 "%<#pragma omp cancel sections%> inside "
2742 "%<nowait%> sections construct");
2745 kind = "sections";
2746 break;
2747 case 8:
2748 if (!is_task_ctx (ctx)
2749 && (!is_taskloop_ctx (ctx)
2750 || ctx->outer == NULL
2751 || !is_task_ctx (ctx->outer)))
2752 bad = "#pragma omp task";
2753 else
2755 for (omp_context *octx = ctx->outer;
2756 octx; octx = octx->outer)
2758 switch (gimple_code (octx->stmt))
2760 case GIMPLE_OMP_TASKGROUP:
2761 break;
2762 case GIMPLE_OMP_TARGET:
2763 if (gimple_omp_target_kind (octx->stmt)
2764 != GF_OMP_TARGET_KIND_REGION)
2765 continue;
2766 /* FALLTHRU */
2767 case GIMPLE_OMP_PARALLEL:
2768 case GIMPLE_OMP_TEAMS:
2769 error_at (gimple_location (stmt),
2770 "%<%s taskgroup%> construct not closely "
2771 "nested inside of %<taskgroup%> region",
2772 construct);
2773 return false;
2774 case GIMPLE_OMP_TASK:
2775 if (gimple_omp_task_taskloop_p (octx->stmt)
2776 && octx->outer
2777 && is_taskloop_ctx (octx->outer))
2779 tree clauses
2780 = gimple_omp_for_clauses (octx->outer->stmt);
2781 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2782 break;
2784 continue;
2785 default:
2786 continue;
2788 break;
2790 ctx->cancellable = true;
2792 kind = "taskgroup";
2793 break;
2794 default:
2795 error_at (gimple_location (stmt), "invalid arguments");
2796 return false;
2798 if (bad)
2800 error_at (gimple_location (stmt),
2801 "%<%s %s%> construct not closely nested inside of %qs",
2802 construct, kind, bad);
2803 return false;
2806 /* FALLTHRU */
2807 case GIMPLE_OMP_SECTIONS:
2808 case GIMPLE_OMP_SINGLE:
2809 for (; ctx != NULL; ctx = ctx->outer)
2810 switch (gimple_code (ctx->stmt))
2812 case GIMPLE_OMP_FOR:
2813 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2814 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2815 break;
2816 /* FALLTHRU */
2817 case GIMPLE_OMP_SECTIONS:
2818 case GIMPLE_OMP_SINGLE:
2819 case GIMPLE_OMP_ORDERED:
2820 case GIMPLE_OMP_MASTER:
2821 case GIMPLE_OMP_TASK:
2822 case GIMPLE_OMP_CRITICAL:
2823 if (is_gimple_call (stmt))
2825 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2826 != BUILT_IN_GOMP_BARRIER)
2827 return true;
2828 error_at (gimple_location (stmt),
2829 "barrier region may not be closely nested inside "
2830 "of work-sharing, %<critical%>, %<ordered%>, "
2831 "%<master%>, explicit %<task%> or %<taskloop%> "
2832 "region");
2833 return false;
2835 error_at (gimple_location (stmt),
2836 "work-sharing region may not be closely nested inside "
2837 "of work-sharing, %<critical%>, %<ordered%>, "
2838 "%<master%>, explicit %<task%> or %<taskloop%> region");
2839 return false;
2840 case GIMPLE_OMP_PARALLEL:
2841 case GIMPLE_OMP_TEAMS:
2842 return true;
2843 case GIMPLE_OMP_TARGET:
2844 if (gimple_omp_target_kind (ctx->stmt)
2845 == GF_OMP_TARGET_KIND_REGION)
2846 return true;
2847 break;
2848 default:
2849 break;
2851 break;
2852 case GIMPLE_OMP_MASTER:
2853 for (; ctx != NULL; ctx = ctx->outer)
2854 switch (gimple_code (ctx->stmt))
2856 case GIMPLE_OMP_FOR:
2857 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2858 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2859 break;
2860 /* FALLTHRU */
2861 case GIMPLE_OMP_SECTIONS:
2862 case GIMPLE_OMP_SINGLE:
2863 case GIMPLE_OMP_TASK:
2864 error_at (gimple_location (stmt),
2865 "%<master%> region may not be closely nested inside "
2866 "of work-sharing, explicit %<task%> or %<taskloop%> "
2867 "region");
2868 return false;
2869 case GIMPLE_OMP_PARALLEL:
2870 case GIMPLE_OMP_TEAMS:
2871 return true;
2872 case GIMPLE_OMP_TARGET:
2873 if (gimple_omp_target_kind (ctx->stmt)
2874 == GF_OMP_TARGET_KIND_REGION)
2875 return true;
2876 break;
2877 default:
2878 break;
2880 break;
2881 case GIMPLE_OMP_TASK:
2882 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2883 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2884 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2885 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2887 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2888 error_at (OMP_CLAUSE_LOCATION (c),
2889 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2890 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2891 return false;
2893 break;
2894 case GIMPLE_OMP_ORDERED:
2895 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2896 c; c = OMP_CLAUSE_CHAIN (c))
2898 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2900 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2901 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2902 continue;
2904 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2905 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2906 || kind == OMP_CLAUSE_DEPEND_SINK)
2908 tree oclause;
2909 /* Look for containing ordered(N) loop. */
2910 if (ctx == NULL
2911 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2912 || (oclause
2913 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2914 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2916 error_at (OMP_CLAUSE_LOCATION (c),
2917 "%<ordered%> construct with %<depend%> clause "
2918 "must be closely nested inside an %<ordered%> "
2919 "loop");
2920 return false;
2922 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2924 error_at (OMP_CLAUSE_LOCATION (c),
2925 "%<ordered%> construct with %<depend%> clause "
2926 "must be closely nested inside a loop with "
2927 "%<ordered%> clause with a parameter");
2928 return false;
2931 else
2933 error_at (OMP_CLAUSE_LOCATION (c),
2934 "invalid depend kind in omp %<ordered%> %<depend%>");
2935 return false;
2938 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2939 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2941 /* ordered simd must be closely nested inside of a simd region,
2942 and a simd region must not encounter constructs other than
2943 ordered simd, therefore ordered simd may either be orphaned,
2944 or ctx->stmt must be simd. The latter case has already been
2945 handled earlier. */
2946 if (ctx != NULL)
2948 error_at (gimple_location (stmt),
2949 "%<ordered%> %<simd%> must be closely nested inside "
2950 "%<simd%> region");
2951 return false;
2954 for (; ctx != NULL; ctx = ctx->outer)
2955 switch (gimple_code (ctx->stmt))
2957 case GIMPLE_OMP_CRITICAL:
2958 case GIMPLE_OMP_TASK:
2959 case GIMPLE_OMP_ORDERED:
2960 ordered_in_taskloop:
2961 error_at (gimple_location (stmt),
2962 "%<ordered%> region may not be closely nested inside "
2963 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2964 "%<taskloop%> region");
2965 return false;
2966 case GIMPLE_OMP_FOR:
2967 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2968 goto ordered_in_taskloop;
2969 tree o;
2970 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2971 OMP_CLAUSE_ORDERED);
2972 if (o == NULL)
2974 error_at (gimple_location (stmt),
2975 "%<ordered%> region must be closely nested inside "
2976 "a loop region with an %<ordered%> clause");
2977 return false;
2979 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
2980 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
2982 error_at (gimple_location (stmt),
2983 "%<ordered%> region without %<depend%> clause may "
2984 "not be closely nested inside a loop region with "
2985 "an %<ordered%> clause with a parameter");
2986 return false;
2988 return true;
2989 case GIMPLE_OMP_TARGET:
2990 if (gimple_omp_target_kind (ctx->stmt)
2991 != GF_OMP_TARGET_KIND_REGION)
2992 break;
2993 /* FALLTHRU */
2994 case GIMPLE_OMP_PARALLEL:
2995 case GIMPLE_OMP_TEAMS:
2996 error_at (gimple_location (stmt),
2997 "%<ordered%> region must be closely nested inside "
2998 "a loop region with an %<ordered%> clause");
2999 return false;
3000 default:
3001 break;
3003 break;
3004 case GIMPLE_OMP_CRITICAL:
3006 tree this_stmt_name
3007 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3008 for (; ctx != NULL; ctx = ctx->outer)
3009 if (gomp_critical *other_crit
3010 = dyn_cast <gomp_critical *> (ctx->stmt))
3011 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3013 error_at (gimple_location (stmt),
3014 "%<critical%> region may not be nested inside "
3015 "a %<critical%> region with the same name");
3016 return false;
3019 break;
3020 case GIMPLE_OMP_TEAMS:
3021 if (ctx == NULL)
3022 break;
3023 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3024 || (gimple_omp_target_kind (ctx->stmt)
3025 != GF_OMP_TARGET_KIND_REGION))
3027 /* Teams construct can appear either strictly nested inside of
3028 target construct with no intervening stmts, or can be encountered
3029 only by an initial task (so it must not appear inside any OpenMP
3030 construct). */
3031 error_at (gimple_location (stmt),
3032 "%<teams%> construct must be closely nested inside of "
3033 "%<target%> construct or not nested in any OpenMP "
3034 "construct");
3035 return false;
3037 break;
3038 case GIMPLE_OMP_TARGET:
3039 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3040 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3041 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3042 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3044 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3045 error_at (OMP_CLAUSE_LOCATION (c),
3046 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3047 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3048 return false;
3050 if (is_gimple_omp_offloaded (stmt)
3051 && oacc_get_fn_attrib (cfun->decl) != NULL)
3053 error_at (gimple_location (stmt),
3054 "OpenACC region inside of OpenACC routine, nested "
3055 "parallelism not supported yet");
3056 return false;
3058 for (; ctx != NULL; ctx = ctx->outer)
3060 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3062 if (is_gimple_omp (stmt)
3063 && is_gimple_omp_oacc (stmt)
3064 && is_gimple_omp (ctx->stmt))
3066 error_at (gimple_location (stmt),
3067 "OpenACC construct inside of non-OpenACC region");
3068 return false;
3070 continue;
3073 const char *stmt_name, *ctx_stmt_name;
3074 switch (gimple_omp_target_kind (stmt))
3076 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3077 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3078 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3079 case GF_OMP_TARGET_KIND_ENTER_DATA:
3080 stmt_name = "target enter data"; break;
3081 case GF_OMP_TARGET_KIND_EXIT_DATA:
3082 stmt_name = "target exit data"; break;
3083 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3084 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3085 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3086 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3087 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3088 stmt_name = "enter/exit data"; break;
3089 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3090 break;
3091 default: gcc_unreachable ();
3093 switch (gimple_omp_target_kind (ctx->stmt))
3095 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3096 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3097 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3098 ctx_stmt_name = "parallel"; break;
3099 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3100 ctx_stmt_name = "kernels"; break;
3101 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3102 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3103 ctx_stmt_name = "host_data"; break;
3104 default: gcc_unreachable ();
3107 /* OpenACC/OpenMP mismatch? */
3108 if (is_gimple_omp_oacc (stmt)
3109 != is_gimple_omp_oacc (ctx->stmt))
3111 error_at (gimple_location (stmt),
3112 "%s %qs construct inside of %s %qs region",
3113 (is_gimple_omp_oacc (stmt)
3114 ? "OpenACC" : "OpenMP"), stmt_name,
3115 (is_gimple_omp_oacc (ctx->stmt)
3116 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3117 return false;
3119 if (is_gimple_omp_offloaded (ctx->stmt))
3121 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3122 if (is_gimple_omp_oacc (ctx->stmt))
3124 error_at (gimple_location (stmt),
3125 "%qs construct inside of %qs region",
3126 stmt_name, ctx_stmt_name);
3127 return false;
3129 else
3131 warning_at (gimple_location (stmt), 0,
3132 "%qs construct inside of %qs region",
3133 stmt_name, ctx_stmt_name);
3137 break;
3138 default:
3139 break;
3141 return true;
3145 /* Helper function for scan_omp.
3147 Callback for walk_tree or for operands in walk_gimple_stmt, used to
3148 scan for OMP directives in TP. */
3150 static tree
3151 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3153 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3154 omp_context *ctx = (omp_context *) wi->info;
3155 tree t = *tp;
3157 switch (TREE_CODE (t))
3159 case VAR_DECL:
3160 case PARM_DECL:
3161 case LABEL_DECL:
3162 case RESULT_DECL:
3163 if (ctx)
3165 tree repl = remap_decl (t, &ctx->cb);
3166 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3167 *tp = repl;
3169 break;
3171 default:
3172 if (ctx && TYPE_P (t))
3173 *tp = remap_type (t, &ctx->cb);
3174 else if (!DECL_P (t))
3176 *walk_subtrees = 1;
3177 if (ctx)
3179 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3180 if (tem != TREE_TYPE (t))
3182 if (TREE_CODE (t) == INTEGER_CST)
3183 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3184 else
3185 TREE_TYPE (t) = tem;
3189 break;
3192 return NULL_TREE;
3195 /* Return true if FNDECL is a setjmp or a longjmp. */
3197 static bool
3198 setjmp_or_longjmp_p (const_tree fndecl)
3200 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3201 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3202 return true;
3204 tree declname = DECL_NAME (fndecl);
3205 if (!declname)
3206 return false;
3207 const char *name = IDENTIFIER_POINTER (declname);
3208 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3212 /* Helper function for scan_omp.
3214 Callback for walk_gimple_stmt used to scan for OMP directives in
3215 the current statement in GSI. */
3217 static tree
3218 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3219 struct walk_stmt_info *wi)
3221 gimple *stmt = gsi_stmt (*gsi);
3222 omp_context *ctx = (omp_context *) wi->info;
3224 if (gimple_has_location (stmt))
3225 input_location = gimple_location (stmt);
3227 /* Check the nesting restrictions. */
3228 bool remove = false;
3229 if (is_gimple_omp (stmt))
3230 remove = !check_omp_nesting_restrictions (stmt, ctx);
3231 else if (is_gimple_call (stmt))
3233 tree fndecl = gimple_call_fndecl (stmt);
3234 if (fndecl)
3236 if (setjmp_or_longjmp_p (fndecl)
3237 && ctx
3238 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3239 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3241 remove = true;
3242 error_at (gimple_location (stmt),
3243 "setjmp/longjmp inside simd construct");
3245 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3246 switch (DECL_FUNCTION_CODE (fndecl))
3248 case BUILT_IN_GOMP_BARRIER:
3249 case BUILT_IN_GOMP_CANCEL:
3250 case BUILT_IN_GOMP_CANCELLATION_POINT:
3251 case BUILT_IN_GOMP_TASKYIELD:
3252 case BUILT_IN_GOMP_TASKWAIT:
3253 case BUILT_IN_GOMP_TASKGROUP_START:
3254 case BUILT_IN_GOMP_TASKGROUP_END:
3255 remove = !check_omp_nesting_restrictions (stmt, ctx);
3256 break;
3257 default:
3258 break;
3262 if (remove)
3264 stmt = gimple_build_nop ();
3265 gsi_replace (gsi, stmt, false);
3268 *handled_ops_p = true;
3270 switch (gimple_code (stmt))
3272 case GIMPLE_OMP_PARALLEL:
3273 taskreg_nesting_level++;
3274 scan_omp_parallel (gsi, ctx);
3275 taskreg_nesting_level--;
3276 break;
3278 case GIMPLE_OMP_TASK:
3279 taskreg_nesting_level++;
3280 scan_omp_task (gsi, ctx);
3281 taskreg_nesting_level--;
3282 break;
3284 case GIMPLE_OMP_FOR:
3285 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3286 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3287 && omp_maybe_offloaded_ctx (ctx)
3288 && omp_max_simt_vf ())
3289 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3290 else
3291 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3292 break;
3294 case GIMPLE_OMP_SECTIONS:
3295 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3296 break;
3298 case GIMPLE_OMP_SINGLE:
3299 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3300 break;
3302 case GIMPLE_OMP_SECTION:
3303 case GIMPLE_OMP_MASTER:
3304 case GIMPLE_OMP_ORDERED:
3305 case GIMPLE_OMP_CRITICAL:
3306 case GIMPLE_OMP_GRID_BODY:
3307 ctx = new_omp_context (stmt, ctx);
3308 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3309 break;
3311 case GIMPLE_OMP_TASKGROUP:
3312 ctx = new_omp_context (stmt, ctx);
3313 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3314 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3315 break;
3317 case GIMPLE_OMP_TARGET:
3318 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3319 break;
3321 case GIMPLE_OMP_TEAMS:
3322 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3324 taskreg_nesting_level++;
3325 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3326 taskreg_nesting_level--;
3328 else
3329 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3330 break;
3332 case GIMPLE_BIND:
3334 tree var;
3336 *handled_ops_p = false;
3337 if (ctx)
3338 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3339 var ;
3340 var = DECL_CHAIN (var))
3341 insert_decl_map (&ctx->cb, var, var);
3343 break;
3344 default:
3345 *handled_ops_p = false;
3346 break;
3349 return NULL_TREE;
3353 /* Scan all the statements starting at the current statement. CTX
3354 contains context information about the OMP directives and
3355 clauses found during the scan. */
3357 static void
3358 scan_omp (gimple_seq *body_p, omp_context *ctx)
3360 location_t saved_location;
3361 struct walk_stmt_info wi;
3363 memset (&wi, 0, sizeof (wi));
3364 wi.info = ctx;
3365 wi.want_locations = true;
3367 saved_location = input_location;
3368 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3369 input_location = saved_location;
3372 /* Re-gimplification and code generation routines. */
3374 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3375 of BIND if in a method. */
3377 static void
3378 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3380 if (DECL_ARGUMENTS (current_function_decl)
3381 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3382 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3383 == POINTER_TYPE))
3385 tree vars = gimple_bind_vars (bind);
3386 for (tree *pvar = &vars; *pvar; )
3387 if (omp_member_access_dummy_var (*pvar))
3388 *pvar = DECL_CHAIN (*pvar);
3389 else
3390 pvar = &DECL_CHAIN (*pvar);
3391 gimple_bind_set_vars (bind, vars);
3395 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3396 block and its subblocks. */
3398 static void
3399 remove_member_access_dummy_vars (tree block)
3401 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3402 if (omp_member_access_dummy_var (*pvar))
3403 *pvar = DECL_CHAIN (*pvar);
3404 else
3405 pvar = &DECL_CHAIN (*pvar);
3407 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3408 remove_member_access_dummy_vars (block);
3411 /* If a context was created for STMT when it was scanned, return it. */
3413 static omp_context *
3414 maybe_lookup_ctx (gimple *stmt)
3416 splay_tree_node n;
3417 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3418 return n ? (omp_context *) n->value : NULL;
3422 /* Find the mapping for DECL in CTX or the immediately enclosing
3423 context that has a mapping for DECL.
3425 If CTX is a nested parallel directive, we may have to use the decl
3426 mappings created in CTX's parent context. Suppose that we have the
3427 following parallel nesting (variable UIDs shown for clarity):
3429 iD.1562 = 0;
3430 #omp parallel shared(iD.1562) -> outer parallel
3431 iD.1562 = iD.1562 + 1;
3433 #omp parallel shared (iD.1562) -> inner parallel
3434 iD.1562 = iD.1562 - 1;
3436 Each parallel structure will create a distinct .omp_data_s structure
3437 for copying iD.1562 in/out of the directive:
3439 outer parallel .omp_data_s.1.i -> iD.1562
3440 inner parallel .omp_data_s.2.i -> iD.1562
3442 A shared variable mapping will produce a copy-out operation before
3443 the parallel directive and a copy-in operation after it. So, in
3444 this case we would have:
3446 iD.1562 = 0;
3447 .omp_data_o.1.i = iD.1562;
3448 #omp parallel shared(iD.1562) -> outer parallel
3449 .omp_data_i.1 = &.omp_data_o.1
3450 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3452 .omp_data_o.2.i = iD.1562; -> **
3453 #omp parallel shared(iD.1562) -> inner parallel
3454 .omp_data_i.2 = &.omp_data_o.2
3455 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3458 ** This is a problem. The symbol iD.1562 cannot be referenced
3459 inside the body of the outer parallel region. But since we are
3460 emitting this copy operation while expanding the inner parallel
3461 directive, we need to access the CTX structure of the outer
3462 parallel directive to get the correct mapping:
3464 .omp_data_o.2.i = .omp_data_i.1->i
3466 Since there may be other workshare or parallel directives enclosing
3467 the parallel directive, it may be necessary to walk up the context
3468 parent chain. This is not a problem in general because nested
3469 parallelism happens only rarely. */
3471 static tree
3472 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3474 tree t;
3475 omp_context *up;
3477 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3478 t = maybe_lookup_decl (decl, up);
3480 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3482 return t ? t : decl;
3486 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3487 in outer contexts. */
3489 static tree
3490 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3492 tree t = NULL;
3493 omp_context *up;
3495 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3496 t = maybe_lookup_decl (decl, up);
3498 return t ? t : decl;
3502 /* Construct the initialization value for reduction operation OP. */
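/* For illustration: PLUS_EXPR and BIT_IOR_EXPR yield 0, MULT_EXPR and
   TRUTH_AND_EXPR yield 1, BIT_AND_EXPR yields all-ones, and MAX_EXPR
   and MIN_EXPR yield the minimum resp. maximum representable value of
   TYPE (or -inf resp. +inf for floats honoring infinities).  */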
3504 tree
3505 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3507 switch (op)
3509 case PLUS_EXPR:
3510 case MINUS_EXPR:
3511 case BIT_IOR_EXPR:
3512 case BIT_XOR_EXPR:
3513 case TRUTH_OR_EXPR:
3514 case TRUTH_ORIF_EXPR:
3515 case TRUTH_XOR_EXPR:
3516 case NE_EXPR:
3517 return build_zero_cst (type);
3519 case MULT_EXPR:
3520 case TRUTH_AND_EXPR:
3521 case TRUTH_ANDIF_EXPR:
3522 case EQ_EXPR:
3523 return fold_convert_loc (loc, type, integer_one_node);
3525 case BIT_AND_EXPR:
3526 return fold_convert_loc (loc, type, integer_minus_one_node);
3528 case MAX_EXPR:
3529 if (SCALAR_FLOAT_TYPE_P (type))
3531 REAL_VALUE_TYPE max, min;
3532 if (HONOR_INFINITIES (type))
3534 real_inf (&max);
3535 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3537 else
3538 real_maxval (&min, 1, TYPE_MODE (type));
3539 return build_real (type, min);
3541 else if (POINTER_TYPE_P (type))
3543 wide_int min
3544 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3545 return wide_int_to_tree (type, min);
3547 else
3549 gcc_assert (INTEGRAL_TYPE_P (type));
3550 return TYPE_MIN_VALUE (type);
3553 case MIN_EXPR:
3554 if (SCALAR_FLOAT_TYPE_P (type))
3556 REAL_VALUE_TYPE max;
3557 if (HONOR_INFINITIES (type))
3558 real_inf (&max);
3559 else
3560 real_maxval (&max, 0, TYPE_MODE (type));
3561 return build_real (type, max);
3563 else if (POINTER_TYPE_P (type))
3565 wide_int max
3566 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3567 return wide_int_to_tree (type, max);
3569 else
3571 gcc_assert (INTEGRAL_TYPE_P (type));
3572 return TYPE_MAX_VALUE (type);
3575 default:
3576 gcc_unreachable ();
3580 /* Construct the initialization value for reduction CLAUSE. */
3582 tree
3583 omp_reduction_init (tree clause, tree type)
3585 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3586 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3589 /* Return the alignment to be assumed for the variable in CLAUSE, which
3590 should be OMP_CLAUSE_ALIGNED. */
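/* For example (illustrative): on a target whose widest supported
   autovectorization mode is 512 bits, the fallback computation below
   typically yields an alignment of 64 bytes.  */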
3592 static tree
3593 omp_clause_aligned_alignment (tree clause)
3595 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3596 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3598 /* Otherwise return the implementation-defined alignment. */
3599 unsigned int al = 1;
3600 opt_scalar_mode mode_iter;
3601 auto_vector_sizes sizes;
3602 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3603 poly_uint64 vs = 0;
3604 for (unsigned int i = 0; i < sizes.length (); ++i)
3605 vs = ordered_max (vs, sizes[i]);
3606 static enum mode_class classes[]
3607 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3608 for (int i = 0; i < 4; i += 2)
3609 /* The for loop above dictates that we only walk through scalar classes. */
3610 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3612 scalar_mode mode = mode_iter.require ();
3613 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3614 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3615 continue;
3616 while (maybe_ne (vs, 0U)
3617 && known_lt (GET_MODE_SIZE (vmode), vs)
3618 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3619 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3621 tree type = lang_hooks.types.type_for_mode (mode, 1);
3622 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3623 continue;
3624 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3625 GET_MODE_SIZE (mode));
3626 type = build_vector_type (type, nelts);
3627 if (TYPE_MODE (type) != vmode)
3628 continue;
3629 if (TYPE_ALIGN_UNIT (type) > al)
3630 al = TYPE_ALIGN_UNIT (type);
3632 return build_int_cst (integer_type_node, al);
3636 /* This structure is part of the interface between lower_rec_simd_input_clauses
3637 and lower_rec_input_clauses. */
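/* A sketch of the fields, inferred from their uses below: IDX and LANE
   index into the per-variable "omp simd array"s; SIMT_EARGS collects
   the addresses of SIMT-privatized variables; SIMT_DLIST collects the
   clobbers that destroy them; MAX_VF is the maximum vectorization
   factor and IS_SIMT whether the loop is lowered for SIMT execution.  */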
3639 struct omplow_simd_context {
3640 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3641 tree idx;
3642 tree lane;
3643 vec<tree, va_heap> simt_eargs;
3644 gimple_seq simt_dlist;
3645 poly_uint64_pod max_vf;
3646 bool is_simt;
3649 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3650 privatization. */
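/* Roughly (illustrative): unless the loop is SIMT with a register
   NEW_VAR, or MAX_VF turns out to be 1, NEW_VAR is backed by an
   "omp simd array" of MAX_VF elements; IVAR becomes array[IDX] for
   per-iteration accesses and LVAR becomes array[LANE], installed as
   NEW_VAR's value expression.  */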
3652 static bool
3653 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3654 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3656 if (known_eq (sctx->max_vf, 0U))
3658 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3659 if (maybe_gt (sctx->max_vf, 1U))
3661 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3662 OMP_CLAUSE_SAFELEN);
3663 if (c)
3665 poly_uint64 safe_len;
3666 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3667 || maybe_lt (safe_len, 1U))
3668 sctx->max_vf = 1;
3669 else
3670 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3673 if (maybe_gt (sctx->max_vf, 1U))
3675 sctx->idx = create_tmp_var (unsigned_type_node);
3676 sctx->lane = create_tmp_var (unsigned_type_node);
3679 if (known_eq (sctx->max_vf, 1U))
3680 return false;
3682 if (sctx->is_simt)
3684 if (is_gimple_reg (new_var))
3686 ivar = lvar = new_var;
3687 return true;
3689 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3690 ivar = lvar = create_tmp_var (type);
3691 TREE_ADDRESSABLE (ivar) = 1;
3692 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3693 NULL, DECL_ATTRIBUTES (ivar));
3694 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3695 tree clobber = build_constructor (type, NULL);
3696 TREE_THIS_VOLATILE (clobber) = 1;
3697 gimple *g = gimple_build_assign (ivar, clobber);
3698 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3700 else
3702 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3703 tree avar = create_tmp_var_raw (atype);
3704 if (TREE_ADDRESSABLE (new_var))
3705 TREE_ADDRESSABLE (avar) = 1;
3706 DECL_ATTRIBUTES (avar)
3707 = tree_cons (get_identifier ("omp simd array"), NULL,
3708 DECL_ATTRIBUTES (avar));
3709 gimple_add_tmp_var (avar);
3710 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3711 NULL_TREE, NULL_TREE);
3712 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3713 NULL_TREE, NULL_TREE);
3715 if (DECL_P (new_var))
3717 SET_DECL_VALUE_EXPR (new_var, lvar);
3718 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3720 return true;
3723 /* Helper function of lower_rec_input_clauses. For a reference
3724 in a simd reduction, add an underlying variable that it will reference. */
3726 static void
3727 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3729 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3730 if (TREE_CONSTANT (z))
3732 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3733 get_name (new_vard));
3734 gimple_add_tmp_var (z);
3735 TREE_ADDRESSABLE (z) = 1;
3736 z = build_fold_addr_expr_loc (loc, z);
3737 gimplify_assign (new_vard, z, ilist);
3741 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
3742 sequence code computing (type) (tskred_temp[idx]). */
3744 static tree
3745 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3746 unsigned idx)
3748 unsigned HOST_WIDE_INT sz
3749 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3750 tree r = build2 (MEM_REF, pointer_sized_int_node,
3751 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3752 idx * sz));
3753 tree v = create_tmp_var (pointer_sized_int_node);
3754 gimple *g = gimple_build_assign (v, r);
3755 gimple_seq_add_stmt (ilist, g);
3756 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3758 v = create_tmp_var (type);
3759 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3760 gimple_seq_add_stmt (ilist, g);
3762 return v;
3765 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3766 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3767 private variables. Initialization statements go in ILIST, while calls
3768 to destructors go in DLIST. */
3770 static void
3771 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3772 omp_context *ctx, struct omp_for_data *fd)
3774 tree c, dtor, copyin_seq, x, ptr;
3775 bool copyin_by_ref = false;
3776 bool lastprivate_firstprivate = false;
3777 bool reduction_omp_orig_ref = false;
3778 int pass;
3779 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3780 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3781 omplow_simd_context sctx = omplow_simd_context ();
3782 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3783 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3784 gimple_seq llist[3] = { };
3786 copyin_seq = NULL;
3787 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3789 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3790 with data sharing clauses referencing variable sized vars. That
3791 is unnecessarily hard to support and very unlikely to result in
3792 vectorized code anyway. */
3793 if (is_simd)
3794 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3795 switch (OMP_CLAUSE_CODE (c))
3797 case OMP_CLAUSE_LINEAR:
3798 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3799 sctx.max_vf = 1;
3800 /* FALLTHRU */
3801 case OMP_CLAUSE_PRIVATE:
3802 case OMP_CLAUSE_FIRSTPRIVATE:
3803 case OMP_CLAUSE_LASTPRIVATE:
3804 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3805 sctx.max_vf = 1;
3806 break;
3807 case OMP_CLAUSE_REDUCTION:
3808 case OMP_CLAUSE_IN_REDUCTION:
3809 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3810 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3811 sctx.max_vf = 1;
3812 break;
3813 default:
3814 continue;
3817 /* Add a placeholder for simduid. */
3818 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3819 sctx.simt_eargs.safe_push (NULL_TREE);
3821 unsigned task_reduction_cnt = 0;
3822 unsigned task_reduction_cntorig = 0;
3823 unsigned task_reduction_cnt_full = 0;
3824 unsigned task_reduction_cntorig_full = 0;
3825 unsigned task_reduction_other_cnt = 0;
3826 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3827 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3828 /* Do all the fixed sized types in the first pass, and the variable sized
3829 types in the second pass. This makes sure that the scalar arguments to
3830 the variable sized types are processed before we use them in the
3831 variable sized operations. For task reductions we use 4 passes, in the
3832 first two we ignore them, in the third we gather arguments for the
3833 GOMP_task_reduction_remap call, and in the last pass we actually handle
3834 the task reductions. */
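/* Schematically:

     pass 0: fixed sized privatization
     pass 1: variable sized privatization
     pass 2: gather the addresses passed to GOMP_task_reduction_remap
             (task reductions only)
     pass 3: handle the task reductions themselves.  */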
3835 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3836 ? 4 : 2); ++pass)
3838 if (pass == 2 && task_reduction_cnt)
3840 tskred_atype
3841 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3842 + task_reduction_cntorig);
3843 tskred_avar = create_tmp_var_raw (tskred_atype);
3844 gimple_add_tmp_var (tskred_avar);
3845 TREE_ADDRESSABLE (tskred_avar) = 1;
3846 task_reduction_cnt_full = task_reduction_cnt;
3847 task_reduction_cntorig_full = task_reduction_cntorig;
3849 else if (pass == 3 && task_reduction_cnt)
3851 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3852 gimple *g
3853 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3854 size_int (task_reduction_cntorig),
3855 build_fold_addr_expr (tskred_avar));
3856 gimple_seq_add_stmt (ilist, g);
3858 if (pass == 3 && task_reduction_other_cnt)
3860 /* For reduction clauses, build
3861 tskred_base = (void *) tskred_temp[2]
3862 + omp_get_thread_num () * tskred_temp[1]
3863 or, if tskred_temp[1] is known to be constant, use that constant
3864 directly. This is the start of the private reduction copy block
3865 for the current thread. */
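/* The sequence appended to ILIST below is roughly (temporaries are
   illustrative):

     int v = omp_get_thread_num ();
     size_t v2 = (size_t) v;
     size_t v3 = v2 * tskred_temp[1];   // or v2 * <known constant>
     void *tskred_base = (void *) tskred_temp[2] + v3;
*/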
3866 tree v = create_tmp_var (integer_type_node);
3867 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3868 gimple *g = gimple_build_call (x, 0);
3869 gimple_call_set_lhs (g, v);
3870 gimple_seq_add_stmt (ilist, g);
3871 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3872 tskred_temp = OMP_CLAUSE_DECL (c);
3873 if (is_taskreg_ctx (ctx))
3874 tskred_temp = lookup_decl (tskred_temp, ctx);
3875 tree v2 = create_tmp_var (sizetype);
3876 g = gimple_build_assign (v2, NOP_EXPR, v);
3877 gimple_seq_add_stmt (ilist, g);
3878 if (ctx->task_reductions[0])
3879 v = fold_convert (sizetype, ctx->task_reductions[0]);
3880 else
3881 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
3882 tree v3 = create_tmp_var (sizetype);
3883 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
3884 gimple_seq_add_stmt (ilist, g);
3885 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
3886 tskred_base = create_tmp_var (ptr_type_node);
3887 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
3888 gimple_seq_add_stmt (ilist, g);
3890 task_reduction_cnt = 0;
3891 task_reduction_cntorig = 0;
3892 task_reduction_other_cnt = 0;
3893 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3895 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3896 tree var, new_var;
3897 bool by_ref;
3898 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3899 bool task_reduction_p = false;
3900 bool task_reduction_needs_orig_p = false;
3901 tree cond = NULL_TREE;
3903 switch (c_kind)
3905 case OMP_CLAUSE_PRIVATE:
3906 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3907 continue;
3908 break;
3909 case OMP_CLAUSE_SHARED:
3910 /* Ignore shared directives in teams construct inside
3911 of target construct. */
3912 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
3913 && !is_host_teams_ctx (ctx))
3914 continue;
3915 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3917 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3918 || is_global_var (OMP_CLAUSE_DECL (c)));
3919 continue;
3921 case OMP_CLAUSE_FIRSTPRIVATE:
3922 case OMP_CLAUSE_COPYIN:
3923 break;
3924 case OMP_CLAUSE_LINEAR:
3925 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3926 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3927 lastprivate_firstprivate = true;
3928 break;
3929 case OMP_CLAUSE_REDUCTION:
3930 case OMP_CLAUSE_IN_REDUCTION:
3931 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
3933 task_reduction_p = true;
3934 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
3936 task_reduction_other_cnt++;
3937 if (pass == 2)
3938 continue;
3940 else
3941 task_reduction_cnt++;
3942 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3944 var = OMP_CLAUSE_DECL (c);
3945 /* If var is a global variable that isn't privatized
3946 in outer contexts, we don't need to look up the
3947 original address, it is always the address of the
3948 global variable itself. */
3949 if (!DECL_P (var)
3950 || omp_is_reference (var)
3951 || !is_global_var
3952 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
3954 task_reduction_needs_orig_p = true;
3955 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
3956 task_reduction_cntorig++;
3960 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3961 reduction_omp_orig_ref = true;
3962 break;
3963 case OMP_CLAUSE__REDUCTEMP_:
3964 if (!is_taskreg_ctx (ctx))
3965 continue;
3966 /* FALLTHRU */
3967 case OMP_CLAUSE__LOOPTEMP_:
3968 /* Handle _looptemp_/_reductemp_ clauses only on
3969 parallel/task. */
3970 if (fd)
3971 continue;
3972 break;
3973 case OMP_CLAUSE_LASTPRIVATE:
3974 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3976 lastprivate_firstprivate = true;
3977 if (pass != 0 || is_taskloop_ctx (ctx))
3978 continue;
3980 /* Even without corresponding firstprivate, if
3981 decl is Fortran allocatable, it needs outer var
3982 reference. */
3983 else if (pass == 0
3984 && lang_hooks.decls.omp_private_outer_ref
3985 (OMP_CLAUSE_DECL (c)))
3986 lastprivate_firstprivate = true;
3987 break;
3988 case OMP_CLAUSE_ALIGNED:
3989 if (pass != 1)
3990 continue;
3991 var = OMP_CLAUSE_DECL (c);
3992 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3993 && !is_global_var (var))
3995 new_var = maybe_lookup_decl (var, ctx);
3996 if (new_var == NULL_TREE)
3997 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3998 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3999 tree alarg = omp_clause_aligned_alignment (c);
4000 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4001 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4002 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4003 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4004 gimplify_and_add (x, ilist);
4006 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4007 && is_global_var (var))
4009 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4010 new_var = lookup_decl (var, ctx);
4011 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4012 t = build_fold_addr_expr_loc (clause_loc, t);
4013 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4014 tree alarg = omp_clause_aligned_alignment (c);
4015 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4016 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4017 t = fold_convert_loc (clause_loc, ptype, t);
4018 x = create_tmp_var (ptype);
4019 t = build2 (MODIFY_EXPR, ptype, x, t);
4020 gimplify_and_add (t, ilist);
4021 t = build_simple_mem_ref_loc (clause_loc, x);
4022 SET_DECL_VALUE_EXPR (new_var, t);
4023 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4025 continue;
4026 default:
4027 continue;
4030 if (task_reduction_p != (pass >= 2))
4031 continue;
4033 new_var = var = OMP_CLAUSE_DECL (c);
4034 if ((c_kind == OMP_CLAUSE_REDUCTION
4035 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4036 && TREE_CODE (var) == MEM_REF)
4038 var = TREE_OPERAND (var, 0);
4039 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4040 var = TREE_OPERAND (var, 0);
4041 if (TREE_CODE (var) == INDIRECT_REF
4042 || TREE_CODE (var) == ADDR_EXPR)
4043 var = TREE_OPERAND (var, 0);
4044 if (is_variable_sized (var))
4046 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4047 var = DECL_VALUE_EXPR (var);
4048 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4049 var = TREE_OPERAND (var, 0);
4050 gcc_assert (DECL_P (var));
4052 new_var = var;
4054 if (c_kind != OMP_CLAUSE_COPYIN)
4055 new_var = lookup_decl (var, ctx);
4057 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4059 if (pass != 0)
4060 continue;
4062 /* C/C++ array section reductions. */
4063 else if ((c_kind == OMP_CLAUSE_REDUCTION
4064 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4065 && var != OMP_CLAUSE_DECL (c))
4067 if (pass == 0)
4068 continue;
4070 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4071 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4073 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4075 tree b = TREE_OPERAND (orig_var, 1);
4076 b = maybe_lookup_decl (b, ctx);
4077 if (b == NULL)
4079 b = TREE_OPERAND (orig_var, 1);
4080 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4082 if (integer_zerop (bias))
4083 bias = b;
4084 else
4086 bias = fold_convert_loc (clause_loc,
4087 TREE_TYPE (b), bias);
4088 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4089 TREE_TYPE (b), b, bias);
4091 orig_var = TREE_OPERAND (orig_var, 0);
4093 if (pass == 2)
4095 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4096 if (is_global_var (out)
4097 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4098 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4099 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4100 != POINTER_TYPE)))
4101 x = var;
4102 else
4104 bool by_ref = use_pointer_for_field (var, NULL);
4105 x = build_receiver_ref (var, by_ref, ctx);
4106 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4107 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4108 == POINTER_TYPE))
4109 x = build_fold_addr_expr (x);
4111 if (TREE_CODE (orig_var) == INDIRECT_REF)
4112 x = build_simple_mem_ref (x);
4113 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4115 if (var == TREE_OPERAND (orig_var, 0))
4116 x = build_fold_addr_expr (x);
4118 bias = fold_convert (sizetype, bias);
4119 x = fold_convert (ptr_type_node, x);
4120 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4121 TREE_TYPE (x), x, bias);
4122 unsigned cnt = task_reduction_cnt - 1;
4123 if (!task_reduction_needs_orig_p)
4124 cnt += (task_reduction_cntorig_full
4125 - task_reduction_cntorig);
4126 else
4127 cnt = task_reduction_cntorig - 1;
4128 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4129 size_int (cnt), NULL_TREE, NULL_TREE);
4130 gimplify_assign (r, x, ilist);
4131 continue;
4134 if (TREE_CODE (orig_var) == INDIRECT_REF
4135 || TREE_CODE (orig_var) == ADDR_EXPR)
4136 orig_var = TREE_OPERAND (orig_var, 0);
4137 tree d = OMP_CLAUSE_DECL (c);
4138 tree type = TREE_TYPE (d);
4139 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4140 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4141 const char *name = get_name (orig_var);
4142 if (pass == 3)
4144 tree xv = create_tmp_var (ptr_type_node);
4145 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4147 unsigned cnt = task_reduction_cnt - 1;
4148 if (!task_reduction_needs_orig_p)
4149 cnt += (task_reduction_cntorig_full
4150 - task_reduction_cntorig);
4151 else
4152 cnt = task_reduction_cntorig - 1;
4153 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4154 size_int (cnt), NULL_TREE, NULL_TREE);
4156 gimple *g = gimple_build_assign (xv, x);
4157 gimple_seq_add_stmt (ilist, g);
4159 else
4161 unsigned int idx = *ctx->task_reduction_map->get (c);
4162 tree off;
4163 if (ctx->task_reductions[1 + idx])
4164 off = fold_convert (sizetype,
4165 ctx->task_reductions[1 + idx]);
4166 else
4167 off = task_reduction_read (ilist, tskred_temp, sizetype,
4168 7 + 3 * idx + 1);
4169 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4170 tskred_base, off);
4171 gimple_seq_add_stmt (ilist, g);
4173 x = fold_convert (build_pointer_type (boolean_type_node),
4174 xv);
4175 if (TREE_CONSTANT (v))
4176 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4177 TYPE_SIZE_UNIT (type));
4178 else
4180 tree t = maybe_lookup_decl (v, ctx);
4181 if (t)
4182 v = t;
4183 else
4184 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4185 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4186 fb_rvalue);
4187 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4188 TREE_TYPE (v), v,
4189 build_int_cst (TREE_TYPE (v), 1));
4190 t = fold_build2_loc (clause_loc, MULT_EXPR,
4191 TREE_TYPE (v), t,
4192 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4193 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4195 cond = create_tmp_var (TREE_TYPE (x));
4196 gimplify_assign (cond, x, ilist);
4197 x = xv;
4199 else if (TREE_CONSTANT (v))
4201 x = create_tmp_var_raw (type, name);
4202 gimple_add_tmp_var (x);
4203 TREE_ADDRESSABLE (x) = 1;
4204 x = build_fold_addr_expr_loc (clause_loc, x);
4206 else
4208 tree atmp
4209 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4210 tree t = maybe_lookup_decl (v, ctx);
4211 if (t)
4212 v = t;
4213 else
4214 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4215 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4216 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4217 TREE_TYPE (v), v,
4218 build_int_cst (TREE_TYPE (v), 1));
4219 t = fold_build2_loc (clause_loc, MULT_EXPR,
4220 TREE_TYPE (v), t,
4221 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4222 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4223 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4226 tree ptype = build_pointer_type (TREE_TYPE (type));
4227 x = fold_convert_loc (clause_loc, ptype, x);
4228 tree y = create_tmp_var (ptype, name);
4229 gimplify_assign (y, x, ilist);
4230 x = y;
4231 tree yb = y;
4233 if (!integer_zerop (bias))
4235 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4236 bias);
4237 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4239 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4240 pointer_sized_int_node, yb, bias);
4241 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4242 yb = create_tmp_var (ptype, name);
4243 gimplify_assign (yb, x, ilist);
4244 x = yb;
4247 d = TREE_OPERAND (d, 0);
4248 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4249 d = TREE_OPERAND (d, 0);
4250 if (TREE_CODE (d) == ADDR_EXPR)
4252 if (orig_var != var)
4254 gcc_assert (is_variable_sized (orig_var));
4255 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4257 gimplify_assign (new_var, x, ilist);
4258 tree new_orig_var = lookup_decl (orig_var, ctx);
4259 tree t = build_fold_indirect_ref (new_var);
4260 DECL_IGNORED_P (new_var) = 0;
4261 TREE_THIS_NOTRAP (t) = 1;
4262 SET_DECL_VALUE_EXPR (new_orig_var, t);
4263 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4265 else
4267 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4268 build_int_cst (ptype, 0));
4269 SET_DECL_VALUE_EXPR (new_var, x);
4270 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4273 else
4275 gcc_assert (orig_var == var);
4276 if (TREE_CODE (d) == INDIRECT_REF)
4278 x = create_tmp_var (ptype, name);
4279 TREE_ADDRESSABLE (x) = 1;
4280 gimplify_assign (x, yb, ilist);
4281 x = build_fold_addr_expr_loc (clause_loc, x);
4283 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4284 gimplify_assign (new_var, x, ilist);
4286 /* GOMP_taskgroup_reduction_register memsets the whole
4287 array to zero. If the initializer is zero, we don't
4288 need to initialize it again, just mark it as ever
4289 used unconditionally, i.e. cond = true. */
4290 if (cond
4291 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4292 && initializer_zerop (omp_reduction_init (c,
4293 TREE_TYPE (type))))
4295 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4296 boolean_true_node);
4297 gimple_seq_add_stmt (ilist, g);
4298 continue;
4300 tree end = create_artificial_label (UNKNOWN_LOCATION);
4301 if (cond)
4303 gimple *g;
4304 if (!is_parallel_ctx (ctx))
4306 tree condv = create_tmp_var (boolean_type_node);
4307 g = gimple_build_assign (condv,
4308 build_simple_mem_ref (cond));
4309 gimple_seq_add_stmt (ilist, g);
4310 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4311 g = gimple_build_cond (NE_EXPR, condv,
4312 boolean_false_node, end, lab1);
4313 gimple_seq_add_stmt (ilist, g);
4314 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4316 g = gimple_build_assign (build_simple_mem_ref (cond),
4317 boolean_true_node);
4318 gimple_seq_add_stmt (ilist, g);
4321 tree y1 = create_tmp_var (ptype);
4322 gimplify_assign (y1, y, ilist);
4323 tree i2 = NULL_TREE, y2 = NULL_TREE;
4324 tree body2 = NULL_TREE, end2 = NULL_TREE;
4325 tree y3 = NULL_TREE, y4 = NULL_TREE;
4326 if (task_reduction_needs_orig_p)
4328 y3 = create_tmp_var (ptype);
4329 tree ref;
4330 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4331 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4332 size_int (task_reduction_cnt_full
4333 + task_reduction_cntorig - 1),
4334 NULL_TREE, NULL_TREE);
4335 else
4337 unsigned int idx = *ctx->task_reduction_map->get (c);
4338 ref = task_reduction_read (ilist, tskred_temp, ptype,
4339 7 + 3 * idx);
4341 gimplify_assign (y3, ref, ilist);
4343 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4345 if (pass != 3)
4347 y2 = create_tmp_var (ptype);
4348 gimplify_assign (y2, y, ilist);
4350 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4352 tree ref = build_outer_var_ref (var, ctx);
4353 /* For references, build_outer_var_ref already performs this. */
4354 if (TREE_CODE (d) == INDIRECT_REF)
4355 gcc_assert (omp_is_reference (var));
4356 else if (TREE_CODE (d) == ADDR_EXPR)
4357 ref = build_fold_addr_expr (ref);
4358 else if (omp_is_reference (var))
4359 ref = build_fold_addr_expr (ref);
4360 ref = fold_convert_loc (clause_loc, ptype, ref);
4361 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4362 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4364 y3 = create_tmp_var (ptype);
4365 gimplify_assign (y3, unshare_expr (ref), ilist);
4367 if (is_simd)
4369 y4 = create_tmp_var (ptype);
4370 gimplify_assign (y4, ref, dlist);
4374 tree i = create_tmp_var (TREE_TYPE (v));
4375 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4376 tree body = create_artificial_label (UNKNOWN_LOCATION);
4377 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4378 if (y2)
4380 i2 = create_tmp_var (TREE_TYPE (v));
4381 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4382 body2 = create_artificial_label (UNKNOWN_LOCATION);
4383 end2 = create_artificial_label (UNKNOWN_LOCATION);
4384 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4386 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4388 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4389 tree decl_placeholder
4390 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4391 SET_DECL_VALUE_EXPR (decl_placeholder,
4392 build_simple_mem_ref (y1));
4393 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4394 SET_DECL_VALUE_EXPR (placeholder,
4395 y3 ? build_simple_mem_ref (y3)
4396 : error_mark_node);
4397 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4398 x = lang_hooks.decls.omp_clause_default_ctor
4399 (c, build_simple_mem_ref (y1),
4400 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4401 if (x)
4402 gimplify_and_add (x, ilist);
4403 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4405 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4406 lower_omp (&tseq, ctx);
4407 gimple_seq_add_seq (ilist, tseq);
4409 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4410 if (is_simd)
4412 SET_DECL_VALUE_EXPR (decl_placeholder,
4413 build_simple_mem_ref (y2));
4414 SET_DECL_VALUE_EXPR (placeholder,
4415 build_simple_mem_ref (y4));
4416 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4417 lower_omp (&tseq, ctx);
4418 gimple_seq_add_seq (dlist, tseq);
4419 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4421 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4422 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4423 if (y2)
4425 x = lang_hooks.decls.omp_clause_dtor
4426 (c, build_simple_mem_ref (y2));
4427 if (x)
4429 gimple_seq tseq = NULL;
4430 dtor = x;
4431 gimplify_stmt (&dtor, &tseq);
4432 gimple_seq_add_seq (dlist, tseq);
4436 else
4438 x = omp_reduction_init (c, TREE_TYPE (type));
4439 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4441 /* reduction(-:var) sums up the partial results, so it
4442 acts identically to reduction(+:var). */
4443 if (code == MINUS_EXPR)
4444 code = PLUS_EXPR;
4446 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4447 if (is_simd)
4449 x = build2 (code, TREE_TYPE (type),
4450 build_simple_mem_ref (y4),
4451 build_simple_mem_ref (y2));
4452 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4455 gimple *g
4456 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4457 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4458 gimple_seq_add_stmt (ilist, g);
4459 if (y3)
4461 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4462 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4463 gimple_seq_add_stmt (ilist, g);
4465 g = gimple_build_assign (i, PLUS_EXPR, i,
4466 build_int_cst (TREE_TYPE (i), 1));
4467 gimple_seq_add_stmt (ilist, g);
4468 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4469 gimple_seq_add_stmt (ilist, g);
4470 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4471 if (y2)
4473 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4474 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4475 gimple_seq_add_stmt (dlist, g);
4476 if (y4)
4478 g = gimple_build_assign
4479 (y4, POINTER_PLUS_EXPR, y4,
4480 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4481 gimple_seq_add_stmt (dlist, g);
4483 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4484 build_int_cst (TREE_TYPE (i2), 1));
4485 gimple_seq_add_stmt (dlist, g);
4486 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4487 gimple_seq_add_stmt (dlist, g);
4488 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4490 continue;
4492 else if (pass == 2)
4494 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4495 x = var;
4496 else
4498 bool by_ref = use_pointer_for_field (var, ctx);
4499 x = build_receiver_ref (var, by_ref, ctx);
4501 if (!omp_is_reference (var))
4502 x = build_fold_addr_expr (x);
4503 x = fold_convert (ptr_type_node, x);
4504 unsigned cnt = task_reduction_cnt - 1;
4505 if (!task_reduction_needs_orig_p)
4506 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4507 else
4508 cnt = task_reduction_cntorig - 1;
4509 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4510 size_int (cnt), NULL_TREE, NULL_TREE);
4511 gimplify_assign (r, x, ilist);
4512 continue;
4514 else if (pass == 3)
4516 tree type = TREE_TYPE (new_var);
4517 if (!omp_is_reference (var))
4518 type = build_pointer_type (type);
4519 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4521 unsigned cnt = task_reduction_cnt - 1;
4522 if (!task_reduction_needs_orig_p)
4523 cnt += (task_reduction_cntorig_full
4524 - task_reduction_cntorig);
4525 else
4526 cnt = task_reduction_cntorig - 1;
4527 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4528 size_int (cnt), NULL_TREE, NULL_TREE);
4530 else
4532 unsigned int idx = *ctx->task_reduction_map->get (c);
4533 tree off;
4534 if (ctx->task_reductions[1 + idx])
4535 off = fold_convert (sizetype,
4536 ctx->task_reductions[1 + idx]);
4537 else
4538 off = task_reduction_read (ilist, tskred_temp, sizetype,
4539 7 + 3 * idx + 1);
4540 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4541 tskred_base, off);
4543 x = fold_convert (type, x);
4544 tree t;
4545 if (omp_is_reference (var))
4547 gimplify_assign (new_var, x, ilist);
4548 t = new_var;
4549 new_var = build_simple_mem_ref (new_var);
4551 else
4553 t = create_tmp_var (type);
4554 gimplify_assign (t, x, ilist);
4555 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4556 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4558 t = fold_convert (build_pointer_type (boolean_type_node), t);
4559 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4560 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4561 cond = create_tmp_var (TREE_TYPE (t));
4562 gimplify_assign (cond, t, ilist);
4564 else if (is_variable_sized (var))
4566 /* For variable sized types, we need to allocate the
4567 actual storage here. Call alloca and store the
4568 result in the pointer decl that we created elsewhere. */
4569 if (pass == 0)
4570 continue;
4572 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4574 gcall *stmt;
4575 tree tmp, atmp;
4577 ptr = DECL_VALUE_EXPR (new_var);
4578 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4579 ptr = TREE_OPERAND (ptr, 0);
4580 gcc_assert (DECL_P (ptr));
4581 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4583 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)) */
4584 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4585 stmt = gimple_build_call (atmp, 2, x,
4586 size_int (DECL_ALIGN (var)));
4587 tmp = create_tmp_var_raw (ptr_type_node);
4588 gimple_add_tmp_var (tmp);
4589 gimple_call_set_lhs (stmt, tmp);
4591 gimple_seq_add_stmt (ilist, stmt);
4593 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4594 gimplify_assign (ptr, x, ilist);
4597 else if (omp_is_reference (var)
4598 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4599 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4601 /* For references that are being privatized for Fortran,
4602 allocate new backing storage for the new pointer
4603 variable. This allows us to avoid changing all the
4604 code that expects a pointer to something that expects
4605 a direct variable. */
4606 if (pass == 0)
4607 continue;
4609 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4610 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4612 x = build_receiver_ref (var, false, ctx);
4613 x = build_fold_addr_expr_loc (clause_loc, x);
4615 else if (TREE_CONSTANT (x))
4617 /* For a reduction in a SIMD loop, defer adding the
4618 initialization of the reference, because if we decide
4619 to use a SIMD array for it, the initialization could
4620 cause an expansion ICE. */
4621 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4622 x = NULL_TREE;
4623 else
4625 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4626 get_name (var));
4627 gimple_add_tmp_var (x);
4628 TREE_ADDRESSABLE (x) = 1;
4629 x = build_fold_addr_expr_loc (clause_loc, x);
4632 else
4634 tree atmp
4635 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4636 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4637 tree al = size_int (TYPE_ALIGN (rtype));
4638 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4641 if (x)
4643 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4644 gimplify_assign (new_var, x, ilist);
4647 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4649 else if ((c_kind == OMP_CLAUSE_REDUCTION
4650 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4651 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4653 if (pass == 0)
4654 continue;
4656 else if (pass != 0)
4657 continue;
4659 switch (OMP_CLAUSE_CODE (c))
4661 case OMP_CLAUSE_SHARED:
4662 /* Ignore shared directives in teams construct inside
4663 target construct. */
4664 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4665 && !is_host_teams_ctx (ctx))
4666 continue;
4667 /* Shared global vars are just accessed directly. */
4668 if (is_global_var (new_var))
4669 break;
4670 /* For taskloop firstprivate/lastprivate, represented
4671 as firstprivate and shared clause on the task, new_var
4672 is the firstprivate var. */
4673 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4674 break;
4675 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4676 needs to be delayed until after fixup_child_record_type so
4677 that we get the correct type during the dereference. */
4678 by_ref = use_pointer_for_field (var, ctx);
4679 x = build_receiver_ref (var, by_ref, ctx);
4680 SET_DECL_VALUE_EXPR (new_var, x);
4681 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4683 /* ??? If VAR is not passed by reference, and the variable
4684 hasn't been initialized yet, then we'll get a warning for
4685 the store into the omp_data_s structure. Ideally, we'd be
4686 able to notice this and not store anything at all, but
4687 we're generating code too early. Suppress the warning. */
4688 if (!by_ref)
4689 TREE_NO_WARNING (var) = 1;
4690 break;
4692 case OMP_CLAUSE_LASTPRIVATE:
4693 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4694 break;
4695 /* FALLTHRU */
4697 case OMP_CLAUSE_PRIVATE:
4698 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4699 x = build_outer_var_ref (var, ctx);
4700 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4702 if (is_task_ctx (ctx))
4703 x = build_receiver_ref (var, false, ctx);
4704 else
4705 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4707 else
4708 x = NULL;
4709 do_private:
4710 tree nx;
4711 nx = lang_hooks.decls.omp_clause_default_ctor
4712 (c, unshare_expr (new_var), x);
4713 if (is_simd)
4715 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4716 if ((TREE_ADDRESSABLE (new_var) || nx || y
4717 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4718 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4719 ivar, lvar))
4721 if (nx)
4722 x = lang_hooks.decls.omp_clause_default_ctor
4723 (c, unshare_expr (ivar), x);
4724 if (nx && x)
4725 gimplify_and_add (x, &llist[0]);
4726 if (y)
4728 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4729 if (y)
4731 gimple_seq tseq = NULL;
4733 dtor = y;
4734 gimplify_stmt (&dtor, &tseq);
4735 gimple_seq_add_seq (&llist[1], tseq);
4738 break;
4741 if (nx)
4742 gimplify_and_add (nx, ilist);
4743 /* FALLTHRU */
4745 do_dtor:
4746 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4747 if (x)
4749 gimple_seq tseq = NULL;
4751 dtor = x;
4752 gimplify_stmt (&dtor, &tseq);
4753 gimple_seq_add_seq (dlist, tseq);
4755 break;
4757 case OMP_CLAUSE_LINEAR:
4758 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4759 goto do_firstprivate;
4760 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4761 x = NULL;
4762 else
4763 x = build_outer_var_ref (var, ctx);
4764 goto do_private;
4766 case OMP_CLAUSE_FIRSTPRIVATE:
4767 if (is_task_ctx (ctx))
4769 if ((omp_is_reference (var)
4770 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4771 || is_variable_sized (var))
4772 goto do_dtor;
4773 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4774 ctx))
4775 || use_pointer_for_field (var, NULL))
4777 x = build_receiver_ref (var, false, ctx);
4778 SET_DECL_VALUE_EXPR (new_var, x);
4779 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4780 goto do_dtor;
4783 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4784 && omp_is_reference (var))
4786 x = build_outer_var_ref (var, ctx);
4787 gcc_assert (TREE_CODE (x) == MEM_REF
4788 && integer_zerop (TREE_OPERAND (x, 1)));
4789 x = TREE_OPERAND (x, 0);
4790 x = lang_hooks.decls.omp_clause_copy_ctor
4791 (c, unshare_expr (new_var), x);
4792 gimplify_and_add (x, ilist);
4793 goto do_dtor;
4795 do_firstprivate:
4796 x = build_outer_var_ref (var, ctx);
4797 if (is_simd)
4799 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4800 && gimple_omp_for_combined_into_p (ctx->stmt))
4802 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4803 tree stept = TREE_TYPE (t);
4804 tree ct = omp_find_clause (clauses,
4805 OMP_CLAUSE__LOOPTEMP_);
4806 gcc_assert (ct);
4807 tree l = OMP_CLAUSE_DECL (ct);
4808 tree n1 = fd->loop.n1;
4809 tree step = fd->loop.step;
4810 tree itype = TREE_TYPE (l);
4811 if (POINTER_TYPE_P (itype))
4812 itype = signed_type_for (itype);
4813 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4814 if (TYPE_UNSIGNED (itype)
4815 && fd->loop.cond_code == GT_EXPR)
4816 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4817 fold_build1 (NEGATE_EXPR, itype, l),
4818 fold_build1 (NEGATE_EXPR,
4819 itype, step));
4820 else
4821 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4822 t = fold_build2 (MULT_EXPR, stept,
4823 fold_convert (stept, l), t);
4825 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4827 x = lang_hooks.decls.omp_clause_linear_ctor
4828 (c, new_var, x, t);
4829 gimplify_and_add (x, ilist);
4830 goto do_dtor;
4833 if (POINTER_TYPE_P (TREE_TYPE (x)))
4834 x = fold_build2 (POINTER_PLUS_EXPR,
4835 TREE_TYPE (x), x, t);
4836 else
4837 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4840 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4841 || TREE_ADDRESSABLE (new_var))
4842 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4843 ivar, lvar))
4845 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4847 tree iv = create_tmp_var (TREE_TYPE (new_var));
4848 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4849 gimplify_and_add (x, ilist);
4850 gimple_stmt_iterator gsi
4851 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4852 gassign *g
4853 = gimple_build_assign (unshare_expr (lvar), iv);
4854 gsi_insert_before_without_update (&gsi, g,
4855 GSI_SAME_STMT);
4856 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4857 enum tree_code code = PLUS_EXPR;
4858 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4859 code = POINTER_PLUS_EXPR;
4860 g = gimple_build_assign (iv, code, iv, t);
4861 gsi_insert_before_without_update (&gsi, g,
4862 GSI_SAME_STMT);
4863 break;
4865 x = lang_hooks.decls.omp_clause_copy_ctor
4866 (c, unshare_expr (ivar), x);
4867 gimplify_and_add (x, &llist[0]);
4868 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4869 if (x)
4871 gimple_seq tseq = NULL;
4873 dtor = x;
4874 gimplify_stmt (&dtor, &tseq);
4875 gimple_seq_add_seq (&llist[1], tseq);
4877 break;
4880 x = lang_hooks.decls.omp_clause_copy_ctor
4881 (c, unshare_expr (new_var), x);
4882 gimplify_and_add (x, ilist);
4883 goto do_dtor;
4885 case OMP_CLAUSE__LOOPTEMP_:
4886 case OMP_CLAUSE__REDUCTEMP_:
4887 gcc_assert (is_taskreg_ctx (ctx));
4888 x = build_outer_var_ref (var, ctx);
4889 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4890 gimplify_and_add (x, ilist);
4891 break;
4893 case OMP_CLAUSE_COPYIN:
4894 by_ref = use_pointer_for_field (var, NULL);
4895 x = build_receiver_ref (var, by_ref, ctx);
4896 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4897 append_to_statement_list (x, &copyin_seq);
4898 copyin_by_ref |= by_ref;
4899 break;
4901 case OMP_CLAUSE_REDUCTION:
4902 case OMP_CLAUSE_IN_REDUCTION:
4903 /* OpenACC reductions are initialized using the
4904 GOACC_REDUCTION internal function. */
4905 if (is_gimple_omp_oacc (ctx->stmt))
4906 break;
4907 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4909 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4910 gimple *tseq;
4911 tree ptype = TREE_TYPE (placeholder);
4912 if (cond)
4914 x = error_mark_node;
4915 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
4916 && !task_reduction_needs_orig_p)
4917 x = var;
4918 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4920 tree pptype = build_pointer_type (ptype);
4921 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4922 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4923 size_int (task_reduction_cnt_full
4924 + task_reduction_cntorig - 1),
4925 NULL_TREE, NULL_TREE);
4926 else
4928 unsigned int idx
4929 = *ctx->task_reduction_map->get (c);
4930 x = task_reduction_read (ilist, tskred_temp,
4931 pptype, 7 + 3 * idx);
4933 x = fold_convert (pptype, x);
4934 x = build_simple_mem_ref (x);
4937 else
4939 x = build_outer_var_ref (var, ctx);
4941 if (omp_is_reference (var)
4942 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
4943 x = build_fold_addr_expr_loc (clause_loc, x);
4945 SET_DECL_VALUE_EXPR (placeholder, x);
4946 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4947 tree new_vard = new_var;
4948 if (omp_is_reference (var))
4950 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4951 new_vard = TREE_OPERAND (new_var, 0);
4952 gcc_assert (DECL_P (new_vard));
4954 if (is_simd
4955 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4956 ivar, lvar))
4958 if (new_vard == new_var)
4960 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4961 SET_DECL_VALUE_EXPR (new_var, ivar);
4963 else
4965 SET_DECL_VALUE_EXPR (new_vard,
4966 build_fold_addr_expr (ivar));
4967 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4969 x = lang_hooks.decls.omp_clause_default_ctor
4970 (c, unshare_expr (ivar),
4971 build_outer_var_ref (var, ctx));
4972 if (x)
4973 gimplify_and_add (x, &llist[0]);
4974 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4976 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4977 lower_omp (&tseq, ctx);
4978 gimple_seq_add_seq (&llist[0], tseq);
4980 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4981 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4982 lower_omp (&tseq, ctx);
4983 gimple_seq_add_seq (&llist[1], tseq);
4984 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4985 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4986 if (new_vard == new_var)
4987 SET_DECL_VALUE_EXPR (new_var, lvar);
4988 else
4989 SET_DECL_VALUE_EXPR (new_vard,
4990 build_fold_addr_expr (lvar));
4991 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4992 if (x)
4994 tseq = NULL;
4995 dtor = x;
4996 gimplify_stmt (&dtor, &tseq);
4997 gimple_seq_add_seq (&llist[1], tseq);
4999 break;
5001 /* If this is a reference to a constant size reduction var
5002 with a placeholder, we haven't emitted the initializer
5003 for it, because that is undesirable if SIMD arrays are used.
5004 But if they aren't used, we need to emit the deferred
5005 initialization now. */
5006 else if (omp_is_reference (var) && is_simd)
5007 handle_simd_reference (clause_loc, new_vard, ilist);
5009 tree lab2 = NULL_TREE;
5010 if (cond)
5012 gimple *g;
5013 if (!is_parallel_ctx (ctx))
5015 tree condv = create_tmp_var (boolean_type_node);
5016 tree m = build_simple_mem_ref (cond);
5017 g = gimple_build_assign (condv, m);
5018 gimple_seq_add_stmt (ilist, g);
5019 tree lab1
5020 = create_artificial_label (UNKNOWN_LOCATION);
5021 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5022 g = gimple_build_cond (NE_EXPR, condv,
5023 boolean_false_node,
5024 lab2, lab1);
5025 gimple_seq_add_stmt (ilist, g);
5026 gimple_seq_add_stmt (ilist,
5027 gimple_build_label (lab1));
5029 g = gimple_build_assign (build_simple_mem_ref (cond),
5030 boolean_true_node);
5031 gimple_seq_add_stmt (ilist, g);
5033 x = lang_hooks.decls.omp_clause_default_ctor
5034 (c, unshare_expr (new_var),
5035 cond ? NULL_TREE
5036 : build_outer_var_ref (var, ctx));
5037 if (x)
5038 gimplify_and_add (x, ilist);
5039 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5041 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5042 lower_omp (&tseq, ctx);
5043 gimple_seq_add_seq (ilist, tseq);
5045 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5046 if (is_simd)
5048 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5049 lower_omp (&tseq, ctx);
5050 gimple_seq_add_seq (dlist, tseq);
5051 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5053 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5054 if (cond)
5056 if (lab2)
5057 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5058 break;
5060 goto do_dtor;
5062 else
5064 x = omp_reduction_init (c, TREE_TYPE (new_var));
5065 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5066 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5068 if (cond)
5070 gimple *g;
5071 tree lab2 = NULL_TREE;
5072 /* GOMP_taskgroup_reduction_register memsets the whole
5073 array to zero. If the initializer is zero, we don't
5074 need to initialize it again, just mark it as ever
5075 used unconditionally, i.e. cond = true. */
5076 if (initializer_zerop (x))
5078 g = gimple_build_assign (build_simple_mem_ref (cond),
5079 boolean_true_node);
5080 gimple_seq_add_stmt (ilist, g);
5081 break;
5084 /* Otherwise, emit
5085 if (!cond) { cond = true; new_var = x; } */
5086 if (!is_parallel_ctx (ctx))
5088 tree condv = create_tmp_var (boolean_type_node);
5089 tree m = build_simple_mem_ref (cond);
5090 g = gimple_build_assign (condv, m);
5091 gimple_seq_add_stmt (ilist, g);
5092 tree lab1
5093 = create_artificial_label (UNKNOWN_LOCATION);
5094 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5095 g = gimple_build_cond (NE_EXPR, condv,
5096 boolean_false_node,
5097 lab2, lab1);
5098 gimple_seq_add_stmt (ilist, g);
5099 gimple_seq_add_stmt (ilist,
5100 gimple_build_label (lab1));
5102 g = gimple_build_assign (build_simple_mem_ref (cond),
5103 boolean_true_node);
5104 gimple_seq_add_stmt (ilist, g);
5105 gimplify_assign (new_var, x, ilist);
5106 if (lab2)
5107 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5108 break;
5111 /* reduction(-:var) sums up the partial results, so it
5112 acts identically to reduction(+:var). */
5113 if (code == MINUS_EXPR)
5114 code = PLUS_EXPR;
5116 tree new_vard = new_var;
5117 if (is_simd && omp_is_reference (var))
5119 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5120 new_vard = TREE_OPERAND (new_var, 0);
5121 gcc_assert (DECL_P (new_vard));
5123 if (is_simd
5124 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5125 ivar, lvar))
5127 tree ref = build_outer_var_ref (var, ctx);
5129 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5131 if (sctx.is_simt)
5133 if (!simt_lane)
5134 simt_lane = create_tmp_var (unsigned_type_node);
5135 x = build_call_expr_internal_loc
5136 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5137 TREE_TYPE (ivar), 2, ivar, simt_lane);
5138 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5139 gimplify_assign (ivar, x, &llist[2]);
5141 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5142 ref = build_outer_var_ref (var, ctx);
5143 gimplify_assign (ref, x, &llist[1]);
5145 if (new_vard != new_var)
5147 SET_DECL_VALUE_EXPR (new_vard,
5148 build_fold_addr_expr (lvar));
5149 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5152 else
5154 if (omp_is_reference (var) && is_simd)
5155 handle_simd_reference (clause_loc, new_vard, ilist);
5156 gimplify_assign (new_var, x, ilist);
5157 if (is_simd)
5159 tree ref = build_outer_var_ref (var, ctx);
5161 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5162 ref = build_outer_var_ref (var, ctx);
5163 gimplify_assign (ref, x, dlist);
5167 break;
5169 default:
5170 gcc_unreachable ();
5174 if (tskred_avar)
5176 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5177 TREE_THIS_VOLATILE (clobber) = 1;
5178 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5181 if (known_eq (sctx.max_vf, 1U))
5182 sctx.is_simt = false;
5184 if (sctx.lane || sctx.is_simt)
5186 uid = create_tmp_var (ptr_type_node, "simduid");
5187 /* We don't want uninitialized warnings on simduid; it is always
5188 uninitialized, since we use it only for its DECL_UID, not its value. */
5189 TREE_NO_WARNING (uid) = 1;
5190 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5191 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5192 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5193 gimple_omp_for_set_clauses (ctx->stmt, c);
5195 /* Emit calls denoting privatized variables and initializing a pointer to
5196 the structure that holds private variables as fields after the ompdevlow pass. */
5197 if (sctx.is_simt)
5199 sctx.simt_eargs[0] = uid;
5200 gimple *g
5201 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5202 gimple_call_set_lhs (g, uid);
5203 gimple_seq_add_stmt (ilist, g);
5204 sctx.simt_eargs.release ();
5206 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5207 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5208 gimple_call_set_lhs (g, simtrec);
5209 gimple_seq_add_stmt (ilist, g);
5211 if (sctx.lane)
5213 gimple *g
5214 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
5215 gimple_call_set_lhs (g, sctx.lane);
5216 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5217 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5218 g = gimple_build_assign (sctx.lane, INTEGER_CST,
5219 build_int_cst (unsigned_type_node, 0));
5220 gimple_seq_add_stmt (ilist, g);
5221 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
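/* A sketch of the loop built below, where llist[2] holds the per-step
   butterfly-exchange reduction statements:

     simt_lane = 1;
     while (simt_lane < simt_vf)
       {
         <llist[2]>;     // e.g. ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)
         simt_lane <<= 1;
       }
*/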
5222 if (llist[2])
5224 tree simt_vf = create_tmp_var (unsigned_type_node);
5225 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5226 gimple_call_set_lhs (g, simt_vf);
5227 gimple_seq_add_stmt (dlist, g);
5229 tree t = build_int_cst (unsigned_type_node, 1);
5230 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5231 gimple_seq_add_stmt (dlist, g);
5233 t = build_int_cst (unsigned_type_node, 0);
5234 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5235 gimple_seq_add_stmt (dlist, g);
5237 tree body = create_artificial_label (UNKNOWN_LOCATION);
5238 tree header = create_artificial_label (UNKNOWN_LOCATION);
5239 tree end = create_artificial_label (UNKNOWN_LOCATION);
5240 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5241 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5243 gimple_seq_add_seq (dlist, llist[2]);
5245 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5246 gimple_seq_add_stmt (dlist, g);
5248 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5249 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5250 gimple_seq_add_stmt (dlist, g);
5252 gimple_seq_add_stmt (dlist, gimple_build_label (end));
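/* Similarly, wrap the per-lane constructor (llist[0], into ILIST) and
   destructor (llist[1], into DLIST) sequences in a loop over the actual
   vectorization factor, roughly:

     vf = GOMP_SIMD_VF (uid);
     for (sctx.idx = 0; sctx.idx < vf; ++sctx.idx)
       <llist[i]>;
*/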
5254 for (int i = 0; i < 2; i++)
5255 if (llist[i])
5257 tree vf = create_tmp_var (unsigned_type_node);
5258 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5259 gimple_call_set_lhs (g, vf);
5260 gimple_seq *seq = i == 0 ? ilist : dlist;
5261 gimple_seq_add_stmt (seq, g);
5262 tree t = build_int_cst (unsigned_type_node, 0);
5263 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5264 gimple_seq_add_stmt (seq, g);
5265 tree body = create_artificial_label (UNKNOWN_LOCATION);
5266 tree header = create_artificial_label (UNKNOWN_LOCATION);
5267 tree end = create_artificial_label (UNKNOWN_LOCATION);
5268 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5269 gimple_seq_add_stmt (seq, gimple_build_label (body));
5270 gimple_seq_add_seq (seq, llist[i]);
5271 t = build_int_cst (unsigned_type_node, 1);
5272 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5273 gimple_seq_add_stmt (seq, g);
5274 gimple_seq_add_stmt (seq, gimple_build_label (header));
5275 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5276 gimple_seq_add_stmt (seq, g);
5277 gimple_seq_add_stmt (seq, gimple_build_label (end));
5280 if (sctx.is_simt)
5282 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5283 gimple *g
5284 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5285 gimple_seq_add_stmt (dlist, g);
5288 /* The copyin sequence is not to be executed by the main thread, since
5289 that would result in self-copies. Such a self-copy is perhaps not
5290 visible for scalars, but it certainly is for C++ operator=. */
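/* So guard it with a thread-number check, roughly:

     if (omp_get_thread_num () != 0)
       <copyin_seq>;
*/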
5291 if (copyin_seq)
5293 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5295 x = build2 (NE_EXPR, boolean_type_node, x,
5296 build_int_cst (TREE_TYPE (x), 0));
5297 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5298 gimplify_and_add (x, ilist);
5301 /* If any copyin variable is passed by reference, we must ensure the
5302 master thread doesn't modify it before it is copied over in all
5303 threads. Similarly for variables in both firstprivate and
5304 lastprivate clauses we need to ensure the lastprivate copying
5305 happens after firstprivate copying in all threads. And similarly
5306 for UDRs if the initializer expression refers to omp_orig. */
5307 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
5309 /* Don't add any barrier for #pragma omp simd or
5310 #pragma omp distribute. */
5311 if (!is_task_ctx (ctx)
5312 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5313 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5314 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5317 /* If max_vf is non-zero, then we can use only a vectorization factor
5318 up to the max_vf we chose. So stick it into the safelen clause. */
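/* E.g. if max_vf was capped to 1 above and no smaller safelen is
   already present, this adds a 'safelen(1)' clause so the vectorizer
   cannot assume a larger safe length.  */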
5319 if (maybe_ne (sctx.max_vf, 0U))
5321 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5322 OMP_CLAUSE_SAFELEN);
5323 poly_uint64 safe_len;
5324 if (c == NULL_TREE
5325 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5326 && maybe_gt (safe_len, sctx.max_vf)))
5328 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5329 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5330 sctx.max_vf);
5331 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5332 gimple_omp_for_set_clauses (ctx->stmt, c);
5338 /* Generate code to implement the LASTPRIVATE clauses. This is used for
5339 both parallel and workshare constructs. PREDICATE may be NULL if it's
5340 always true. */
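/* Roughly, the emitted shape is (PREDICATE guards the copy-out so only
   the execution of the last iteration stores back):

     if (PREDICATE)
       {
         <lastprivate/linear finalization seqs>;
         orig_var1 = new_var1;
         ...
       }
*/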
5342 static void
5343 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
5344 omp_context *ctx)
5346 tree x, c, label = NULL, orig_clauses = clauses;
5347 bool par_clauses = false;
5348 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
5350 /* Early exit if there are no lastprivate or linear clauses. */
5351 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
5352 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
5353 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
5354 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
5355 break;
5356 if (clauses == NULL)
5358 /* If this was a workshare clause, see if it had been combined
5359 with its parallel. In that case, look for the clauses on the
5360 parallel statement itself. */
5361 if (is_parallel_ctx (ctx))
5362 return;
5364 ctx = ctx->outer;
5365 if (ctx == NULL || !is_parallel_ctx (ctx))
5366 return;
5368 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5369 OMP_CLAUSE_LASTPRIVATE);
5370 if (clauses == NULL)
5371 return;
5372 par_clauses = true;
5375 bool maybe_simt = false;
5376 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5377 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5379 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
5380 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
5381 if (simduid)
5382 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
5385 if (predicate)
5387 gcond *stmt;
5388 tree label_true, arm1, arm2;
5389 enum tree_code pred_code = TREE_CODE (predicate);
5391 label = create_artificial_label (UNKNOWN_LOCATION);
5392 label_true = create_artificial_label (UNKNOWN_LOCATION);
5393 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
5395 arm1 = TREE_OPERAND (predicate, 0);
5396 arm2 = TREE_OPERAND (predicate, 1);
5397 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5398 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
5400 else
5402 arm1 = predicate;
5403 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5404 arm2 = boolean_false_node;
5405 pred_code = NE_EXPR;
5407 if (maybe_simt)
5409 c = build2 (pred_code, boolean_type_node, arm1, arm2);
5410 c = fold_convert (integer_type_node, c);
5411 simtcond = create_tmp_var (integer_type_node);
5412 gimplify_assign (simtcond, c, stmt_list);
5413 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
5414 1, simtcond);
5415 c = create_tmp_var (integer_type_node);
5416 gimple_call_set_lhs (g, c);
5417 gimple_seq_add_stmt (stmt_list, g);
5418 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
5419 label_true, label);
5421 else
5422 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
5423 gimple_seq_add_stmt (stmt_list, stmt);
5424 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
5427 for (c = clauses; c ;)
5429 tree var, new_var;
5430 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5432 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5433 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5434 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
5436 var = OMP_CLAUSE_DECL (c);
5437 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5438 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5439 && is_taskloop_ctx (ctx))
5441 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
5442 new_var = lookup_decl (var, ctx->outer);
5444 else
5446 new_var = lookup_decl (var, ctx);
5447 /* Avoid uninitialized warnings for lastprivate and
5448 for linear iterators. */
5449 if (predicate
5450 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5451 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
5452 TREE_NO_WARNING (new_var) = 1;
5455 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
5457 tree val = DECL_VALUE_EXPR (new_var);
5458 if (TREE_CODE (val) == ARRAY_REF
5459 && VAR_P (TREE_OPERAND (val, 0))
5460 && lookup_attribute ("omp simd array",
5461 DECL_ATTRIBUTES (TREE_OPERAND (val,
5462 0))))
5464 if (lastlane == NULL)
5466 lastlane = create_tmp_var (unsigned_type_node);
5467 gcall *g
5468 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5469 2, simduid,
5470 TREE_OPERAND (val, 1));
5471 gimple_call_set_lhs (g, lastlane);
5472 gimple_seq_add_stmt (stmt_list, g);
5474 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
5475 TREE_OPERAND (val, 0), lastlane,
5476 NULL_TREE, NULL_TREE);
5479 else if (maybe_simt)
5481 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
5482 ? DECL_VALUE_EXPR (new_var)
5483 : new_var);
5484 if (simtlast == NULL)
5486 simtlast = create_tmp_var (unsigned_type_node);
5487 gcall *g = gimple_build_call_internal
5488 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
5489 gimple_call_set_lhs (g, simtlast);
5490 gimple_seq_add_stmt (stmt_list, g);
5492 x = build_call_expr_internal_loc
5493 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
5494 TREE_TYPE (val), 2, val, simtlast);
5495 new_var = unshare_expr (new_var);
5496 gimplify_assign (new_var, x, stmt_list);
5497 new_var = unshare_expr (new_var);
5500 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5501 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
5503 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
5504 gimple_seq_add_seq (stmt_list,
5505 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5506 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
5508 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5509 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
5511 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
5512 gimple_seq_add_seq (stmt_list,
5513 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
5514 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
5517 x = NULL_TREE;
5518 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5519 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
5521 gcc_checking_assert (is_taskloop_ctx (ctx));
5522 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
5523 ctx->outer->outer);
5524 if (is_global_var (ovar))
5525 x = ovar;
5527 if (!x)
5528 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
5529 if (omp_is_reference (var))
5530 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5531 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
5532 gimplify_and_add (x, stmt_list);
5534 c = OMP_CLAUSE_CHAIN (c);
5535 if (c == NULL && !par_clauses)
5537 /* If this was a workshare clause, see if it had been combined
5538 with its parallel. In that case, continue looking for the
5539 clauses also on the parallel statement itself. */
5540 if (is_parallel_ctx (ctx))
5541 break;
5543 ctx = ctx->outer;
5544 if (ctx == NULL || !is_parallel_ctx (ctx))
5545 break;
5547 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5548 OMP_CLAUSE_LASTPRIVATE);
5549 par_clauses = true;
5553 if (label)
5554 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
5557 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
5558 (which might be a placeholder). INNER is true if this is an inner
5559 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
5560 join markers. Generate the before-loop forking sequence in
5561 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
5562 general form of these sequences is
5564 GOACC_REDUCTION_SETUP
5565 GOACC_FORK
5566 GOACC_REDUCTION_INIT
5568 GOACC_REDUCTION_FINI
5569 GOACC_JOIN
5570 GOACC_REDUCTION_TEARDOWN. */
5572 static void
5573 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
5574 gcall *fork, gcall *join, gimple_seq *fork_seq,
5575 gimple_seq *join_seq, omp_context *ctx)
5577 gimple_seq before_fork = NULL;
5578 gimple_seq after_fork = NULL;
5579 gimple_seq before_join = NULL;
5580 gimple_seq after_join = NULL;
5581 tree init_code = NULL_TREE, fini_code = NULL_TREE,
5582 setup_code = NULL_TREE, teardown_code = NULL_TREE;
5583 unsigned offset = 0;
5585 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5586 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5588 tree orig = OMP_CLAUSE_DECL (c);
5589 tree var = maybe_lookup_decl (orig, ctx);
5590 tree ref_to_res = NULL_TREE;
5591 tree incoming, outgoing, v1, v2, v3;
5592 bool is_private = false;
5594 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
5595 if (rcode == MINUS_EXPR)
5596 rcode = PLUS_EXPR;
5597 else if (rcode == TRUTH_ANDIF_EXPR)
5598 rcode = BIT_AND_EXPR;
5599 else if (rcode == TRUTH_ORIF_EXPR)
5600 rcode = BIT_IOR_EXPR;
5601 tree op = build_int_cst (unsigned_type_node, rcode);
5603 if (!var)
5604 var = orig;
5606 incoming = outgoing = var;
5608 if (!inner)
5610 /* See if an outer construct also reduces this variable. */
5611 omp_context *outer = ctx;
5613 while (omp_context *probe = outer->outer)
5615 enum gimple_code type = gimple_code (probe->stmt);
5616 tree cls;
5618 switch (type)
5620 case GIMPLE_OMP_FOR:
5621 cls = gimple_omp_for_clauses (probe->stmt);
5622 break;
5624 case GIMPLE_OMP_TARGET:
5625 if (gimple_omp_target_kind (probe->stmt)
5626 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
5627 goto do_lookup;
5629 cls = gimple_omp_target_clauses (probe->stmt);
5630 break;
5632 default:
5633 goto do_lookup;
5636 outer = probe;
5637 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
5638 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
5639 && orig == OMP_CLAUSE_DECL (cls))
5641 incoming = outgoing = lookup_decl (orig, probe);
5642 goto has_outer_reduction;
5644 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
5645 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
5646 && orig == OMP_CLAUSE_DECL (cls))
5648 is_private = true;
5649 goto do_lookup;
5653 do_lookup:
5654 /* This is the outermost construct with this reduction;
5655 see if there's a mapping for it. */
5656 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5657 && maybe_lookup_field (orig, outer) && !is_private)
5659 ref_to_res = build_receiver_ref (orig, false, outer);
5660 if (omp_is_reference (orig))
5661 ref_to_res = build_simple_mem_ref (ref_to_res);
5663 tree type = TREE_TYPE (var);
5664 if (POINTER_TYPE_P (type))
5665 type = TREE_TYPE (type);
5667 outgoing = var;
5668 incoming = omp_reduction_init_op (loc, rcode, type);
5670 else
5672 /* Try to look at enclosing contexts for the reduction var;
5673 use the original if no mapping is found. */
5674 tree t = NULL_TREE;
5675 omp_context *c = ctx->outer;
5676 while (c && !t)
5678 t = maybe_lookup_decl (orig, c);
5679 c = c->outer;
5681 incoming = outgoing = (t ? t : orig);
5684 has_outer_reduction:;
5687 if (!ref_to_res)
5688 ref_to_res = integer_zero_node;
5690 if (omp_is_reference (orig))
5692 tree type = TREE_TYPE (var);
5693 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5695 if (!inner)
5697 tree x = create_tmp_var (TREE_TYPE (type), id);
5698 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5701 v1 = create_tmp_var (type, id);
5702 v2 = create_tmp_var (type, id);
5703 v3 = create_tmp_var (type, id);
5705 gimplify_assign (v1, var, fork_seq);
5706 gimplify_assign (v2, var, fork_seq);
5707 gimplify_assign (v3, var, fork_seq);
5709 var = build_simple_mem_ref (var);
5710 v1 = build_simple_mem_ref (v1);
5711 v2 = build_simple_mem_ref (v2);
5712 v3 = build_simple_mem_ref (v3);
5713 outgoing = build_simple_mem_ref (outgoing);
5715 if (!TREE_CONSTANT (incoming))
5716 incoming = build_simple_mem_ref (incoming);
5718 else
5719 v1 = v2 = v3 = var;
5721 /* Determine position in reduction buffer, which may be used
5722 by target. The parser has ensured that this is not a
5723 variable-sized type. */
5724 fixed_size_mode mode
5725 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5726 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
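/* Round OFFSET up to a multiple of ALIGN; e.g. an offset of 12 with
   an alignment of 8 becomes 16.  */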
5727 offset = (offset + align - 1) & ~(align - 1);
5728 tree off = build_int_cst (sizetype, offset);
5729 offset += GET_MODE_SIZE (mode);
5731 if (!init_code)
5733 init_code = build_int_cst (integer_type_node,
5734 IFN_GOACC_REDUCTION_INIT);
5735 fini_code = build_int_cst (integer_type_node,
5736 IFN_GOACC_REDUCTION_FINI);
5737 setup_code = build_int_cst (integer_type_node,
5738 IFN_GOACC_REDUCTION_SETUP);
5739 teardown_code = build_int_cst (integer_type_node,
5740 IFN_GOACC_REDUCTION_TEARDOWN);
5743 tree setup_call
5744 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5745 TREE_TYPE (var), 6, setup_code,
5746 unshare_expr (ref_to_res),
5747 incoming, level, op, off);
5748 tree init_call
5749 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5750 TREE_TYPE (var), 6, init_code,
5751 unshare_expr (ref_to_res),
5752 v1, level, op, off);
5753 tree fini_call
5754 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5755 TREE_TYPE (var), 6, fini_code,
5756 unshare_expr (ref_to_res),
5757 v2, level, op, off);
5758 tree teardown_call
5759 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5760 TREE_TYPE (var), 6, teardown_code,
5761 ref_to_res, v3, level, op, off);
5763 gimplify_assign (v1, setup_call, &before_fork);
5764 gimplify_assign (v2, init_call, &after_fork);
5765 gimplify_assign (v3, fini_call, &before_join);
5766 gimplify_assign (outgoing, teardown_call, &after_join);
5769 /* Now stitch things together. */
5770 gimple_seq_add_seq (fork_seq, before_fork);
5771 if (fork)
5772 gimple_seq_add_stmt (fork_seq, fork);
5773 gimple_seq_add_seq (fork_seq, after_fork);
5775 gimple_seq_add_seq (join_seq, before_join);
5776 if (join)
5777 gimple_seq_add_stmt (join_seq, join);
5778 gimple_seq_add_seq (join_seq, after_join);
5781 /* Generate code to implement the REDUCTION clauses. */
5783 static void
5784 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5786 gimple_seq sub_seq = NULL;
5787 gimple *stmt;
5788 tree x, c;
5789 int count = 0;
5791 /* OpenACC loop reductions are handled elsewhere. */
5792 if (is_gimple_omp_oacc (ctx->stmt))
5793 return;
5795 /* SIMD reductions are handled in lower_rec_input_clauses. */
5796 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5797 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5798 return;
5800 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5801 update in that case, otherwise use a lock. */
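/* Illustrative sketch (not taken from an actual dump): a lone
   "reduction (+:s)" is merged with a single relaxed atomic update,
   roughly equivalent to

       #pragma omp atomic relaxed
       s_outer = s_outer + s_priv;

   whereas two or more reduction clauses are merged between a
   GOMP_atomic_start ()/GOMP_atomic_end () pair instead.  */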
5802 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5804 && !OMP_CLAUSE_REDUCTION_TASK (c))
5806 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5807 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5809 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5810 count = -1;
5811 break;
5813 count++;
5816 if (count == 0)
5817 return;
5819 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5821 tree var, ref, new_var, orig_var;
5822 enum tree_code code;
5823 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5825 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5826 || OMP_CLAUSE_REDUCTION_TASK (c))
5827 continue;
5829 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5830 orig_var = var = OMP_CLAUSE_DECL (c);
5831 if (TREE_CODE (var) == MEM_REF)
5833 var = TREE_OPERAND (var, 0);
5834 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5835 var = TREE_OPERAND (var, 0);
5836 if (TREE_CODE (var) == ADDR_EXPR)
5837 var = TREE_OPERAND (var, 0);
5838 else
5840 /* If this is a pointer- or reference-based array
5841 section, the var could be private in the outer
5842 context, e.g. on an orphaned loop construct. Pretend this
5843 is a private variable's outer reference. */
5844 ccode = OMP_CLAUSE_PRIVATE;
5845 if (TREE_CODE (var) == INDIRECT_REF)
5846 var = TREE_OPERAND (var, 0);
5848 orig_var = var;
5849 if (is_variable_sized (var))
5851 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5852 var = DECL_VALUE_EXPR (var);
5853 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5854 var = TREE_OPERAND (var, 0);
5855 gcc_assert (DECL_P (var));
5858 new_var = lookup_decl (var, ctx);
5859 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5860 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5861 ref = build_outer_var_ref (var, ctx, ccode);
5862 code = OMP_CLAUSE_REDUCTION_CODE (c);
5864 /* reduction(-:var) sums up the partial results, so it acts
5865 identically to reduction(+:var). */
5866 if (code == MINUS_EXPR)
5867 code = PLUS_EXPR;
5869 if (count == 1)
5871 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5873 addr = save_expr (addr);
5874 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5875 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5876 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5877 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
5878 gimplify_and_add (x, stmt_seqp);
5879 return;
5881 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5883 tree d = OMP_CLAUSE_DECL (c);
5884 tree type = TREE_TYPE (d);
5885 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5886 tree i = create_tmp_var (TREE_TYPE (v));
5887 tree ptype = build_pointer_type (TREE_TYPE (type));
5888 tree bias = TREE_OPERAND (d, 1);
5889 d = TREE_OPERAND (d, 0);
5890 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5892 tree b = TREE_OPERAND (d, 1);
5893 b = maybe_lookup_decl (b, ctx);
5894 if (b == NULL)
5896 b = TREE_OPERAND (d, 1);
5897 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5899 if (integer_zerop (bias))
5900 bias = b;
5901 else
5903 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5904 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5905 TREE_TYPE (b), b, bias);
5907 d = TREE_OPERAND (d, 0);
5909 /* For ref, build_outer_var_ref already performs the
5910 dereference, so only new_var needs one. */
5911 if (TREE_CODE (d) == INDIRECT_REF)
5913 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5914 gcc_assert (omp_is_reference (var) && var == orig_var);
5916 else if (TREE_CODE (d) == ADDR_EXPR)
5918 if (orig_var == var)
5920 new_var = build_fold_addr_expr (new_var);
5921 ref = build_fold_addr_expr (ref);
5924 else
5926 gcc_assert (orig_var == var);
5927 if (omp_is_reference (var))
5928 ref = build_fold_addr_expr (ref);
5930 if (DECL_P (v))
5932 tree t = maybe_lookup_decl (v, ctx);
5933 if (t)
5934 v = t;
5935 else
5936 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5937 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5939 if (!integer_zerop (bias))
5941 bias = fold_convert_loc (clause_loc, sizetype, bias);
5942 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5943 TREE_TYPE (new_var), new_var,
5944 unshare_expr (bias));
5945 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5946 TREE_TYPE (ref), ref, bias);
5948 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5949 ref = fold_convert_loc (clause_loc, ptype, ref);
5950 tree m = create_tmp_var (ptype);
5951 gimplify_assign (m, new_var, stmt_seqp);
5952 new_var = m;
5953 m = create_tmp_var (ptype);
5954 gimplify_assign (m, ref, stmt_seqp);
5955 ref = m;
5956 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5957 tree body = create_artificial_label (UNKNOWN_LOCATION);
5958 tree end = create_artificial_label (UNKNOWN_LOCATION);
5959 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5960 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5961 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5962 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5964 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5965 tree decl_placeholder
5966 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5967 SET_DECL_VALUE_EXPR (placeholder, out);
5968 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5969 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5970 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5971 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5972 gimple_seq_add_seq (&sub_seq,
5973 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5974 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5975 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5976 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5978 else
5980 x = build2 (code, TREE_TYPE (out), out, priv);
5981 out = unshare_expr (out);
5982 gimplify_assign (out, x, &sub_seq);
5984 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5985 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5986 gimple_seq_add_stmt (&sub_seq, g);
5987 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5988 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5989 gimple_seq_add_stmt (&sub_seq, g);
5990 g = gimple_build_assign (i, PLUS_EXPR, i,
5991 build_int_cst (TREE_TYPE (i), 1));
5992 gimple_seq_add_stmt (&sub_seq, g);
5993 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5994 gimple_seq_add_stmt (&sub_seq, g);
5995 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5997 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5999 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6001 if (omp_is_reference (var)
6002 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6003 TREE_TYPE (ref)))
6004 ref = build_fold_addr_expr_loc (clause_loc, ref);
6005 SET_DECL_VALUE_EXPR (placeholder, ref);
6006 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6007 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6008 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6009 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6010 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6012 else
6014 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6015 ref = build_outer_var_ref (var, ctx);
6016 gimplify_assign (ref, x, &sub_seq);
6020 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6021 0);
6022 gimple_seq_add_stmt (stmt_seqp, stmt);
6024 gimple_seq_add_seq (stmt_seqp, sub_seq);
6026 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
6027 0);
6028 gimple_seq_add_stmt (stmt_seqp, stmt);
6032 /* Generate code to implement the COPYPRIVATE clauses. */
6034 static void
6035 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6036 omp_context *ctx)
6038 tree c;
6040 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6042 tree var, new_var, ref, x;
6043 bool by_ref;
6044 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6046 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6047 continue;
6049 var = OMP_CLAUSE_DECL (c);
6050 by_ref = use_pointer_for_field (var, NULL);
6052 ref = build_sender_ref (var, ctx);
6053 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6054 if (by_ref)
6056 x = build_fold_addr_expr_loc (clause_loc, new_var);
6057 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6059 gimplify_assign (ref, x, slist);
6061 ref = build_receiver_ref (var, false, ctx);
6062 if (by_ref)
6064 ref = fold_convert_loc (clause_loc,
6065 build_pointer_type (TREE_TYPE (new_var)),
6066 ref);
6067 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6069 if (omp_is_reference (var))
6071 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6072 ref = build_simple_mem_ref_loc (clause_loc, ref);
6073 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6075 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6076 gimplify_and_add (x, rlist);
6081 /* Generate code to implement the FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6082 and REDUCTION clauses from the sender (aka parent) side. */
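/* E.g. (illustrative, with made-up field names): "firstprivate (x)"
   on a parallel stores the parent's value into the marshalling
   record in ILIST,

       .omp_data_o.1.x = x;      <- or &x when passed by reference

   and a non-firstprivate "lastprivate (x)" adds the copy-back

       x = .omp_data_o.1.x;

   to OLIST.  */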
6084 static void
6085 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
6086 omp_context *ctx)
6088 tree c, t;
6089 int ignored_looptemp = 0;
6090 bool is_taskloop = false;
6092 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
6093 by GOMP_taskloop. */
6094 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
6096 ignored_looptemp = 2;
6097 is_taskloop = true;
6100 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6102 tree val, ref, x, var;
6103 bool by_ref, do_in = false, do_out = false;
6104 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6106 switch (OMP_CLAUSE_CODE (c))
6108 case OMP_CLAUSE_PRIVATE:
6109 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6110 break;
6111 continue;
6112 case OMP_CLAUSE_FIRSTPRIVATE:
6113 case OMP_CLAUSE_COPYIN:
6114 case OMP_CLAUSE_LASTPRIVATE:
6115 case OMP_CLAUSE_IN_REDUCTION:
6116 case OMP_CLAUSE__REDUCTEMP_:
6117 break;
6118 case OMP_CLAUSE_REDUCTION:
6119 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
6120 continue;
6121 break;
6122 case OMP_CLAUSE_SHARED:
6123 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6124 break;
6125 continue;
6126 case OMP_CLAUSE__LOOPTEMP_:
6127 if (ignored_looptemp)
6129 ignored_looptemp--;
6130 continue;
6132 break;
6133 default:
6134 continue;
6137 val = OMP_CLAUSE_DECL (c);
6138 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6139 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
6140 && TREE_CODE (val) == MEM_REF)
6142 val = TREE_OPERAND (val, 0);
6143 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
6144 val = TREE_OPERAND (val, 0);
6145 if (TREE_CODE (val) == INDIRECT_REF
6146 || TREE_CODE (val) == ADDR_EXPR)
6147 val = TREE_OPERAND (val, 0);
6148 if (is_variable_sized (val))
6149 continue;
6152 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6153 outer taskloop region. */
6154 omp_context *ctx_for_o = ctx;
6155 if (is_taskloop
6156 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
6157 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6158 ctx_for_o = ctx->outer;
6160 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
6162 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
6163 && is_global_var (var)
6164 && (val == OMP_CLAUSE_DECL (c)
6165 || !is_task_ctx (ctx)
6166 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
6167 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
6168 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
6169 != POINTER_TYPE)))))
6170 continue;
6172 t = omp_member_access_dummy_var (var);
6173 if (t)
6175 var = DECL_VALUE_EXPR (var);
6176 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
6177 if (o != t)
6178 var = unshare_and_remap (var, t, o);
6179 else
6180 var = unshare_expr (var);
6183 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
6185 /* Handle taskloop firstprivate/lastprivate, where the
6186 lastprivate on GIMPLE_OMP_TASK is represented as
6187 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6188 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
6189 x = omp_build_component_ref (ctx->sender_decl, f);
6190 if (use_pointer_for_field (val, ctx))
6191 var = build_fold_addr_expr (var);
6192 gimplify_assign (x, var, ilist);
6193 DECL_ABSTRACT_ORIGIN (f) = NULL;
6194 continue;
6197 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6198 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
6199 || val == OMP_CLAUSE_DECL (c))
6200 && is_variable_sized (val))
6201 continue;
6202 by_ref = use_pointer_for_field (val, NULL);
6204 switch (OMP_CLAUSE_CODE (c))
6206 case OMP_CLAUSE_FIRSTPRIVATE:
6207 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
6208 && !by_ref
6209 && is_task_ctx (ctx))
6210 TREE_NO_WARNING (var) = 1;
6211 do_in = true;
6212 break;
6214 case OMP_CLAUSE_PRIVATE:
6215 case OMP_CLAUSE_COPYIN:
6216 case OMP_CLAUSE__LOOPTEMP_:
6217 case OMP_CLAUSE__REDUCTEMP_:
6218 do_in = true;
6219 break;
6221 case OMP_CLAUSE_LASTPRIVATE:
6222 if (by_ref || omp_is_reference (val))
6224 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6225 continue;
6226 do_in = true;
6228 else
6230 do_out = true;
6231 if (lang_hooks.decls.omp_private_outer_ref (val))
6232 do_in = true;
6234 break;
6236 case OMP_CLAUSE_REDUCTION:
6237 case OMP_CLAUSE_IN_REDUCTION:
6238 do_in = true;
6239 if (val == OMP_CLAUSE_DECL (c))
6241 if (is_task_ctx (ctx))
6242 by_ref = use_pointer_for_field (val, ctx);
6243 else
6244 do_out = !(by_ref || omp_is_reference (val));
6246 else
6247 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
6248 break;
6250 default:
6251 gcc_unreachable ();
6254 if (do_in)
6256 ref = build_sender_ref (val, ctx);
6257 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
6258 gimplify_assign (ref, x, ilist);
6259 if (is_task_ctx (ctx))
6260 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
6263 if (do_out)
6265 ref = build_sender_ref (val, ctx);
6266 gimplify_assign (var, ref, olist);
6271 /* Generate code to implement SHARED from the sender (aka parent)
6272 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6273 list things that got automatically shared. */
6275 static void
6276 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
6278 tree var, ovar, nvar, t, f, x, record_type;
6280 if (ctx->record_type == NULL)
6281 return;
6283 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
6284 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
6286 ovar = DECL_ABSTRACT_ORIGIN (f);
6287 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
6288 continue;
6290 nvar = maybe_lookup_decl (ovar, ctx);
6291 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
6292 continue;
6294 /* If CTX is a nested parallel directive, find the immediately
6295 enclosing parallel or workshare construct that contains a
6296 mapping for OVAR. */
6297 var = lookup_decl_in_outer_ctx (ovar, ctx);
6299 t = omp_member_access_dummy_var (var);
6300 if (t)
6302 var = DECL_VALUE_EXPR (var);
6303 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
6304 if (o != t)
6305 var = unshare_and_remap (var, t, o);
6306 else
6307 var = unshare_expr (var);
6310 if (use_pointer_for_field (ovar, ctx))
6312 x = build_sender_ref (ovar, ctx);
6313 var = build_fold_addr_expr (var);
6314 gimplify_assign (x, var, ilist);
6316 else
6318 x = build_sender_ref (ovar, ctx);
6319 gimplify_assign (x, var, ilist);
6321 if (!TREE_READONLY (var)
6322 /* We don't need to receive a new reference to a result
6323 or parm decl. In fact we must not store to it, as that
6324 would invalidate any pending RSO and generate wrong gimple
6325 during inlining. */
6326 && !((TREE_CODE (var) == RESULT_DECL
6327 || TREE_CODE (var) == PARM_DECL)
6328 && DECL_BY_REFERENCE (var)))
6330 x = build_sender_ref (ovar, ctx);
6331 gimplify_assign (var, x, olist);
6337 /* Emit an OpenACC head marker call, encapsulating the partitioning and
6338 other information that must be processed by the target compiler.
6339 Return the maximum number of dimensions the associated loop might
6340 be partitioned over. */
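/* E.g. (a sketch, assuming "#pragma acc loop gang vector"): the
   emitted call looks along the lines of

       .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 2,
                               OLF_DIM_GANG | OLF_DIM_VECTOR | ...);

   and 2 is returned as the number of partitioned levels.  */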
6342 static unsigned
6343 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
6344 gimple_seq *seq, omp_context *ctx)
6346 unsigned levels = 0;
6347 unsigned tag = 0;
6348 tree gang_static = NULL_TREE;
6349 auto_vec<tree, 5> args;
6351 args.quick_push (build_int_cst
6352 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
6353 args.quick_push (ddvar);
6354 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6356 switch (OMP_CLAUSE_CODE (c))
6358 case OMP_CLAUSE_GANG:
6359 tag |= OLF_DIM_GANG;
6360 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
6361 /* static:* is represented by -1, and we can ignore it, as
6362 scheduling is always static. */
6363 if (gang_static && integer_minus_onep (gang_static))
6364 gang_static = NULL_TREE;
6365 levels++;
6366 break;
6368 case OMP_CLAUSE_WORKER:
6369 tag |= OLF_DIM_WORKER;
6370 levels++;
6371 break;
6373 case OMP_CLAUSE_VECTOR:
6374 tag |= OLF_DIM_VECTOR;
6375 levels++;
6376 break;
6378 case OMP_CLAUSE_SEQ:
6379 tag |= OLF_SEQ;
6380 break;
6382 case OMP_CLAUSE_AUTO:
6383 tag |= OLF_AUTO;
6384 break;
6386 case OMP_CLAUSE_INDEPENDENT:
6387 tag |= OLF_INDEPENDENT;
6388 break;
6390 case OMP_CLAUSE_TILE:
6391 tag |= OLF_TILE;
6392 break;
6394 default:
6395 continue;
6399 if (gang_static)
6401 if (DECL_P (gang_static))
6402 gang_static = build_outer_var_ref (gang_static, ctx);
6403 tag |= OLF_GANG_STATIC;
6406 /* In a parallel region, loops are implicitly INDEPENDENT. */
6407 omp_context *tgt = enclosing_target_ctx (ctx);
6408 if (!tgt || is_oacc_parallel (tgt))
6409 tag |= OLF_INDEPENDENT;
6411 if (tag & OLF_TILE)
6412 /* Tiling could use all 3 levels. */
6413 levels = 3;
6414 else
6416 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
6417 Ensure at least one level, or 2 for possible auto
6418 partitioning. */
6419 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
6420 << OLF_DIM_BASE) | OLF_SEQ));
6422 if (levels < 1u + maybe_auto)
6423 levels = 1u + maybe_auto;
6426 args.quick_push (build_int_cst (integer_type_node, levels));
6427 args.quick_push (build_int_cst (integer_type_node, tag));
6428 if (gang_static)
6429 args.quick_push (gang_static);
6431 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
6432 gimple_set_location (call, loc);
6433 gimple_set_lhs (call, ddvar);
6434 gimple_seq_add_stmt (seq, call);
6436 return levels;
6439 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW is the
6440 partitioning level of the enclosed region. */
6442 static void
6443 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
6444 tree tofollow, gimple_seq *seq)
6446 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
6447 : IFN_UNIQUE_OACC_TAIL_MARK);
6448 tree marker = build_int_cst (integer_type_node, marker_kind);
6449 int nargs = 2 + (tofollow != NULL_TREE);
6450 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
6451 marker, ddvar, tofollow);
6452 gimple_set_location (call, loc);
6453 gimple_set_lhs (call, ddvar);
6454 gimple_seq_add_stmt (seq, call);
6457 /* Generate the before and after OpenACC loop sequences. CLAUSES are
6458 the loop clauses, from which we extract reductions. Initialize
6459 HEAD and TAIL. */
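/* Roughly, for a two-level loop the result is shaped as (sketch):

     HEAD:  ddvar = 0; HEAD_MARK (2 levels + tag);
            fork sequence for the outer level;
            HEAD_MARK (1); fork sequence for the inner level;
            HEAD_MARK ();
     TAIL:  TAIL_MARK (2); join sequence for the inner level;
            TAIL_MARK (1); join sequence for the outer level;
            TAIL_MARK ();  */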
6461 static void
6462 lower_oacc_head_tail (location_t loc, tree clauses,
6463 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
6465 bool inner = false;
6466 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
6467 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
6469 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
6470 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
6471 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
6473 gcc_assert (count);
6474 for (unsigned done = 1; count; count--, done++)
6476 gimple_seq fork_seq = NULL;
6477 gimple_seq join_seq = NULL;
6479 tree place = build_int_cst (integer_type_node, -1);
6480 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
6481 fork_kind, ddvar, place);
6482 gimple_set_location (fork, loc);
6483 gimple_set_lhs (fork, ddvar);
6485 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
6486 join_kind, ddvar, place);
6487 gimple_set_location (join, loc);
6488 gimple_set_lhs (join, ddvar);
6490 /* Mark the beginning of this level sequence. */
6491 if (inner)
6492 lower_oacc_loop_marker (loc, ddvar, true,
6493 build_int_cst (integer_type_node, count),
6494 &fork_seq);
6495 lower_oacc_loop_marker (loc, ddvar, false,
6496 build_int_cst (integer_type_node, done),
6497 &join_seq);
6499 lower_oacc_reductions (loc, clauses, place, inner,
6500 fork, join, &fork_seq, &join_seq, ctx);
6502 /* Append this level to head. */
6503 gimple_seq_add_seq (head, fork_seq);
6504 /* Prepend it to tail. */
6505 gimple_seq_add_seq (&join_seq, *tail);
6506 *tail = join_seq;
6508 inner = true;
6511 /* Mark the end of the sequence. */
6512 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
6513 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
6516 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
6517 catch handler and return it. This prevents programs from violating the
6518 structured block semantics with throws. */
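/* I.e. the result is equivalent to (sketch):

       try { BODY } catch { <must-not-throw handler> }

   using the language's EH cleanup action when available and
   __builtin_trap () otherwise.  */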
6520 static gimple_seq
6521 maybe_catch_exception (gimple_seq body)
6523 gimple *g;
6524 tree decl;
6526 if (!flag_exceptions)
6527 return body;
6529 if (lang_hooks.eh_protect_cleanup_actions != NULL)
6530 decl = lang_hooks.eh_protect_cleanup_actions ();
6531 else
6532 decl = builtin_decl_explicit (BUILT_IN_TRAP);
6534 g = gimple_build_eh_must_not_throw (decl);
6535 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
6536 GIMPLE_TRY_CATCH);
6538 return gimple_seq_alloc_with_stmt (g);
6542 /* Routines to lower OMP directives into OMP-GIMPLE. */
6544 /* If CTX is a worksharing context inside of a cancellable parallel
6545 region and it isn't nowait, add a lhs to its GIMPLE_OMP_RETURN
6546 and a conditional branch to the parallel's cancel_label to handle
6547 cancellation in the implicit barrier. */
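/* Sketch of the shape this creates (names invented):

       GIMPLE_OMP_RETURN <lhs>       <- lhs set by the implicit barrier
       if (lhs != 0) goto cancel_label; else goto fallthru;
     fallthru:

   so a cancelled construct branches to the parallel's cancel label
   instead of falling through past the barrier.  */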
6549 static void
6550 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
6551 gimple_seq *body)
6553 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
6554 if (gimple_omp_return_nowait_p (omp_return))
6555 return;
6556 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
6557 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
6558 && outer->cancellable)
6560 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
6561 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
6562 tree lhs = create_tmp_var (c_bool_type);
6563 gimple_omp_return_set_lhs (omp_return, lhs);
6564 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
6565 gimple *g = gimple_build_cond (NE_EXPR, lhs,
6566 fold_convert (c_bool_type,
6567 boolean_false_node),
6568 outer->cancel_label, fallthru_label);
6569 gimple_seq_add_stmt (body, g);
6570 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
6572 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
6573 return;
6576 /* Find the first task_reduction or reduction clause or return NULL
6577 if there are none. */
6579 static inline tree
6580 omp_task_reductions_find_first (tree clauses, enum tree_code code,
6581 enum omp_clause_code ccode)
6583 while (1)
6585 clauses = omp_find_clause (clauses, ccode);
6586 if (clauses == NULL_TREE)
6587 return NULL_TREE;
6588 if (ccode != OMP_CLAUSE_REDUCTION
6589 || code == OMP_TASKLOOP
6590 || OMP_CLAUSE_REDUCTION_TASK (clauses))
6591 return clauses;
6592 clauses = OMP_CLAUSE_CHAIN (clauses);
6596 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
6597 gimple_seq *, gimple_seq *);
6599 /* Lower the OpenMP sections directive in the current statement in GSI_P.
6600 CTX is the enclosing OMP context for the current statement. */
6602 static void
6603 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6605 tree block, control;
6606 gimple_stmt_iterator tgsi;
6607 gomp_sections *stmt;
6608 gimple *t;
6609 gbind *new_stmt, *bind;
6610 gimple_seq ilist, dlist, olist, tred_dlist = NULL, new_body;
6612 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
6614 push_gimplify_context ();
6616 dlist = NULL;
6617 ilist = NULL;
6619 tree rclauses
6620 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
6621 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
6622 tree rtmp = NULL_TREE;
6623 if (rclauses)
6625 tree type = build_pointer_type (pointer_sized_int_node);
6626 tree temp = create_tmp_var (type);
6627 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
6628 OMP_CLAUSE_DECL (c) = temp;
6629 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
6630 gimple_omp_sections_set_clauses (stmt, c);
6631 lower_omp_task_reductions (ctx, OMP_SECTIONS,
6632 gimple_omp_sections_clauses (stmt),
6633 &ilist, &tred_dlist);
6634 rclauses = c;
6635 rtmp = make_ssa_name (type);
6636 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
6639 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
6640 &ilist, &dlist, ctx, NULL);
6642 new_body = gimple_omp_body (stmt);
6643 gimple_omp_set_body (stmt, NULL);
6644 tgsi = gsi_start (new_body);
6645 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
6647 omp_context *sctx;
6648 gimple *sec_start;
6650 sec_start = gsi_stmt (tgsi);
6651 sctx = maybe_lookup_ctx (sec_start);
6652 gcc_assert (sctx);
6654 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
6655 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
6656 GSI_CONTINUE_LINKING);
6657 gimple_omp_set_body (sec_start, NULL);
6659 if (gsi_one_before_end_p (tgsi))
6661 gimple_seq l = NULL;
6662 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
6663 &l, ctx);
6664 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
6665 gimple_omp_section_set_last (sec_start);
6668 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
6669 GSI_CONTINUE_LINKING);
6672 block = make_node (BLOCK);
6673 bind = gimple_build_bind (NULL, new_body, block);
6675 olist = NULL;
6676 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
6678 block = make_node (BLOCK);
6679 new_stmt = gimple_build_bind (NULL, NULL, block);
6680 gsi_replace (gsi_p, new_stmt, true);
6682 pop_gimplify_context (new_stmt);
6683 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6684 BLOCK_VARS (block) = gimple_bind_vars (bind);
6685 if (BLOCK_VARS (block))
6686 TREE_USED (block) = 1;
6688 new_body = NULL;
6689 gimple_seq_add_seq (&new_body, ilist);
6690 gimple_seq_add_stmt (&new_body, stmt);
6691 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
6692 gimple_seq_add_stmt (&new_body, bind);
6694 control = create_tmp_var (unsigned_type_node, ".section");
6695 t = gimple_build_omp_continue (control, control);
6696 gimple_omp_sections_set_control (stmt, control);
6697 gimple_seq_add_stmt (&new_body, t);
6699 gimple_seq_add_seq (&new_body, olist);
6700 if (ctx->cancellable)
6701 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
6702 gimple_seq_add_seq (&new_body, dlist);
6704 new_body = maybe_catch_exception (new_body);
6706 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
6707 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6708 t = gimple_build_omp_return (nowait);
6709 gimple_seq_add_stmt (&new_body, t);
6710 gimple_seq_add_seq (&new_body, tred_dlist);
6711 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
6713 if (rclauses)
6714 OMP_CLAUSE_DECL (rclauses) = rtmp;
6716 gimple_bind_set_body (new_stmt, new_body);
6720 /* A subroutine of lower_omp_single. Expand the simple form of
6721 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6723 if (GOMP_single_start ())
6724 BODY;
6725 [ GOMP_barrier (); ] -> unless 'nowait' is present.
6727 FIXME. It may be better to delay expanding the logic of this until
6728 pass_expand_omp. The expanded logic may make the job more difficult
6729 for a synchronization analysis pass. */
6731 static void
6732 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6734 location_t loc = gimple_location (single_stmt);
6735 tree tlabel = create_artificial_label (loc);
6736 tree flabel = create_artificial_label (loc);
6737 gimple *call, *cond;
6738 tree lhs, decl;
6740 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6741 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6742 call = gimple_build_call (decl, 0);
6743 gimple_call_set_lhs (call, lhs);
6744 gimple_seq_add_stmt (pre_p, call);
6746 cond = gimple_build_cond (EQ_EXPR, lhs,
6747 fold_convert_loc (loc, TREE_TYPE (lhs),
6748 boolean_true_node),
6749 tlabel, flabel);
6750 gimple_seq_add_stmt (pre_p, cond);
6751 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6752 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6753 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6757 /* A subroutine of lower_omp_single. Expand the simple form of
6758 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6760 #pragma omp single copyprivate (a, b, c)
6762 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6765 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6767 BODY;
6768 copyout.a = a;
6769 copyout.b = b;
6770 copyout.c = c;
6771 GOMP_single_copy_end (&copyout);
6773 else
6775 a = copyout_p->a;
6776 b = copyout_p->b;
6777 c = copyout_p->c;
6779 GOMP_barrier ();
6782 FIXME. It may be better to delay expanding the logic of this until
6783 pass_expand_omp. The expanded logic may make the job more difficult
6784 for a synchronization analysis pass. */
6786 static void
6787 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6788 omp_context *ctx)
6790 tree ptr_type, t, l0, l1, l2, bfn_decl;
6791 gimple_seq copyin_seq;
6792 location_t loc = gimple_location (single_stmt);
6794 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6796 ptr_type = build_pointer_type (ctx->record_type);
6797 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6799 l0 = create_artificial_label (loc);
6800 l1 = create_artificial_label (loc);
6801 l2 = create_artificial_label (loc);
6803 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6804 t = build_call_expr_loc (loc, bfn_decl, 0);
6805 t = fold_convert_loc (loc, ptr_type, t);
6806 gimplify_assign (ctx->receiver_decl, t, pre_p);
6808 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6809 build_int_cst (ptr_type, 0));
6810 t = build3 (COND_EXPR, void_type_node, t,
6811 build_and_jump (&l0), build_and_jump (&l1));
6812 gimplify_and_add (t, pre_p);
6814 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6816 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6818 copyin_seq = NULL;
6819 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6820 &copyin_seq, ctx);
6822 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6823 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6824 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6825 gimplify_and_add (t, pre_p);
6827 t = build_and_jump (&l2);
6828 gimplify_and_add (t, pre_p);
6830 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6832 gimple_seq_add_seq (pre_p, copyin_seq);
6834 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6838 /* Expand code for an OpenMP single directive. */
6840 static void
6841 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6843 tree block;
6844 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6845 gbind *bind;
6846 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6848 push_gimplify_context ();
6850 block = make_node (BLOCK);
6851 bind = gimple_build_bind (NULL, NULL, block);
6852 gsi_replace (gsi_p, bind, true);
6853 bind_body = NULL;
6854 dlist = NULL;
6855 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6856 &bind_body, &dlist, ctx, NULL);
6857 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6859 gimple_seq_add_stmt (&bind_body, single_stmt);
6861 if (ctx->record_type)
6862 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6863 else
6864 lower_omp_single_simple (single_stmt, &bind_body);
6866 gimple_omp_set_body (single_stmt, NULL);
6868 gimple_seq_add_seq (&bind_body, dlist);
6870 bind_body = maybe_catch_exception (bind_body);
6872 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6873 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6874 gimple *g = gimple_build_omp_return (nowait);
6875 gimple_seq_add_stmt (&bind_body_tail, g);
6876 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
6877 if (ctx->record_type)
6879 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6880 tree clobber = build_constructor (ctx->record_type, NULL);
6881 TREE_THIS_VOLATILE (clobber) = 1;
6882 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6883 clobber), GSI_SAME_STMT);
6885 gimple_seq_add_seq (&bind_body, bind_body_tail);
6886 gimple_bind_set_body (bind, bind_body);
6888 pop_gimplify_context (bind);
6890 gimple_bind_append_vars (bind, ctx->block_vars);
6891 BLOCK_VARS (block) = ctx->block_vars;
6892 if (BLOCK_VARS (block))
6893 TREE_USED (block) = 1;
6897 /* Expand code for an OpenMP master directive. */
6899 static void
6900 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6902 tree block, lab = NULL, x, bfn_decl;
6903 gimple *stmt = gsi_stmt (*gsi_p);
6904 gbind *bind;
6905 location_t loc = gimple_location (stmt);
6906 gimple_seq tseq;
6908 push_gimplify_context ();
6910 block = make_node (BLOCK);
6911 bind = gimple_build_bind (NULL, NULL, block);
6912 gsi_replace (gsi_p, bind, true);
6913 gimple_bind_add_stmt (bind, stmt);
6915 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6916 x = build_call_expr_loc (loc, bfn_decl, 0);
6917 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6918 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6919 tseq = NULL;
6920 gimplify_and_add (x, &tseq);
6921 gimple_bind_add_seq (bind, tseq);
6923 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6924 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6925 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6926 gimple_omp_set_body (stmt, NULL);
6928 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6930 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6932 pop_gimplify_context (bind);
6934 gimple_bind_append_vars (bind, ctx->block_vars);
6935 BLOCK_VARS (block) = ctx->block_vars;
6938 /* Helper function for lower_omp_task_reductions. For a specific PASS,
6939 find the next clause that should be processed there, or return false
6940 if all of them have been processed already. */
6942 static inline bool
6943 omp_task_reduction_iterate (int pass, enum tree_code code,
6944 enum omp_clause_code ccode, tree *c, tree *decl,
6945 tree *type, tree *next)
6947 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
6949 if (ccode == OMP_CLAUSE_REDUCTION
6950 && code != OMP_TASKLOOP
6951 && !OMP_CLAUSE_REDUCTION_TASK (*c))
6952 continue;
6953 *decl = OMP_CLAUSE_DECL (*c);
6954 *type = TREE_TYPE (*decl);
6955 if (TREE_CODE (*decl) == MEM_REF)
6957 if (pass != 1)
6958 continue;
6960 else
6962 if (omp_is_reference (*decl))
6963 *type = TREE_TYPE (*type);
6964 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
6965 continue;
6967 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
6968 return true;
6970 *decl = NULL_TREE;
6971 *type = NULL_TREE;
6972 *next = NULL_TREE;
6973 return false;
6976 /* Lower task_reduction and reduction clauses (the latter, unless CODE is
6977 OMP_TASKGROUP, only with the task modifier). Register the mapping of those
6978 in the START sequence; reduce them and unregister them in the END sequence. */
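/* E.g. this lowers "#pragma omp taskgroup task_reduction (+:x)" and
   "#pragma omp taskloop reduction (+:x)", as well as reduction
   clauses carrying the task modifier on parallel/for/sections.  */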
6980 static void
6981 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
6982 gimple_seq *start, gimple_seq *end)
6984 enum omp_clause_code ccode
6985 = (code == OMP_TASKGROUP
6986 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
6987 tree cancellable = NULL_TREE;
6988 clauses = omp_task_reductions_find_first (clauses, code, ccode);
6989 if (clauses == NULL_TREE)
6990 return;
6991 if (code == OMP_FOR || code == OMP_SECTIONS)
6993 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
6994 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
6995 && outer->cancellable)
6997 cancellable = error_mark_node;
6998 break;
7000 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7001 break;
7003 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7004 tree *last = &TYPE_FIELDS (record_type);
7005 unsigned cnt = 0;
7006 if (cancellable)
7008 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7009 ptr_type_node);
7010 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7011 integer_type_node);
7012 *last = field;
7013 DECL_CHAIN (field) = ifield;
7014 last = &DECL_CHAIN (ifield);
7015 DECL_CONTEXT (field) = record_type;
7016 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7017 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7018 DECL_CONTEXT (ifield) = record_type;
7019 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7020 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7022 for (int pass = 0; pass < 2; pass++)
7024 tree decl, type, next;
7025 for (tree c = clauses;
7026 omp_task_reduction_iterate (pass, code, ccode,
7027 &c, &decl, &type, &next); c = next)
7029 ++cnt;
7030 tree new_type = type;
7031 if (ctx->outer)
7032 new_type = remap_type (type, &ctx->outer->cb);
7033 tree field
7034 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7035 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7036 new_type);
7037 if (DECL_P (decl) && type == TREE_TYPE (decl))
7039 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7040 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7041 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7043 else
7044 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7045 DECL_CONTEXT (field) = record_type;
7046 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7047 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7048 *last = field;
7049 last = &DECL_CHAIN (field);
7050 tree bfield
7051 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7052 boolean_type_node);
7053 DECL_CONTEXT (bfield) = record_type;
7054 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7055 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7056 *last = bfield;
7057 last = &DECL_CHAIN (bfield);
7060 *last = NULL_TREE;
7061 layout_type (record_type);
7063 /* Build up an array which registers with the runtime all the reductions
7064 and deregisters them at the end. Format documented in libgomp/task.c. */
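/* Rough layout, as far as this function fills it in (libgomp/task.c
   is authoritative):
     avar[0]            number of reductions (CNT)
     avar[1]            cache-line aligned size of one thread's record
     avar[2]            alignment; the runtime stores the pointer to the
                        allocated per-thread data back into this slot
     avar[3], avar[4]   -1 and 0 placeholders for the runtime
     avar[7 + 3*i]      address of the I-th reduction variable
     avar[7 + 3*i + 1]  byte offset of its field within the record
   The remaining slots are owned by the runtime.  */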
7065 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7066 tree avar = create_tmp_var_raw (atype);
7067 gimple_add_tmp_var (avar);
7068 TREE_ADDRESSABLE (avar) = 1;
7069 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7070 NULL_TREE, NULL_TREE);
7071 tree t = build_int_cst (pointer_sized_int_node, cnt);
7072 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7073 gimple_seq seq = NULL;
7074 tree sz = fold_convert (pointer_sized_int_node,
7075 TYPE_SIZE_UNIT (record_type));
7076 int cachesz = 64;
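/* Round the record size up to a whole cache line, i.e.
   sz = (sz + 63) & ~63 for the 64-byte line assumed above.  */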
7077 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7078 build_int_cst (pointer_sized_int_node, cachesz - 1));
7079 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7080 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7081 ctx->task_reductions.create (1 + cnt);
7082 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7083 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7084 ? sz : NULL_TREE);
7085 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7086 gimple_seq_add_seq (start, seq);
7087 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7088 NULL_TREE, NULL_TREE);
7089 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7090 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7091 NULL_TREE, NULL_TREE);
7092 t = build_int_cst (pointer_sized_int_node,
7093 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7094 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7095 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7096 NULL_TREE, NULL_TREE);
7097 t = build_int_cst (pointer_sized_int_node, -1);
7098 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7099 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7100 NULL_TREE, NULL_TREE);
7101 t = build_int_cst (pointer_sized_int_node, 0);
7102 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7104 /* In END, build a loop that iterates from 0 to < omp_get_num_threads ()
7105 and for each task reduction checks a bool right after the private variable
7106 within that thread's chunk; if the bool is clear, it hasn't been
7107 initialized and thus isn't going to be reduced nor destructed, otherwise
7108 reduce and destruct it. */
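/* Roughly (pseudocode):

       for (idx = 0; idx < num_thr_sz; idx++, data += sz)
         for each reduction R
           if (R's "initialized" bool in this thread's chunk)
             { merge R into the original variable; destruct R; }  */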
7109 tree idx = create_tmp_var (size_type_node);
7110 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7111 tree num_thr_sz = create_tmp_var (size_type_node);
7112 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7113 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7114 tree lab3 = NULL_TREE;
7115 gimple *g;
7116 if (code == OMP_FOR || code == OMP_SECTIONS)
7118 /* For worksharing constructs, only perform the merging in the master
7119 thread, with the exception of cancelled implicit barriers - then only
7120 handle the current thread. */
7121 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7122 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7123 tree thr_num = create_tmp_var (integer_type_node);
7124 g = gimple_build_call (t, 0);
7125 gimple_call_set_lhs (g, thr_num);
7126 gimple_seq_add_stmt (end, g);
7127 if (cancellable)
7129 tree c;
7130 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7131 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7132 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7133 if (code == OMP_FOR)
7134 c = gimple_omp_for_clauses (ctx->stmt);
7135 else /* if (code == OMP_SECTIONS) */
7136 c = gimple_omp_sections_clauses (ctx->stmt);
7137 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7138 cancellable = c;
7139 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7140 lab5, lab6);
7141 gimple_seq_add_stmt (end, g);
7142 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7143 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7144 gimple_seq_add_stmt (end, g);
7145 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7146 build_one_cst (TREE_TYPE (idx)));
7147 gimple_seq_add_stmt (end, g);
7148 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7149 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7151 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7152 gimple_seq_add_stmt (end, g);
7153 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7155 if (code != OMP_PARALLEL)
7157 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7158 tree num_thr = create_tmp_var (integer_type_node);
7159 g = gimple_build_call (t, 0);
7160 gimple_call_set_lhs (g, num_thr);
7161 gimple_seq_add_stmt (end, g);
7162 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7163 gimple_seq_add_stmt (end, g);
7164 if (cancellable)
7165 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7167 else
7169 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7170 OMP_CLAUSE__REDUCTEMP_);
7171 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7172 t = fold_convert (size_type_node, t);
7173 gimplify_assign (num_thr_sz, t, end);
7175 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7176 NULL_TREE, NULL_TREE);
7177 tree data = create_tmp_var (pointer_sized_int_node);
7178 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7179 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7180 tree ptr;
7181 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7182 ptr = create_tmp_var (build_pointer_type (record_type));
7183 else
7184 ptr = create_tmp_var (ptr_type_node);
7185 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7187 tree field = TYPE_FIELDS (record_type);
7188 cnt = 0;
7189 if (cancellable)
7190 field = DECL_CHAIN (DECL_CHAIN (field));
7191 for (int pass = 0; pass < 2; pass++)
7193 tree decl, type, next;
7194 for (tree c = clauses;
7195 omp_task_reduction_iterate (pass, code, ccode,
7196 &c, &decl, &type, &next); c = next)
7198 tree var = decl, ref;
7199 if (TREE_CODE (decl) == MEM_REF)
7201 var = TREE_OPERAND (var, 0);
7202 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7203 var = TREE_OPERAND (var, 0);
7204 tree v = var;
7205 if (TREE_CODE (var) == ADDR_EXPR)
7206 var = TREE_OPERAND (var, 0);
7207 else if (TREE_CODE (var) == INDIRECT_REF)
7208 var = TREE_OPERAND (var, 0);
7209 tree orig_var = var;
7210 if (is_variable_sized (var))
7212 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7213 var = DECL_VALUE_EXPR (var);
7214 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7215 var = TREE_OPERAND (var, 0);
7216 gcc_assert (DECL_P (var));
7218 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7219 if (orig_var != var)
7220 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7221 else if (TREE_CODE (v) == ADDR_EXPR)
7222 t = build_fold_addr_expr (t);
7223 else if (TREE_CODE (v) == INDIRECT_REF)
7224 t = build_fold_indirect_ref (t);
7225 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7227 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7228 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7229 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7231 if (!integer_zerop (TREE_OPERAND (decl, 1)))
7232 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7233 fold_convert (size_type_node,
7234 TREE_OPERAND (decl, 1)));
7236 else
7238 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7239 if (!omp_is_reference (decl))
7240 t = build_fold_addr_expr (t);
7242 t = fold_convert (pointer_sized_int_node, t);
7243 seq = NULL;
7244 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7245 gimple_seq_add_seq (start, seq);
7246 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7247 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7248 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7249 t = unshare_expr (byte_position (field));
7250 t = fold_convert (pointer_sized_int_node, t);
7251 ctx->task_reduction_map->put (c, cnt);
7252 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7253 ? t : NULL_TREE);
7254 seq = NULL;
7255 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7256 gimple_seq_add_seq (start, seq);
7257 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7258 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7259 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7261 tree bfield = DECL_CHAIN (field);
7262 tree cond;
7263 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7264 /* In parallel or worksharing constructs all threads unconditionally
7265 initialize all their task reduction private variables. */
7266 cond = boolean_true_node;
7267 else if (TREE_TYPE (ptr) == ptr_type_node)
7269 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7270 unshare_expr (byte_position (bfield)));
7271 seq = NULL;
7272 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7273 gimple_seq_add_seq (end, seq);
7274 tree pbool = build_pointer_type (TREE_TYPE (bfield));
7275 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7276 build_int_cst (pbool, 0));
7278 else
7279 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7280 build_simple_mem_ref (ptr), bfield, NULL_TREE);
7281 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7282 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7283 tree condv = create_tmp_var (boolean_type_node);
7284 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7285 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7286 lab3, lab4);
7287 gimple_seq_add_stmt (end, g);
7288 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7289 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7291 /* If this reduction doesn't need destruction and parallel
7292 has been cancelled, there is nothing to do for this
7293 reduction, so jump around the merge operation. */
7294 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7295 g = gimple_build_cond (NE_EXPR, cancellable,
7296 build_zero_cst (TREE_TYPE (cancellable)),
7297 lab4, lab5);
7298 gimple_seq_add_stmt (end, g);
7299 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7302 tree new_var;
7303 if (TREE_TYPE (ptr) == ptr_type_node)
7305 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7306 unshare_expr (byte_position (field)));
7307 seq = NULL;
7308 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7309 gimple_seq_add_seq (end, seq);
7310 tree pbool = build_pointer_type (TREE_TYPE (field));
7311 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7312 build_int_cst (pbool, 0));
7314 else
7315 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7316 build_simple_mem_ref (ptr), field, NULL_TREE);
7318 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7319 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7320 ref = build_simple_mem_ref (ref);
7321 /* reduction(-:var) sums up the partial results, so it acts
7322 identically to reduction(+:var). */
7323 if (rcode == MINUS_EXPR)
7324 rcode = PLUS_EXPR;
7325 if (TREE_CODE (decl) == MEM_REF)
7327 tree type = TREE_TYPE (new_var);
7328 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7329 tree i = create_tmp_var (TREE_TYPE (v));
7330 tree ptype = build_pointer_type (TREE_TYPE (type));
7331 if (DECL_P (v))
7333 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7334 tree vv = create_tmp_var (TREE_TYPE (v));
7335 gimplify_assign (vv, v, start);
7336 v = vv;
7338 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7339 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7340 new_var = build_fold_addr_expr (new_var);
7341 new_var = fold_convert (ptype, new_var);
7342 ref = fold_convert (ptype, ref);
7343 tree m = create_tmp_var (ptype);
7344 gimplify_assign (m, new_var, end);
7345 new_var = m;
7346 m = create_tmp_var (ptype);
7347 gimplify_assign (m, ref, end);
7348 ref = m;
7349 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
7350 tree body = create_artificial_label (UNKNOWN_LOCATION);
7351 tree endl = create_artificial_label (UNKNOWN_LOCATION);
7352 gimple_seq_add_stmt (end, gimple_build_label (body));
7353 tree priv = build_simple_mem_ref (new_var);
7354 tree out = build_simple_mem_ref (ref);
7355 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7357 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7358 tree decl_placeholder
7359 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7360 tree lab6 = NULL_TREE;
7361 if (cancellable)
7363 /* If this reduction needs destruction and parallel
7364 has been cancelled, jump around the merge operation
7365 to the destruction. */
7366 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7367 lab6 = create_artificial_label (UNKNOWN_LOCATION);
7368 tree zero = build_zero_cst (TREE_TYPE (cancellable));
7369 g = gimple_build_cond (NE_EXPR, cancellable, zero,
7370 lab6, lab5);
7371 gimple_seq_add_stmt (end, g);
7372 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7374 SET_DECL_VALUE_EXPR (placeholder, out);
7375 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7376 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7377 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7378 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7379 gimple_seq_add_seq (end,
7380 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7381 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7382 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7384 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7385 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7387 if (cancellable)
7388 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7389 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
7390 if (x)
7392 gimple_seq tseq = NULL;
7393 gimplify_stmt (&x, &tseq);
7394 gimple_seq_add_seq (end, tseq);
7397 else
7399 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
7400 out = unshare_expr (out);
7401 gimplify_assign (out, x, end);
7403 gimple *g
7404 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7405 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7406 gimple_seq_add_stmt (end, g);
7407 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7408 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7409 gimple_seq_add_stmt (end, g);
7410 g = gimple_build_assign (i, PLUS_EXPR, i,
7411 build_int_cst (TREE_TYPE (i), 1));
7412 gimple_seq_add_stmt (end, g);
7413 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
7414 gimple_seq_add_stmt (end, g);
7415 gimple_seq_add_stmt (end, gimple_build_label (endl));
7417 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7419 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7420 tree oldv = NULL_TREE;
7421 tree lab6 = NULL_TREE;
7422 if (cancellable)
7424 /* If this reduction needs destruction and the parallel construct
7425 has been cancelled, jump around the merge operation
7426 to the destruction. */
7427 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7428 lab6 = create_artificial_label (UNKNOWN_LOCATION);
7429 tree zero = build_zero_cst (TREE_TYPE (cancellable));
7430 g = gimple_build_cond (NE_EXPR, cancellable, zero,
7431 lab6, lab5);
7432 gimple_seq_add_stmt (end, g);
7433 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7435 if (omp_is_reference (decl)
7436 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7437 TREE_TYPE (ref)))
7438 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7439 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7440 tree refv = create_tmp_var (TREE_TYPE (ref));
7441 gimplify_assign (refv, ref, end);
7442 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
7443 SET_DECL_VALUE_EXPR (placeholder, ref);
7444 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7445 tree d = maybe_lookup_decl (decl, ctx);
7446 gcc_assert (d);
7447 if (DECL_HAS_VALUE_EXPR_P (d))
7448 oldv = DECL_VALUE_EXPR (d);
7449 if (omp_is_reference (var))
7451 tree v = fold_convert (TREE_TYPE (d),
7452 build_fold_addr_expr (new_var));
7453 SET_DECL_VALUE_EXPR (d, v);
7455 else
7456 SET_DECL_VALUE_EXPR (d, new_var);
7457 DECL_HAS_VALUE_EXPR_P (d) = 1;
7458 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7459 if (oldv)
7460 SET_DECL_VALUE_EXPR (d, oldv);
7461 else
7463 SET_DECL_VALUE_EXPR (d, NULL_TREE);
7464 DECL_HAS_VALUE_EXPR_P (d) = 0;
7466 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7467 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7468 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7469 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7470 if (cancellable)
7471 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7472 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
7473 if (x)
7475 gimple_seq tseq = NULL;
7476 gimplify_stmt (&x, &tseq);
7477 gimple_seq_add_seq (end, tseq);
7480 else
7482 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
7483 ref = unshare_expr (ref);
7484 gimplify_assign (ref, x, end);
7486 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7487 ++cnt;
7488 field = DECL_CHAIN (bfield);
7492 if (code == OMP_TASKGROUP)
7494 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
7495 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7496 gimple_seq_add_stmt (start, g);
7498 else
7500 tree c;
7501 if (code == OMP_FOR)
7502 c = gimple_omp_for_clauses (ctx->stmt);
7503 else if (code == OMP_SECTIONS)
7504 c = gimple_omp_sections_clauses (ctx->stmt);
7505 else
7506 c = gimple_omp_taskreg_clauses (ctx->stmt);
7507 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
7508 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
7509 build_fold_addr_expr (avar));
7510 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
7513 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
7514 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
7515 size_one_node));
7516 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
7517 gimple_seq_add_stmt (end, g);
7518 gimple_seq_add_stmt (end, gimple_build_label (lab2));
7519 if (code == OMP_FOR || code == OMP_SECTIONS)
7521 enum built_in_function bfn
7522 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
7523 t = builtin_decl_explicit (bfn);
7524 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
7525 tree arg;
7526 if (cancellable)
7528 arg = create_tmp_var (c_bool_type);
7529 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
7530 cancellable));
7532 else
7533 arg = build_int_cst (c_bool_type, 0);
7534 g = gimple_build_call (t, 1, arg);
7536 else
7538 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
7539 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7541 gimple_seq_add_stmt (end, g);
7542 t = build_constructor (atype, NULL);
7543 TREE_THIS_VOLATILE (t) = 1;
7544 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
7547 /* Expand code for an OpenMP taskgroup directive. */
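/* Ignoring task reductions, the lowered form of

     #pragma omp taskgroup
     <body>

   is roughly:

     GOMP_taskgroup_start ();
     <body>
     GIMPLE_OMP_RETURN

   where the return marker is later paired with the matching
   GOMP_taskgroup_end () call; any reduction registration emitted by
   lower_omp_task_reductions goes before the body and its cleanup
   after the return.  (A schematic sketch, not the exact gimple.)  */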
7549 static void
7550 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7552 gimple *stmt = gsi_stmt (*gsi_p);
7553 gcall *x;
7554 gbind *bind;
7555 gimple_seq dseq = NULL;
7556 tree block = make_node (BLOCK);
7558 bind = gimple_build_bind (NULL, NULL, block);
7559 gsi_replace (gsi_p, bind, true);
7560 gimple_bind_add_stmt (bind, stmt);
7562 push_gimplify_context ();
7564 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START), 0);
7566 gimple_bind_add_stmt (bind, x);
7568 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
7569 gimple_omp_taskgroup_clauses (stmt),
7570 gimple_bind_body_ptr (bind), &dseq);
7572 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7573 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7574 gimple_omp_set_body (stmt, NULL);
7576 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7577 gimple_bind_add_seq (bind, dseq);
7579 pop_gimplify_context (bind);
7581 gimple_bind_append_vars (bind, ctx->block_vars);
7582 BLOCK_VARS (block) = ctx->block_vars;
7586 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
7588 static void
7589 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
7590 omp_context *ctx)
7592 struct omp_for_data fd;
7593 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
7594 return;
7596 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
7597 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
7598 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
7599 if (!fd.ordered)
7600 return;
7602 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
7603 tree c = gimple_omp_ordered_clauses (ord_stmt);
7604 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7605 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
7607 /* Merge depend clauses from multiple adjacent
7608 #pragma omp ordered depend(sink:...) constructs
7609 into one #pragma omp ordered depend(sink:...), so that
7610 we can optimize them together. */
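/* For example, the adjacent pair

     #pragma omp ordered depend(sink:i-1,j)
     #pragma omp ordered depend(sink:i,j-1)

   is merged into a single construct carrying both clauses, roughly:

     #pragma omp ordered depend(sink:i-1,j) depend(sink:i,j-1)

   (a purely illustrative example; any number of adjacent constructs
   is chained this way).  */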
7611 gimple_stmt_iterator gsi = *gsi_p;
7612 gsi_next (&gsi);
7613 while (!gsi_end_p (gsi))
7615 gimple *stmt = gsi_stmt (gsi);
7616 if (is_gimple_debug (stmt)
7617 || gimple_code (stmt) == GIMPLE_NOP)
7619 gsi_next (&gsi);
7620 continue;
7622 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
7623 break;
7624 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
7625 c = gimple_omp_ordered_clauses (ord_stmt2);
7626 if (c == NULL_TREE
7627 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
7628 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
7629 break;
7630 while (*list_p)
7631 list_p = &OMP_CLAUSE_CHAIN (*list_p);
7632 *list_p = c;
7633 gsi_remove (&gsi, true);
7637 /* Canonicalize sink dependence clauses into one folded clause if
7638 possible.
7640 The basic algorithm is to create a sink vector whose first
7641 element is the GCD of all the first elements, and whose remaining
7642 elements are the minimum of the subsequent columns.
7644 We ignore dependence vectors whose first element is zero because
7645 such dependencies are known to be executed by the same thread.
7647 We take into account the direction of the loop, so a minimum
7648 becomes a maximum if the loop is iterating forwards. We also
7649 ignore sink clauses where the loop direction is unknown, or where
7650 the offsets are clearly invalid because they are not a multiple
7651 of the loop increment.
7653 For example:
7655 #pragma omp for ordered(2)
7656 for (i=0; i < N; ++i)
7657 for (j=0; j < M; ++j)
7659 #pragma omp ordered \
7660 depend(sink:i-8,j-2) \
7661 depend(sink:i,j-1) \ // Completely ignored because the i offset is 0.
7662 depend(sink:i-4,j-3) \
7663 depend(sink:i-6,j-4)
7664 #pragma omp ordered depend(source)
7667 Folded clause is:
7669 depend(sink:-gcd(8,4,6),-min(2,3,4))
7670 -or-
7671 depend(sink:-2,-2)
7674 /* FIXME: Computing GCDs where the first element is zero is
7675 non-trivial in the presence of collapsed loops. Do this later. */
7676 if (fd.collapse > 1)
7677 return;
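/* FOLDED_DEPS below is used as two buffers: elements [0, len) hold the
   folded vector accumulated so far (the GCD in slot 0, minima in the
   rest), while elements [len, 2 * len - 1) stash the offsets of the
   clause currently being examined, so that a lexically later clause
   can be adopted wholesale once it is known to win.  */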
7679 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
7681 /* wide_int is not a POD so it must be default-constructed. */
7682 for (unsigned i = 0; i != 2 * len - 1; ++i)
7683 new (static_cast<void*>(folded_deps + i)) wide_int ();
7685 tree folded_dep = NULL_TREE;
7686 /* TRUE if the first dimension's offset is negative. */
7687 bool neg_offset_p = false;
7689 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
7690 unsigned int i;
7691 while ((c = *list_p) != NULL)
7693 bool remove = false;
7695 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
7696 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
7697 goto next_ordered_clause;
7699 tree vec;
7700 for (vec = OMP_CLAUSE_DECL (c), i = 0;
7701 vec && TREE_CODE (vec) == TREE_LIST;
7702 vec = TREE_CHAIN (vec), ++i)
7704 gcc_assert (i < len);
7706 /* omp_extract_for_data has canonicalized the condition. */
7707 gcc_assert (fd.loops[i].cond_code == LT_EXPR
7708 || fd.loops[i].cond_code == GT_EXPR);
7709 bool forward = fd.loops[i].cond_code == LT_EXPR;
7710 bool maybe_lexically_later = true;
7712 /* While the committee makes up its mind, bail if we have any
7713 non-constant steps. */
7714 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
7715 goto lower_omp_ordered_ret;
7717 tree itype = TREE_TYPE (TREE_VALUE (vec));
7718 if (POINTER_TYPE_P (itype))
7719 itype = sizetype;
7720 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
7721 TYPE_PRECISION (itype),
7722 TYPE_SIGN (itype));
7724 /* Ignore invalid offsets that are not multiples of the step. */
7725 if (!wi::multiple_of_p (wi::abs (offset),
7726 wi::abs (wi::to_wide (fd.loops[i].step)),
7727 UNSIGNED))
7729 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7730 "ignoring sink clause with offset that is not "
7731 "a multiple of the loop step");
7732 remove = true;
7733 goto next_ordered_clause;
7736 /* Calculate the first dimension. The first dimension of
7737 the folded dependency vector is the GCD of the first
7738 elements, while ignoring any first elements whose offset
7739 is 0. */
7740 if (i == 0)
7742 /* Ignore dependence vectors whose first dimension is 0. */
7743 if (offset == 0)
7745 remove = true;
7746 goto next_ordered_clause;
7748 else
7750 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
7752 error_at (OMP_CLAUSE_LOCATION (c),
7753 "first offset must be in opposite direction "
7754 "of loop iterations");
7755 goto lower_omp_ordered_ret;
7757 if (forward)
7758 offset = -offset;
7759 neg_offset_p = forward;
7760 /* Initialize the first time around. */
7761 if (folded_dep == NULL_TREE)
7763 folded_dep = c;
7764 folded_deps[0] = offset;
7766 else
7767 folded_deps[0] = wi::gcd (folded_deps[0],
7768 offset, UNSIGNED);
7771 /* Calculate minimum for the remaining dimensions. */
7772 else
7774 folded_deps[len + i - 1] = offset;
7775 if (folded_dep == c)
7776 folded_deps[i] = offset;
7777 else if (maybe_lexically_later
7778 && !wi::eq_p (folded_deps[i], offset))
7780 if (forward ^ wi::gts_p (folded_deps[i], offset))
7782 unsigned int j;
7783 folded_dep = c;
7784 for (j = 1; j <= i; j++)
7785 folded_deps[j] = folded_deps[len + j - 1];
7787 else
7788 maybe_lexically_later = false;
7792 gcc_assert (i == len);
7794 remove = true;
7796 next_ordered_clause:
7797 if (remove)
7798 *list_p = OMP_CLAUSE_CHAIN (c);
7799 else
7800 list_p = &OMP_CLAUSE_CHAIN (c);
7803 if (folded_dep)
7805 if (neg_offset_p)
7806 folded_deps[0] = -folded_deps[0];
7808 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
7809 if (POINTER_TYPE_P (itype))
7810 itype = sizetype;
7812 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
7813 = wide_int_to_tree (itype, folded_deps[0]);
7814 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
7815 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
7818 lower_omp_ordered_ret:
7820 /* Ordered without clauses is #pragma omp ordered threads, while we want
7821 a nop instead if we remove all clauses. */
7822 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
7823 gsi_replace (gsi_p, gimple_build_nop (), true);
7827 /* Expand code for an OpenMP ordered directive. */
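/* Apart from the depend form handled separately, the body is bracketed
   by a pair of runtime calls, roughly:

     GOMP_ordered_start ();
     <body>
     GOMP_ordered_end ();

   or by the IFN_GOMP_SIMD_ORDERED_START/IFN_GOMP_SIMD_ORDERED_END
   internal functions when a simd clause is present.  */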
7829 static void
7830 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7832 tree block;
7833 gimple *stmt = gsi_stmt (*gsi_p), *g;
7834 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
7835 gcall *x;
7836 gbind *bind;
7837 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7838 OMP_CLAUSE_SIMD);
7839 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
7840 loop. */
7841 bool maybe_simt
7842 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
7843 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7844 OMP_CLAUSE_THREADS);
7846 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7847 OMP_CLAUSE_DEPEND))
7849 /* FIXME: This needs to be moved to the expansion pass, to verify various
7850 conditions that are only testable on a CFG with dominators computed; also,
7851 all the depend clauses to be merged might still need to be available
7852 for the runtime checks. */
7853 if (0)
7854 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
7855 return;
7858 push_gimplify_context ();
7860 block = make_node (BLOCK);
7861 bind = gimple_build_bind (NULL, NULL, block);
7862 gsi_replace (gsi_p, bind, true);
7863 gimple_bind_add_stmt (bind, stmt);
7865 if (simd)
7867 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
7868 build_int_cst (NULL_TREE, threads));
7869 cfun->has_simduid_loops = true;
7871 else
7872 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START), 0);
7874 gimple_bind_add_stmt (bind, x);
7876 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
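/* On SIMT targets the lanes of a vectorized simd loop must execute the
   ordered body one lane at a time, in lane order.  The sequence built
   below is, schematically:

     counter = GOMP_SIMT_LANE ();
   body:
     if (GOMP_SIMT_ORDERED_PRED (counter) == 0)
       <body>                            // this lane's turn
   test:
     counter = counter - 1;
     if (GOMP_SIMT_VOTE_ANY (counter >= 0))
       goto body;
   */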
7877 if (maybe_simt)
7879 counter = create_tmp_var (integer_type_node);
7880 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
7881 gimple_call_set_lhs (g, counter);
7882 gimple_bind_add_stmt (bind, g);
7884 body = create_artificial_label (UNKNOWN_LOCATION);
7885 test = create_artificial_label (UNKNOWN_LOCATION);
7886 gimple_bind_add_stmt (bind, gimple_build_label (body));
7888 tree simt_pred = create_tmp_var (integer_type_node);
7889 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
7890 gimple_call_set_lhs (g, simt_pred);
7891 gimple_bind_add_stmt (bind, g);
7893 tree t = create_artificial_label (UNKNOWN_LOCATION);
7894 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
7895 gimple_bind_add_stmt (bind, g);
7897 gimple_bind_add_stmt (bind, gimple_build_label (t));
7899 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7900 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7901 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7902 gimple_omp_set_body (stmt, NULL);
7904 if (maybe_simt)
7906 gimple_bind_add_stmt (bind, gimple_build_label (test));
7907 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
7908 gimple_bind_add_stmt (bind, g);
7910 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
7911 tree nonneg = create_tmp_var (integer_type_node);
7912 gimple_seq tseq = NULL;
7913 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
7914 gimple_bind_add_seq (bind, tseq);
7916 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
7917 gimple_call_set_lhs (g, nonneg);
7918 gimple_bind_add_stmt (bind, g);
7920 tree end = create_artificial_label (UNKNOWN_LOCATION);
7921 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
7922 gimple_bind_add_stmt (bind, g);
7924 gimple_bind_add_stmt (bind, gimple_build_label (end));
7926 if (simd)
7927 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
7928 build_int_cst (NULL_TREE, threads));
7929 else
7930 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 0);
7932 gimple_bind_add_stmt (bind, x);
7934 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7936 pop_gimplify_context (bind);
7938 gimple_bind_append_vars (bind, ctx->block_vars);
7939 BLOCK_VARS (block) = gimple_bind_vars (bind);
7943 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
7944 substitution of a couple of function calls. But in the NAMED case,
7945 it requires that the languages coordinate on a symbol name. It is
7946 therefore best put here in common code. */
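/* E.g. '#pragma omp critical (foo)' becomes, roughly:

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     <body>
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   where .gomp_critical_user_foo is the common symbol created below, so
   that every translation unit using the critical name 'foo' shares one
   lock.  The unnamed form uses GOMP_critical_start/GOMP_critical_end
   with no argument instead.  */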
7948 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
7950 static void
7951 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7953 tree block;
7954 tree name, lock, unlock;
7955 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
7956 gbind *bind;
7957 location_t loc = gimple_location (stmt);
7958 gimple_seq tbody;
7960 name = gimple_omp_critical_name (stmt);
7961 if (name)
7963 tree decl;
7965 if (!critical_name_mutexes)
7966 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
7968 tree *n = critical_name_mutexes->get (name);
7969 if (n == NULL)
7971 char *new_str;
7973 decl = create_tmp_var_raw (ptr_type_node);
7975 new_str = ACONCAT ((".gomp_critical_user_",
7976 IDENTIFIER_POINTER (name), NULL));
7977 DECL_NAME (decl) = get_identifier (new_str);
7978 TREE_PUBLIC (decl) = 1;
7979 TREE_STATIC (decl) = 1;
7980 DECL_COMMON (decl) = 1;
7981 DECL_ARTIFICIAL (decl) = 1;
7982 DECL_IGNORED_P (decl) = 1;
7984 varpool_node::finalize_decl (decl);
7986 critical_name_mutexes->put (name, decl);
7988 else
7989 decl = *n;
7991 /* If '#pragma omp critical' is inside an offloaded region or
7992 inside a function marked as offloadable, the symbol must be
7993 marked as offloadable too. */
7994 omp_context *octx;
7995 if (cgraph_node::get (current_function_decl)->offloadable)
7996 varpool_node::get_create (decl)->offloadable = 1;
7997 else
7998 for (octx = ctx->outer; octx; octx = octx->outer)
7999 if (is_gimple_omp_offloaded (octx->stmt))
8001 varpool_node::get_create (decl)->offloadable = 1;
8002 break;
8005 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
8006 lock = build_call_expr_loc (loc, lock, 1,
8007 build_fold_addr_expr_loc (loc, decl));
8009 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
8010 unlock = build_call_expr_loc (loc, unlock, 1,
8011 build_fold_addr_expr_loc (loc, decl));
8013 else
8015 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
8016 lock = build_call_expr_loc (loc, lock, 0);
8018 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
8019 unlock = build_call_expr_loc (loc, unlock, 0);
8022 push_gimplify_context ();
8024 block = make_node (BLOCK);
8025 bind = gimple_build_bind (NULL, NULL, block);
8026 gsi_replace (gsi_p, bind, true);
8027 gimple_bind_add_stmt (bind, stmt);
8029 tbody = gimple_bind_body (bind);
8030 gimplify_and_add (lock, &tbody);
8031 gimple_bind_set_body (bind, tbody);
8033 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8034 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8035 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8036 gimple_omp_set_body (stmt, NULL);
8038 tbody = gimple_bind_body (bind);
8039 gimplify_and_add (unlock, &tbody);
8040 gimple_bind_set_body (bind, tbody);
8042 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8044 pop_gimplify_context (bind);
8045 gimple_bind_append_vars (bind, ctx->block_vars);
8046 BLOCK_VARS (block) = gimple_bind_vars (bind);
8049 /* A subroutine of lower_omp_for. Generate code to emit the predicate
8050 for a lastprivate clause. Given a loop control predicate of (V
8051 cond N2), we gate the clause on (!(V cond N2)). The lowered form
8052 is appended to *DLIST, iterator initialization is appended to
8053 *BODY_P. */
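/* For instance, with 'for (V = 0; V < N; V += 1)' the emitted guard is
   roughly 'if (V >= N) <copy out lastprivate vars>', and for unit steps
   it is strengthened below to 'V == N' so that value-range propagation
   can fold away the copies more easily (an illustrative sketch, not the
   exact gimple).  */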
8055 static void
8056 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
8057 gimple_seq *dlist, struct omp_context *ctx)
8059 tree clauses, cond, vinit;
8060 enum tree_code cond_code;
8061 gimple_seq stmts;
8063 cond_code = fd->loop.cond_code;
8064 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
8066 /* When possible, use a strict equality expression. This can let
8067 VRP-style optimizations deduce the value and remove a copy. */
8068 if (tree_fits_shwi_p (fd->loop.step))
8070 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
8071 if (step == 1 || step == -1)
8072 cond_code = EQ_EXPR;
8075 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
8076 || gimple_omp_for_grid_phony (fd->for_stmt))
8077 cond = omp_grid_lastprivate_predicate (fd);
8078 else
8080 tree n2 = fd->loop.n2;
8081 if (fd->collapse > 1
8082 && TREE_CODE (n2) != INTEGER_CST
8083 && gimple_omp_for_combined_into_p (fd->for_stmt))
8085 struct omp_context *taskreg_ctx = NULL;
8086 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
8088 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
8089 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
8090 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
8092 if (gimple_omp_for_combined_into_p (gfor))
8094 gcc_assert (ctx->outer->outer
8095 && is_parallel_ctx (ctx->outer->outer));
8096 taskreg_ctx = ctx->outer->outer;
8098 else
8100 struct omp_for_data outer_fd;
8101 omp_extract_for_data (gfor, &outer_fd, NULL);
8102 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
8105 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
8106 taskreg_ctx = ctx->outer->outer;
8108 else if (is_taskreg_ctx (ctx->outer))
8109 taskreg_ctx = ctx->outer;
8110 if (taskreg_ctx)
8112 int i;
8113 tree taskreg_clauses
8114 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
8115 tree innerc = omp_find_clause (taskreg_clauses,
8116 OMP_CLAUSE__LOOPTEMP_);
8117 gcc_assert (innerc);
8118 for (i = 0; i < fd->collapse; i++)
8120 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8121 OMP_CLAUSE__LOOPTEMP_);
8122 gcc_assert (innerc);
8124 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8125 OMP_CLAUSE__LOOPTEMP_);
8126 if (innerc)
8127 n2 = fold_convert (TREE_TYPE (n2),
8128 lookup_decl (OMP_CLAUSE_DECL (innerc),
8129 taskreg_ctx));
8132 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
8135 clauses = gimple_omp_for_clauses (fd->for_stmt);
8136 stmts = NULL;
8137 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
8138 if (!gimple_seq_empty_p (stmts))
8140 gimple_seq_add_seq (&stmts, *dlist);
8141 *dlist = stmts;
8143 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
8144 vinit = fd->loop.n1;
8145 if (cond_code == EQ_EXPR
8146 && tree_fits_shwi_p (fd->loop.n2)
8147 && ! integer_zerop (fd->loop.n2))
8148 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
8149 else
8150 vinit = unshare_expr (vinit);
8152 /* Initialize the iterator variable, so that threads that don't execute
8153 any iterations don't execute the lastprivate clauses by accident. */
8154 gimplify_assign (fd->loop.v, vinit, body_p);
8159 /* Lower code for an OMP loop directive. */
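/* The body produced below is sequenced roughly as:

     <input clause setup>            (lower_rec_input_clauses)
     <pre-body>
     GIMPLE_OMP_FOR
     <loop body>
     GIMPLE_OMP_CONTINUE (V, V)
     <reduction epilogue>            (lower_reduction_clauses)
     <lastprivate/destructor list>
     GIMPLE_OMP_RETURN

   with OpenACC head/tail markers and task-reduction setup/teardown
   wrapped around it where applicable.  (A schematic overview; see the
   code for the exact ordering.)  */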
8161 static void
8162 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8164 tree *rhs_p, block;
8165 struct omp_for_data fd, *fdp = NULL;
8166 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
8167 gbind *new_stmt;
8168 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
8169 gimple_seq cnt_list = NULL;
8170 gimple_seq oacc_head = NULL, oacc_tail = NULL;
8171 size_t i;
8173 push_gimplify_context ();
8175 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
8177 block = make_node (BLOCK);
8178 new_stmt = gimple_build_bind (NULL, NULL, block);
8179 /* Replace at gsi right away, so that 'stmt' is no longer a member
8180 of a sequence, as we're going to add it to a different
8181 one below. */
8182 gsi_replace (gsi_p, new_stmt, true);
8184 /* Move the declarations of temporaries in the loop body to the new
8185 bind before we make the body go away. */
8186 omp_for_body = gimple_omp_body (stmt);
8187 if (!gimple_seq_empty_p (omp_for_body)
8188 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
8190 gbind *inner_bind
8191 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
8192 tree vars = gimple_bind_vars (inner_bind);
8193 gimple_bind_append_vars (new_stmt, vars);
8194 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
8195 keep them on the inner_bind and its block. */
8196 gimple_bind_set_vars (inner_bind, NULL_TREE);
8197 if (gimple_bind_block (inner_bind))
8198 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
8201 if (gimple_omp_for_combined_into_p (stmt))
8203 omp_extract_for_data (stmt, &fd, NULL);
8204 fdp = &fd;
8206 /* We need two temporaries with fd.loop.v type (istart/iend)
8207 and then (fd.collapse - 1) temporaries with the same
8208 type for count2 ... countN-1 vars if not constant. */
8209 size_t count = 2;
8210 tree type = fd.iter_type;
8211 if (fd.collapse > 1
8212 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8213 count += fd.collapse - 1;
8214 bool taskreg_for
8215 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
8216 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
8217 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
8218 tree simtc = NULL;
8219 tree clauses = *pc;
8220 if (taskreg_for)
8221 outerc
8222 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
8223 OMP_CLAUSE__LOOPTEMP_);
8224 if (ctx->simt_stmt)
8225 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
8226 OMP_CLAUSE__LOOPTEMP_);
8227 for (i = 0; i < count; i++)
8229 tree temp;
8230 if (taskreg_for)
8232 gcc_assert (outerc);
8233 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8234 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
8235 OMP_CLAUSE__LOOPTEMP_);
8237 else
8239 /* If there are 2 adjacent SIMD stmts, one with a _simt_
8240 clause and the other without, make sure they have the same
8241 decls in their _looptemp_ clauses, because the outer stmt
8242 they are combined into will look up just one inner_stmt. */
8243 if (ctx->simt_stmt)
8244 temp = OMP_CLAUSE_DECL (simtc);
8245 else
8246 temp = create_tmp_var (type);
8247 insert_decl_map (&ctx->outer->cb, temp, temp);
8249 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8250 OMP_CLAUSE_DECL (*pc) = temp;
8251 pc = &OMP_CLAUSE_CHAIN (*pc);
8252 if (ctx->simt_stmt)
8253 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
8254 OMP_CLAUSE__LOOPTEMP_);
8256 *pc = clauses;
8259 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
8260 dlist = NULL;
8261 body = NULL;
8262 tree rclauses
8263 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
8264 OMP_CLAUSE_REDUCTION);
8265 tree rtmp = NULL_TREE;
8266 if (rclauses)
8268 tree type = build_pointer_type (pointer_sized_int_node);
8269 tree temp = create_tmp_var (type);
8270 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8271 OMP_CLAUSE_DECL (c) = temp;
8272 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
8273 gimple_omp_for_set_clauses (stmt, c);
8274 lower_omp_task_reductions (ctx, OMP_FOR,
8275 gimple_omp_for_clauses (stmt),
8276 &tred_ilist, &tred_dlist);
8277 rclauses = c;
8278 rtmp = make_ssa_name (type);
8279 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
8282 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8283 fdp);
8284 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
8285 gimple_omp_for_pre_body (stmt));
8287 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8289 /* Lower the header expressions. At this point, we can assume that
8290 the header is of the form:
8292 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8294 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8295 using the .omp_data_s mapping, if needed. */
8296 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
8298 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
8299 if (!is_gimple_min_invariant (*rhs_p))
8300 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8301 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8302 recompute_tree_invariant_for_addr_expr (*rhs_p);
8304 rhs_p = gimple_omp_for_final_ptr (stmt, i);
8305 if (!is_gimple_min_invariant (*rhs_p))
8306 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8307 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8308 recompute_tree_invariant_for_addr_expr (*rhs_p);
8310 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
8311 if (!is_gimple_min_invariant (*rhs_p))
8312 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8314 if (rclauses)
8315 gimple_seq_add_seq (&tred_ilist, cnt_list);
8316 else
8317 gimple_seq_add_seq (&body, cnt_list);
8319 /* Once lowered, extract the bounds and clauses. */
8320 omp_extract_for_data (stmt, &fd, NULL);
8322 if (is_gimple_omp_oacc (ctx->stmt)
8323 && !ctx_in_oacc_kernels_region (ctx))
8324 lower_oacc_head_tail (gimple_location (stmt),
8325 gimple_omp_for_clauses (stmt),
8326 &oacc_head, &oacc_tail, ctx);
8328 /* Add OpenACC partitioning and reduction markers just before the loop. */
8329 if (oacc_head)
8330 gimple_seq_add_seq (&body, oacc_head);
8332 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
8334 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
8335 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
8336 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8337 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8339 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
8340 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
8341 OMP_CLAUSE_LINEAR_STEP (c)
8342 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
8343 ctx);
8346 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
8347 && gimple_omp_for_grid_phony (stmt));
8348 if (!phony_loop)
8349 gimple_seq_add_stmt (&body, stmt);
8350 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
8352 if (!phony_loop)
8353 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8354 fd.loop.v));
8356 /* After the loop, add exit clauses. */
8357 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
8359 if (ctx->cancellable)
8360 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
8362 gimple_seq_add_seq (&body, dlist);
8364 if (rclauses)
8366 gimple_seq_add_seq (&tred_ilist, body);
8367 body = tred_ilist;
8370 body = maybe_catch_exception (body);
8372 if (!phony_loop)
8374 /* Region exit marker goes at the end of the loop body. */
8375 gimple *g = gimple_build_omp_return (fd.have_nowait);
8376 gimple_seq_add_stmt (&body, g);
8378 gimple_seq_add_seq (&body, tred_dlist);
8380 maybe_add_implicit_barrier_cancel (ctx, g, &body);
8382 if (rclauses)
8383 OMP_CLAUSE_DECL (rclauses) = rtmp;
8386 /* Add OpenACC joining and reduction markers just after the loop. */
8387 if (oacc_tail)
8388 gimple_seq_add_seq (&body, oacc_tail);
8390 pop_gimplify_context (new_stmt);
8392 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8393 maybe_remove_omp_member_access_dummy_vars (new_stmt);
8394 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
8395 if (BLOCK_VARS (block))
8396 TREE_USED (block) = 1;
8398 gimple_bind_set_body (new_stmt, body);
8399 gimple_omp_set_body (stmt, NULL);
8400 gimple_omp_for_set_pre_body (stmt, NULL);
8403 /* Callback for walk_stmts. Check whether the walked statement sequence
8404 contains exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS and nothing else. */
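/* E.g. for

     #pragma omp parallel
     {
       #pragma omp for
       for (...) ...
     }

   the walk leaves *INFO at 1, letting the caller mark the pair as a
   combined 'parallel for'; any other statement, or a second workshare,
   forces *INFO to -1.  */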
8406 static tree
8407 check_combined_parallel (gimple_stmt_iterator *gsi_p,
8408 bool *handled_ops_p,
8409 struct walk_stmt_info *wi)
8411 int *info = (int *) wi->info;
8412 gimple *stmt = gsi_stmt (*gsi_p);
8414 *handled_ops_p = true;
8415 switch (gimple_code (stmt))
8417 WALK_SUBSTMTS;
8419 case GIMPLE_DEBUG:
8420 break;
8421 case GIMPLE_OMP_FOR:
8422 case GIMPLE_OMP_SECTIONS:
8423 *info = *info == 0 ? 1 : -1;
8424 break;
8425 default:
8426 *info = -1;
8427 break;
8429 return NULL;
8432 struct omp_taskcopy_context
8434 /* This field must be at the beginning, as we do "inheritance": Some
8435 callback functions for tree-inline.c (e.g., omp_copy_decl)
8436 receive a copy_body_data pointer that is up-casted to an
8437 omp_context pointer. */
8438 copy_body_data cb;
8439 omp_context *ctx;
8442 static tree
8443 task_copyfn_copy_decl (tree var, copy_body_data *cb)
8445 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
8447 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
8448 return create_tmp_var (TREE_TYPE (var));
8450 return var;
8453 static tree
8454 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
8456 tree name, new_fields = NULL, type, f;
8458 type = lang_hooks.types.make_type (RECORD_TYPE);
8459 name = DECL_NAME (TYPE_NAME (orig_type));
8460 name = build_decl (gimple_location (tcctx->ctx->stmt),
8461 TYPE_DECL, name, type);
8462 TYPE_NAME (type) = name;
8464 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
8466 tree new_f = copy_node (f);
8467 DECL_CONTEXT (new_f) = type;
8468 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
8469 TREE_CHAIN (new_f) = new_fields;
8470 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
8471 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
8472 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
8473 &tcctx->cb, NULL);
8474 new_fields = new_f;
8475 tcctx->cb.decl_map->put (f, new_f);
8477 TYPE_FIELDS (type) = nreverse (new_fields);
8478 layout_type (type);
8479 return type;
8482 /* Create task copyfn. */
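/* The generated function receives pointers to the destination and source
   records and copy-constructs the task's firstprivate data, e.g. roughly:

     void foo._omp_cpyfn.1 (struct .omp_data_s *dst,
                            struct .omp_data_s *src)
     {
       dst->a = src->a;                  // shared: copy the pointer
       copy_ctor (&dst->b, &src->b);     // firstprivate with a copy ctor
     }

   (the names above are illustrative only).  */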
8484 static void
8485 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
8487 struct function *child_cfun;
8488 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
8489 tree record_type, srecord_type, bind, list;
8490 bool record_needs_remap = false, srecord_needs_remap = false;
8491 splay_tree_node n;
8492 struct omp_taskcopy_context tcctx;
8493 location_t loc = gimple_location (task_stmt);
8494 size_t looptempno = 0;
8496 child_fn = gimple_omp_task_copy_fn (task_stmt);
8497 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
8498 gcc_assert (child_cfun->cfg == NULL);
8499 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
8501 /* Reset DECL_CONTEXT on function arguments. */
8502 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
8503 DECL_CONTEXT (t) = child_fn;
8505 /* Populate the function. */
8506 push_gimplify_context ();
8507 push_cfun (child_cfun);
8509 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
8510 TREE_SIDE_EFFECTS (bind) = 1;
8511 list = NULL;
8512 DECL_SAVED_TREE (child_fn) = bind;
8513 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
8515 /* Remap src and dst argument types if needed. */
8516 record_type = ctx->record_type;
8517 srecord_type = ctx->srecord_type;
8518 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8519 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
8521 record_needs_remap = true;
8522 break;
8524 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
8525 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
8527 srecord_needs_remap = true;
8528 break;
8531 if (record_needs_remap || srecord_needs_remap)
8533 memset (&tcctx, '\0', sizeof (tcctx));
8534 tcctx.cb.src_fn = ctx->cb.src_fn;
8535 tcctx.cb.dst_fn = child_fn;
8536 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
8537 gcc_checking_assert (tcctx.cb.src_node);
8538 tcctx.cb.dst_node = tcctx.cb.src_node;
8539 tcctx.cb.src_cfun = ctx->cb.src_cfun;
8540 tcctx.cb.copy_decl = task_copyfn_copy_decl;
8541 tcctx.cb.eh_lp_nr = 0;
8542 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
8543 tcctx.cb.decl_map = new hash_map<tree, tree>;
8544 tcctx.ctx = ctx;
8546 if (record_needs_remap)
8547 record_type = task_copyfn_remap_type (&tcctx, record_type);
8548 if (srecord_needs_remap)
8549 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
8551 else
8552 tcctx.cb.decl_map = NULL;
8554 arg = DECL_ARGUMENTS (child_fn);
8555 TREE_TYPE (arg) = build_pointer_type (record_type);
8556 sarg = DECL_CHAIN (arg);
8557 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
8559 /* First pass: initialize temporaries used in record_type and srecord_type
8560 sizes and field offsets. */
8561 if (tcctx.cb.decl_map)
8562 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8563 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8565 tree *p;
8567 decl = OMP_CLAUSE_DECL (c);
8568 p = tcctx.cb.decl_map->get (decl);
8569 if (p == NULL)
8570 continue;
8571 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8572 sf = (tree) n->value;
8573 sf = *tcctx.cb.decl_map->get (sf);
8574 src = build_simple_mem_ref_loc (loc, sarg);
8575 src = omp_build_component_ref (src, sf);
8576 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
8577 append_to_statement_list (t, &list);
8580 /* Second pass: copy shared var pointers and copy-construct non-VLA
8581 firstprivate vars. */
8582 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8583 switch (OMP_CLAUSE_CODE (c))
8585 splay_tree_key key;
8586 case OMP_CLAUSE_SHARED:
8587 decl = OMP_CLAUSE_DECL (c);
8588 key = (splay_tree_key) decl;
8589 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8590 key = (splay_tree_key) &DECL_UID (decl);
8591 n = splay_tree_lookup (ctx->field_map, key);
8592 if (n == NULL)
8593 break;
8594 f = (tree) n->value;
8595 if (tcctx.cb.decl_map)
8596 f = *tcctx.cb.decl_map->get (f);
8597 n = splay_tree_lookup (ctx->sfield_map, key);
8598 sf = (tree) n->value;
8599 if (tcctx.cb.decl_map)
8600 sf = *tcctx.cb.decl_map->get (sf);
8601 src = build_simple_mem_ref_loc (loc, sarg);
8602 src = omp_build_component_ref (src, sf);
8603 dst = build_simple_mem_ref_loc (loc, arg);
8604 dst = omp_build_component_ref (dst, f);
8605 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8606 append_to_statement_list (t, &list);
8607 break;
8608 case OMP_CLAUSE_REDUCTION:
8609 case OMP_CLAUSE_IN_REDUCTION:
8610 decl = OMP_CLAUSE_DECL (c);
8611 if (TREE_CODE (decl) == MEM_REF)
8613 decl = TREE_OPERAND (decl, 0);
8614 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8615 decl = TREE_OPERAND (decl, 0);
8616 if (TREE_CODE (decl) == INDIRECT_REF
8617 || TREE_CODE (decl) == ADDR_EXPR)
8618 decl = TREE_OPERAND (decl, 0);
8620 key = (splay_tree_key) decl;
8621 n = splay_tree_lookup (ctx->field_map, key);
8622 if (n == NULL)
8623 break;
8624 f = (tree) n->value;
8625 if (tcctx.cb.decl_map)
8626 f = *tcctx.cb.decl_map->get (f);
8627 n = splay_tree_lookup (ctx->sfield_map, key);
8628 sf = (tree) n->value;
8629 if (tcctx.cb.decl_map)
8630 sf = *tcctx.cb.decl_map->get (sf);
8631 src = build_simple_mem_ref_loc (loc, sarg);
8632 src = omp_build_component_ref (src, sf);
8633 if (decl != OMP_CLAUSE_DECL (c)
8634 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8635 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8636 src = build_simple_mem_ref_loc (loc, src);
8637 dst = build_simple_mem_ref_loc (loc, arg);
8638 dst = omp_build_component_ref (dst, f);
8639 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8640 append_to_statement_list (t, &list);
8641 break;
8642 case OMP_CLAUSE__LOOPTEMP_:
8643 /* Fields for the first two _looptemp_ clauses are initialized by
8644 GOMP_taskloop*; the rest are handled like firstprivate. */
8645 if (looptempno < 2)
8647 looptempno++;
8648 break;
8650 /* FALLTHRU */
8651 case OMP_CLAUSE__REDUCTEMP_:
8652 case OMP_CLAUSE_FIRSTPRIVATE:
8653 decl = OMP_CLAUSE_DECL (c);
8654 if (is_variable_sized (decl))
8655 break;
8656 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8657 if (n == NULL)
8658 break;
8659 f = (tree) n->value;
8660 if (tcctx.cb.decl_map)
8661 f = *tcctx.cb.decl_map->get (f);
8662 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8663 if (n != NULL)
8665 sf = (tree) n->value;
8666 if (tcctx.cb.decl_map)
8667 sf = *tcctx.cb.decl_map->get (sf);
8668 src = build_simple_mem_ref_loc (loc, sarg);
8669 src = omp_build_component_ref (src, sf);
8670 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
8671 src = build_simple_mem_ref_loc (loc, src);
8673 else
8674 src = decl;
8675 dst = build_simple_mem_ref_loc (loc, arg);
8676 dst = omp_build_component_ref (dst, f);
8677 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
8678 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8679 else
8680 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
8681 append_to_statement_list (t, &list);
8682 break;
8683 case OMP_CLAUSE_PRIVATE:
8684 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8685 break;
8686 decl = OMP_CLAUSE_DECL (c);
8687 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8688 f = (tree) n->value;
8689 if (tcctx.cb.decl_map)
8690 f = *tcctx.cb.decl_map->get (f);
8691 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8692 if (n != NULL)
8694 sf = (tree) n->value;
8695 if (tcctx.cb.decl_map)
8696 sf = *tcctx.cb.decl_map->get (sf);
8697 src = build_simple_mem_ref_loc (loc, sarg);
8698 src = omp_build_component_ref (src, sf);
8699 if (use_pointer_for_field (decl, NULL))
8700 src = build_simple_mem_ref_loc (loc, src);
8702 else
8703 src = decl;
8704 dst = build_simple_mem_ref_loc (loc, arg);
8705 dst = omp_build_component_ref (dst, f);
8706 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8707 append_to_statement_list (t, &list);
8708 break;
8709 default:
8710 break;
8713 /* Last pass: handle VLA firstprivates. */
8714 if (tcctx.cb.decl_map)
8715 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8716 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8718 tree ind, ptr, df;
8720 decl = OMP_CLAUSE_DECL (c);
8721 if (!is_variable_sized (decl))
8722 continue;
8723 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8724 if (n == NULL)
8725 continue;
8726 f = (tree) n->value;
8727 f = *tcctx.cb.decl_map->get (f);
8728 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
8729 ind = DECL_VALUE_EXPR (decl);
8730 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
8731 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
8732 n = splay_tree_lookup (ctx->sfield_map,
8733 (splay_tree_key) TREE_OPERAND (ind, 0));
8734 sf = (tree) n->value;
8735 sf = *tcctx.cb.decl_map->get (sf);
8736 src = build_simple_mem_ref_loc (loc, sarg);
8737 src = omp_build_component_ref (src, sf);
8738 src = build_simple_mem_ref_loc (loc, src);
8739 dst = build_simple_mem_ref_loc (loc, arg);
8740 dst = omp_build_component_ref (dst, f);
8741 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
8742 append_to_statement_list (t, &list);
8743 n = splay_tree_lookup (ctx->field_map,
8744 (splay_tree_key) TREE_OPERAND (ind, 0));
8745 df = (tree) n->value;
8746 df = *tcctx.cb.decl_map->get (df);
8747 ptr = build_simple_mem_ref_loc (loc, arg);
8748 ptr = omp_build_component_ref (ptr, df);
8749 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
8750 build_fold_addr_expr_loc (loc, dst));
8751 append_to_statement_list (t, &list);
8754 t = build1 (RETURN_EXPR, void_type_node, NULL);
8755 append_to_statement_list (t, &list);
8757 if (tcctx.cb.decl_map)
8758 delete tcctx.cb.decl_map;
8759 pop_gimplify_context (NULL);
8760 BIND_EXPR_BODY (bind) = list;
8761 pop_cfun ();
8764 static void
8765 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
8767 tree c, clauses;
8768 gimple *g;
8769 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
8771 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
8772 gcc_assert (clauses);
8773 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8774 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8775 switch (OMP_CLAUSE_DEPEND_KIND (c))
8777 case OMP_CLAUSE_DEPEND_LAST:
8778 /* Lowering already done at gimplification. */
8779 return;
8780 case OMP_CLAUSE_DEPEND_IN:
8781 cnt[2]++;
8782 break;
8783 case OMP_CLAUSE_DEPEND_OUT:
8784 case OMP_CLAUSE_DEPEND_INOUT:
8785 cnt[0]++;
8786 break;
8787 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8788 cnt[1]++;
8789 break;
8790 case OMP_CLAUSE_DEPEND_DEPOBJ:
8791 cnt[3]++;
8792 break;
8793 case OMP_CLAUSE_DEPEND_SOURCE:
8794 case OMP_CLAUSE_DEPEND_SINK:
8795 /* FALLTHRU */
8796 default:
8797 gcc_unreachable ();
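/* Two array layouts are produced below.  The legacy one, used when only
   in/out/inout dependencies appear, is

     { total, n_out_inout, addr, ... }

   while the presence of mutexinoutset or depobj dependencies selects

     { 0, total, n_out_inout, n_mutexinoutset, n_in, addr, ... }

   where the leading zero lets the runtime distinguish the new layout and
   the addresses are grouped as out/inout, then mutexinoutset, then in,
   then depobj.  */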
8799 if (cnt[1] || cnt[3])
8800 idx = 5;
8801 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
8802 tree type = build_array_type_nelts (ptr_type_node, total + idx);
8803 tree array = create_tmp_var (type);
8804 TREE_ADDRESSABLE (array) = 1;
8805 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8806 NULL_TREE);
8807 if (idx == 5)
8809 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
8810 gimple_seq_add_stmt (iseq, g);
8811 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8812 NULL_TREE);
8814 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
8815 gimple_seq_add_stmt (iseq, g);
8816 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
8818 r = build4 (ARRAY_REF, ptr_type_node, array,
8819 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
8820 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
8821 gimple_seq_add_stmt (iseq, g);
8823 for (i = 0; i < 4; i++)
8825 if (cnt[i] == 0)
8826 continue;
8827 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8828 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
8829 continue;
8830 else
8832 switch (OMP_CLAUSE_DEPEND_KIND (c))
8834 case OMP_CLAUSE_DEPEND_IN:
8835 if (i != 2)
8836 continue;
8837 break;
8838 case OMP_CLAUSE_DEPEND_OUT:
8839 case OMP_CLAUSE_DEPEND_INOUT:
8840 if (i != 0)
8841 continue;
8842 break;
8843 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8844 if (i != 1)
8845 continue;
8846 break;
8847 case OMP_CLAUSE_DEPEND_DEPOBJ:
8848 if (i != 3)
8849 continue;
8850 break;
8851 default:
8852 gcc_unreachable ();
8854 tree t = OMP_CLAUSE_DECL (c);
8855 t = fold_convert (ptr_type_node, t);
8856 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
8857 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
8858 NULL_TREE, NULL_TREE);
8859 g = gimple_build_assign (r, t);
8860 gimple_seq_add_stmt (iseq, g);
8863 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8864 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8865 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8866 OMP_CLAUSE_CHAIN (c) = *pclauses;
8867 *pclauses = c;
8868 tree clobber = build_constructor (type, NULL);
8869 TREE_THIS_VOLATILE (clobber) = 1;
8870 g = gimple_build_assign (array, clobber);
8871 gimple_seq_add_stmt (oseq, g);
8874 /* Lower the OpenMP parallel or task directive in the current statement
8875 in GSI_P. CTX holds context information for the directive. */
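/* The directive itself survives lowering (it is only removed by
   pass_expand_omp); here it is wrapped into a bind of roughly the form

     {
       .omp_data_o.x = x;            // send clauses (ilist)
       #pragma omp parallel/task     // body already rewritten
       <copy-back and clobbers>      // olist
     }

   with a further enclosing bind for depend clause and task reduction
   setup/teardown when those are present.  (A schematic sketch.)  */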
8877 static void
8878 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8880 tree clauses;
8881 tree child_fn, t;
8882 gimple *stmt = gsi_stmt (*gsi_p);
8883 gbind *par_bind, *bind, *dep_bind = NULL;
8884 gimple_seq par_body;
8885 location_t loc = gimple_location (stmt);
8887 clauses = gimple_omp_taskreg_clauses (stmt);
8888 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8889 && gimple_omp_task_taskwait_p (stmt))
8891 par_bind = NULL;
8892 par_body = NULL;
8894 else
8896 par_bind
8897 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
8898 par_body = gimple_bind_body (par_bind);
8900 child_fn = ctx->cb.dst_fn;
8901 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
8902 && !gimple_omp_parallel_combined_p (stmt))
8904 struct walk_stmt_info wi;
8905 int ws_num = 0;
8907 memset (&wi, 0, sizeof (wi));
8908 wi.info = &ws_num;
8909 wi.val_only = true;
8910 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
8911 if (ws_num == 1)
8912 gimple_omp_parallel_set_combined_p (stmt, true);
8914 gimple_seq dep_ilist = NULL;
8915 gimple_seq dep_olist = NULL;
8916 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8917 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
8919 push_gimplify_context ();
8920 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
8921 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
8922 &dep_ilist, &dep_olist);
8925 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8926 && gimple_omp_task_taskwait_p (stmt))
8928 if (dep_bind)
8930 gsi_replace (gsi_p, dep_bind, true);
8931 gimple_bind_add_seq (dep_bind, dep_ilist);
8932 gimple_bind_add_stmt (dep_bind, stmt);
8933 gimple_bind_add_seq (dep_bind, dep_olist);
8934 pop_gimplify_context (dep_bind);
8936 return;
8939 if (ctx->srecord_type)
8940 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
8942 gimple_seq tskred_ilist = NULL;
8943 gimple_seq tskred_olist = NULL;
8944 if ((is_task_ctx (ctx)
8945 && gimple_omp_task_taskloop_p (ctx->stmt)
8946 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
8947 OMP_CLAUSE_REDUCTION))
8948 || (is_parallel_ctx (ctx)
8949 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
8950 OMP_CLAUSE__REDUCTEMP_)))
8952 if (dep_bind == NULL)
8954 push_gimplify_context ();
8955 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
8957 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
8958 : OMP_PARALLEL,
8959 gimple_omp_taskreg_clauses (ctx->stmt),
8960 &tskred_ilist, &tskred_olist);
8963 push_gimplify_context ();
8965 gimple_seq par_olist = NULL;
8966 gimple_seq par_ilist = NULL;
8967 gimple_seq par_rlist = NULL;
8968 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
8969 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
8970 if (phony_construct && ctx->record_type)
8972 gcc_checking_assert (!ctx->receiver_decl);
8973 ctx->receiver_decl = create_tmp_var
8974 (build_reference_type (ctx->record_type), ".omp_rec");
8976 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
8977 lower_omp (&par_body, ctx);
8978 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
8979 lower_reduction_clauses (clauses, &par_rlist, ctx);
8981 /* Declare all the variables created by mapping and the variables
8982 declared in the scope of the parallel body. */
8983 record_vars_into (ctx->block_vars, child_fn);
8984 maybe_remove_omp_member_access_dummy_vars (par_bind);
8985 record_vars_into (gimple_bind_vars (par_bind), child_fn);
8987 if (ctx->record_type)
8989 ctx->sender_decl
8990 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
8991 : ctx->record_type, ".omp_data_o");
8992 DECL_NAMELESS (ctx->sender_decl) = 1;
8993 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
8994 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
8997 gimple_seq olist = NULL;
8998 gimple_seq ilist = NULL;
8999 lower_send_clauses (clauses, &ilist, &olist, ctx);
9000 lower_send_shared_vars (&ilist, &olist, ctx);
9002 if (ctx->record_type)
9004 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9005 TREE_THIS_VOLATILE (clobber) = 1;
9006 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9007 clobber));
9010 /* Once all the expansions are done, sequence all the different
9011 fragments inside gimple_omp_body. */
9013 gimple_seq new_body = NULL;
9015 if (ctx->record_type)
9017 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9018 /* fixup_child_record_type might have changed receiver_decl's type. */
9019 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9020 gimple_seq_add_stmt (&new_body,
9021 gimple_build_assign (ctx->receiver_decl, t));
9024 gimple_seq_add_seq (&new_body, par_ilist);
9025 gimple_seq_add_seq (&new_body, par_body);
9026 gimple_seq_add_seq (&new_body, par_rlist);
9027 if (ctx->cancellable)
9028 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
9029 gimple_seq_add_seq (&new_body, par_olist);
9030 new_body = maybe_catch_exception (new_body);
9031 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
9032 gimple_seq_add_stmt (&new_body,
9033 gimple_build_omp_continue (integer_zero_node,
9034 integer_zero_node));
9035 if (!phony_construct)
9037 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9038 gimple_omp_set_body (stmt, new_body);
9041 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
9042 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9043 else
9044 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
9045 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9046 gimple_bind_add_seq (bind, ilist);
9047 if (!phony_construct)
9048 gimple_bind_add_stmt (bind, stmt);
9049 else
9050 gimple_bind_add_seq (bind, new_body);
9051 gimple_bind_add_seq (bind, olist);
9053 pop_gimplify_context (NULL);
9055 if (dep_bind)
9057 gimple_bind_add_seq (dep_bind, dep_ilist);
9058 gimple_bind_add_seq (dep_bind, tskred_ilist);
9059 gimple_bind_add_stmt (dep_bind, bind);
9060 gimple_bind_add_seq (dep_bind, tskred_olist);
9061 gimple_bind_add_seq (dep_bind, dep_olist);
9062 pop_gimplify_context (dep_bind);
9066 /* Lower the GIMPLE_OMP_TARGET in the current statement
9067 in GSI_P. CTX holds context information for the directive. */
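/* MAP_CNT counts the map/firstprivate/use_device_ptr/is_device_ptr
   clauses that need a slot in the data-marshalling arrays
   (.omp_data_arr and friends) built later in this function.  */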
9069 static void
9070 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9072 tree clauses;
9073 tree child_fn, t, c;
9074 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
9075 gbind *tgt_bind, *bind, *dep_bind = NULL;
9076 gimple_seq tgt_body, olist, ilist, fplist, new_body;
9077 location_t loc = gimple_location (stmt);
9078 bool offloaded, data_region;
9079 unsigned int map_cnt = 0;
9081 offloaded = is_gimple_omp_offloaded (stmt);
9082 switch (gimple_omp_target_kind (stmt))
9084 case GF_OMP_TARGET_KIND_REGION:
9085 case GF_OMP_TARGET_KIND_UPDATE:
9086 case GF_OMP_TARGET_KIND_ENTER_DATA:
9087 case GF_OMP_TARGET_KIND_EXIT_DATA:
9088 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
9089 case GF_OMP_TARGET_KIND_OACC_KERNELS:
9090 case GF_OMP_TARGET_KIND_OACC_UPDATE:
9091 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
9092 case GF_OMP_TARGET_KIND_OACC_DECLARE:
9093 data_region = false;
9094 break;
9095 case GF_OMP_TARGET_KIND_DATA:
9096 case GF_OMP_TARGET_KIND_OACC_DATA:
9097 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
9098 data_region = true;
9099 break;
9100 default:
9101 gcc_unreachable ();
9104 clauses = gimple_omp_target_clauses (stmt);
9106 gimple_seq dep_ilist = NULL;
9107 gimple_seq dep_olist = NULL;
9108 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
9110 push_gimplify_context ();
9111 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9112 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
9113 &dep_ilist, &dep_olist);
9116 tgt_bind = NULL;
9117 tgt_body = NULL;
9118 if (offloaded)
9120 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
9121 tgt_body = gimple_bind_body (tgt_bind);
9123 else if (data_region)
9124 tgt_body = gimple_omp_body (stmt);
9125 child_fn = ctx->cb.dst_fn;
9127 push_gimplify_context ();
9128 fplist = NULL;
9130 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9131 switch (OMP_CLAUSE_CODE (c))
9133 tree var, x;
9135 default:
9136 break;
9137 case OMP_CLAUSE_MAP:
9138 #if CHECKING_P
9139 /* First check what we're prepared to handle in the following. */
9140 switch (OMP_CLAUSE_MAP_KIND (c))
9142 case GOMP_MAP_ALLOC:
9143 case GOMP_MAP_TO:
9144 case GOMP_MAP_FROM:
9145 case GOMP_MAP_TOFROM:
9146 case GOMP_MAP_POINTER:
9147 case GOMP_MAP_TO_PSET:
9148 case GOMP_MAP_DELETE:
9149 case GOMP_MAP_RELEASE:
9150 case GOMP_MAP_ALWAYS_TO:
9151 case GOMP_MAP_ALWAYS_FROM:
9152 case GOMP_MAP_ALWAYS_TOFROM:
9153 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9154 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9155 case GOMP_MAP_STRUCT:
9156 case GOMP_MAP_ALWAYS_POINTER:
9157 break;
9158 case GOMP_MAP_FORCE_ALLOC:
9159 case GOMP_MAP_FORCE_TO:
9160 case GOMP_MAP_FORCE_FROM:
9161 case GOMP_MAP_FORCE_TOFROM:
9162 case GOMP_MAP_FORCE_PRESENT:
9163 case GOMP_MAP_FORCE_DEVICEPTR:
9164 case GOMP_MAP_DEVICE_RESIDENT:
9165 case GOMP_MAP_LINK:
9166 gcc_assert (is_gimple_omp_oacc (stmt));
9167 break;
9168 default:
9169 gcc_unreachable ();
9171 #endif
9172 /* FALLTHRU */
9173 case OMP_CLAUSE_TO:
9174 case OMP_CLAUSE_FROM:
9175 oacc_firstprivate:
9176 var = OMP_CLAUSE_DECL (c);
9177 if (!DECL_P (var))
9179 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9180 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9181 && (OMP_CLAUSE_MAP_KIND (c)
9182 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
9183 map_cnt++;
9184 continue;
9187 if (DECL_SIZE (var)
9188 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9190 tree var2 = DECL_VALUE_EXPR (var);
9191 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9192 var2 = TREE_OPERAND (var2, 0);
9193 gcc_assert (DECL_P (var2));
9194 var = var2;
9197 if (offloaded
9198 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9199 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9200 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9202 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9204 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
9205 && varpool_node::get_create (var)->offloadable)
9206 continue;
9208 tree type = build_pointer_type (TREE_TYPE (var));
9209 tree new_var = lookup_decl (var, ctx);
9210 x = create_tmp_var_raw (type, get_name (new_var));
9211 gimple_add_tmp_var (x);
9212 x = build_simple_mem_ref (x);
9213 SET_DECL_VALUE_EXPR (new_var, x);
9214 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9216 continue;
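/* Sketch of the effect (temporary's name illustrative): for an
   "int a[10]" mapped via GOMP_MAP_FIRSTPRIVATE_POINTER, references
   to A inside the region are rewritten to *A.tmp_ptr through the
   DECL_VALUE_EXPR set above; the temporary pointer itself is only
   filled in during the second clause walk further below.  */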
9219 if (!maybe_lookup_field (var, ctx))
9220 continue;
9222 /* Don't remap oacc parallel reduction variables, because the
9223 intermediate result must be local to each gang. */
9224 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9225 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
9227 x = build_receiver_ref (var, true, ctx);
9228 tree new_var = lookup_decl (var, ctx);
9230 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9231 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9232 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9233 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9234 x = build_simple_mem_ref (x);
9235 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9237 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9238 if (omp_is_reference (new_var)
9239 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
9241 /* Create a local object to hold the instance
9242 value. */
9243 tree type = TREE_TYPE (TREE_TYPE (new_var));
9244 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
9245 tree inst = create_tmp_var (type, id);
9246 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
9247 x = build_fold_addr_expr (inst);
9249 gimplify_assign (new_var, x, &fplist);
9251 else if (DECL_P (new_var))
9253 SET_DECL_VALUE_EXPR (new_var, x);
9254 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9256 else
9257 gcc_unreachable ();
9259 map_cnt++;
9260 break;
9262 case OMP_CLAUSE_FIRSTPRIVATE:
9263 if (is_oacc_parallel (ctx))
9264 goto oacc_firstprivate;
9265 map_cnt++;
9266 var = OMP_CLAUSE_DECL (c);
9267 if (!omp_is_reference (var)
9268 && !is_gimple_reg_type (TREE_TYPE (var)))
9270 tree new_var = lookup_decl (var, ctx);
9271 if (is_variable_sized (var))
9273 tree pvar = DECL_VALUE_EXPR (var);
9274 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9275 pvar = TREE_OPERAND (pvar, 0);
9276 gcc_assert (DECL_P (pvar));
9277 tree new_pvar = lookup_decl (pvar, ctx);
9278 x = build_fold_indirect_ref (new_pvar);
9279 TREE_THIS_NOTRAP (x) = 1;
9281 else
9282 x = build_receiver_ref (var, true, ctx);
9283 SET_DECL_VALUE_EXPR (new_var, x);
9284 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9286 break;
9288 case OMP_CLAUSE_PRIVATE:
9289 if (is_gimple_omp_oacc (ctx->stmt))
9290 break;
9291 var = OMP_CLAUSE_DECL (c);
9292 if (is_variable_sized (var))
9294 tree new_var = lookup_decl (var, ctx);
9295 tree pvar = DECL_VALUE_EXPR (var);
9296 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9297 pvar = TREE_OPERAND (pvar, 0);
9298 gcc_assert (DECL_P (pvar));
9299 tree new_pvar = lookup_decl (pvar, ctx);
9300 x = build_fold_indirect_ref (new_pvar);
9301 TREE_THIS_NOTRAP (x) = 1;
9302 SET_DECL_VALUE_EXPR (new_var, x);
9303 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9305 break;
9307 case OMP_CLAUSE_USE_DEVICE_PTR:
9308 case OMP_CLAUSE_IS_DEVICE_PTR:
9309 var = OMP_CLAUSE_DECL (c);
9310 map_cnt++;
9311 if (is_variable_sized (var))
9313 tree new_var = lookup_decl (var, ctx);
9314 tree pvar = DECL_VALUE_EXPR (var);
9315 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9316 pvar = TREE_OPERAND (pvar, 0);
9317 gcc_assert (DECL_P (pvar));
9318 tree new_pvar = lookup_decl (pvar, ctx);
9319 x = build_fold_indirect_ref (new_pvar);
9320 TREE_THIS_NOTRAP (x) = 1;
9321 SET_DECL_VALUE_EXPR (new_var, x);
9322 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9324 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9326 tree new_var = lookup_decl (var, ctx);
9327 tree type = build_pointer_type (TREE_TYPE (var));
9328 x = create_tmp_var_raw (type, get_name (new_var));
9329 gimple_add_tmp_var (x);
9330 x = build_simple_mem_ref (x);
9331 SET_DECL_VALUE_EXPR (new_var, x);
9332 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9334 else
9336 tree new_var = lookup_decl (var, ctx);
9337 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
9338 gimple_add_tmp_var (x);
9339 SET_DECL_VALUE_EXPR (new_var, x);
9340 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9342 break;
9345 if (offloaded)
9347 target_nesting_level++;
9348 lower_omp (&tgt_body, ctx);
9349 target_nesting_level--;
9351 else if (data_region)
9352 lower_omp (&tgt_body, ctx);
9354 if (offloaded)
9356 /* Declare all the variables created by mapping and the variables
9357 declared in the scope of the target body. */
9358 record_vars_into (ctx->block_vars, child_fn);
9359 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
9360 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9363 olist = NULL;
9364 ilist = NULL;
9365 if (ctx->record_type)
9367 ctx->sender_decl
9368 = create_tmp_var (ctx->record_type, ".omp_data_arr");
9369 DECL_NAMELESS (ctx->sender_decl) = 1;
9370 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9371 t = make_tree_vec (3);
9372 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9373 TREE_VEC_ELT (t, 1)
9374 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9375 ".omp_data_sizes");
9376 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9377 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9378 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9379 tree tkind_type = short_unsigned_type_node;
9380 int talign_shift = 8;
9381 TREE_VEC_ELT (t, 2)
9382 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
9383 ".omp_data_kinds");
9384 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9385 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9386 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9387 gimple_omp_target_set_data_arg (stmt, t);
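/* At this point STMT's data argument is a TREE_VEC of three parallel
   arrays.  Roughly (a sketch, not the verbatim IL):
     .omp_data_arr[i]   -- host address (or value) of mapping i
     .omp_data_sizes[i] -- its size in bytes
     .omp_data_kinds[i] -- its map kind plus encoded alignment
   which pass_expand_omp later hands to the libgomp entry point
   (e.g. GOMP_target_ext for OpenMP target regions).  */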
9389 vec<constructor_elt, va_gc> *vsize;
9390 vec<constructor_elt, va_gc> *vkind;
9391 vec_alloc (vsize, map_cnt);
9392 vec_alloc (vkind, map_cnt);
9393 unsigned int map_idx = 0;
9395 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9396 switch (OMP_CLAUSE_CODE (c))
9398 tree ovar, nc, s, purpose, var, x, type;
9399 unsigned int talign;
9401 default:
9402 break;
9404 case OMP_CLAUSE_MAP:
9405 case OMP_CLAUSE_TO:
9406 case OMP_CLAUSE_FROM:
9407 oacc_firstprivate_map:
9408 nc = c;
9409 ovar = OMP_CLAUSE_DECL (c);
9410 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9411 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9412 || (OMP_CLAUSE_MAP_KIND (c)
9413 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
9414 break;
9415 if (!DECL_P (ovar))
9417 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9418 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9420 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9421 == get_base_address (ovar));
9422 nc = OMP_CLAUSE_CHAIN (c);
9423 ovar = OMP_CLAUSE_DECL (nc);
9425 else
9427 tree x = build_sender_ref (ovar, ctx);
9428 tree v
9429 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9430 gimplify_assign (x, v, &ilist);
9431 nc = NULL_TREE;
9434 else
9436 if (DECL_SIZE (ovar)
9437 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9439 tree ovar2 = DECL_VALUE_EXPR (ovar);
9440 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9441 ovar2 = TREE_OPERAND (ovar2, 0);
9442 gcc_assert (DECL_P (ovar2));
9443 ovar = ovar2;
9445 if (!maybe_lookup_field (ovar, ctx))
9446 continue;
9449 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9450 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9451 talign = DECL_ALIGN_UNIT (ovar);
9452 if (nc)
9454 var = lookup_decl_in_outer_ctx (ovar, ctx);
9455 x = build_sender_ref (ovar, ctx);
9457 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9458 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9459 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9460 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9462 gcc_assert (offloaded);
9463 tree avar
9464 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
9465 mark_addressable (avar);
9466 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9467 talign = DECL_ALIGN_UNIT (avar);
9468 avar = build_fold_addr_expr (avar);
9469 gimplify_assign (x, avar, &ilist);
9471 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9473 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9474 if (!omp_is_reference (var))
9476 if (is_gimple_reg (var)
9477 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9478 TREE_NO_WARNING (var) = 1;
9479 var = build_fold_addr_expr (var);
9481 else
9482 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9483 gimplify_assign (x, var, &ilist);
9485 else if (is_gimple_reg (var))
9487 gcc_assert (offloaded);
9488 tree avar = create_tmp_var (TREE_TYPE (var));
9489 mark_addressable (avar);
9490 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
9491 if (GOMP_MAP_COPY_TO_P (map_kind)
9492 || map_kind == GOMP_MAP_POINTER
9493 || map_kind == GOMP_MAP_TO_PSET
9494 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9496 /* If we need to initialize a temporary
9497 with VAR because it is not addressable, and
9498 the variable hasn't been initialized yet, then
9499 we'll get a warning for the store to avar.
9500 Don't warn in that case; the mapping might
9501 be implicit. */
9502 TREE_NO_WARNING (var) = 1;
9503 gimplify_assign (avar, var, &ilist);
9505 avar = build_fold_addr_expr (avar);
9506 gimplify_assign (x, avar, &ilist);
9507 if ((GOMP_MAP_COPY_FROM_P (map_kind)
9508 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9509 && !TYPE_READONLY (TREE_TYPE (var)))
9511 x = unshare_expr (x);
9512 x = build_simple_mem_ref (x);
9513 gimplify_assign (var, x, &olist);
9516 else
9518 var = build_fold_addr_expr (var);
9519 gimplify_assign (x, var, &ilist);
9522 s = NULL_TREE;
9523 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9525 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9526 s = TREE_TYPE (ovar);
9527 if (TREE_CODE (s) == REFERENCE_TYPE)
9528 s = TREE_TYPE (s);
9529 s = TYPE_SIZE_UNIT (s);
9531 else
9532 s = OMP_CLAUSE_SIZE (c);
9533 if (s == NULL_TREE)
9534 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9535 s = fold_convert (size_type_node, s);
9536 purpose = size_int (map_idx++);
9537 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9538 if (TREE_CODE (s) != INTEGER_CST)
9539 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9541 unsigned HOST_WIDE_INT tkind, tkind_zero;
9542 switch (OMP_CLAUSE_CODE (c))
9544 case OMP_CLAUSE_MAP:
9545 tkind = OMP_CLAUSE_MAP_KIND (c);
9546 tkind_zero = tkind;
9547 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
9548 switch (tkind)
9550 case GOMP_MAP_ALLOC:
9551 case GOMP_MAP_TO:
9552 case GOMP_MAP_FROM:
9553 case GOMP_MAP_TOFROM:
9554 case GOMP_MAP_ALWAYS_TO:
9555 case GOMP_MAP_ALWAYS_FROM:
9556 case GOMP_MAP_ALWAYS_TOFROM:
9557 case GOMP_MAP_RELEASE:
9558 case GOMP_MAP_FORCE_TO:
9559 case GOMP_MAP_FORCE_FROM:
9560 case GOMP_MAP_FORCE_TOFROM:
9561 case GOMP_MAP_FORCE_PRESENT:
9562 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
9563 break;
9564 case GOMP_MAP_DELETE:
9565 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
9566 default:
9567 break;
9569 if (tkind_zero != tkind)
9571 if (integer_zerop (s))
9572 tkind = tkind_zero;
9573 else if (integer_nonzerop (s))
9574 tkind_zero = tkind;
9576 break;
9577 case OMP_CLAUSE_FIRSTPRIVATE:
9578 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9579 tkind = GOMP_MAP_TO;
9580 tkind_zero = tkind;
9581 break;
9582 case OMP_CLAUSE_TO:
9583 tkind = GOMP_MAP_TO;
9584 tkind_zero = tkind;
9585 break;
9586 case OMP_CLAUSE_FROM:
9587 tkind = GOMP_MAP_FROM;
9588 tkind_zero = tkind;
9589 break;
9590 default:
9591 gcc_unreachable ();
9593 gcc_checking_assert (tkind
9594 < (HOST_WIDE_INT_C (1U) << talign_shift));
9595 gcc_checking_assert (tkind_zero
9596 < (HOST_WIDE_INT_C (1U) << talign_shift));
9597 talign = ceil_log2 (talign);
9598 tkind |= talign << talign_shift;
9599 tkind_zero |= talign << talign_shift;
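/* Worked example: GOMP_MAP_TOFROM (0x3) on an 8-byte-aligned object
   yields ceil_log2 (8) == 3, so the encoded kind-and-alignment is
   (3 << talign_shift) | 0x3 == 0x303.  */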
9600 gcc_checking_assert (tkind
9601 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9602 gcc_checking_assert (tkind_zero
9603 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9604 if (tkind == tkind_zero)
9605 x = build_int_cstu (tkind_type, tkind);
9606 else
9608 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
9609 x = build3 (COND_EXPR, tkind_type,
9610 fold_build2 (EQ_EXPR, boolean_type_node,
9611 unshare_expr (s), size_zero_node),
9612 build_int_cstu (tkind_type, tkind_zero),
9613 build_int_cstu (tkind_type, tkind));
9615 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
9616 if (nc && nc != c)
9617 c = nc;
9618 break;
9620 case OMP_CLAUSE_FIRSTPRIVATE:
9621 if (is_oacc_parallel (ctx))
9622 goto oacc_firstprivate_map;
9623 ovar = OMP_CLAUSE_DECL (c);
9624 if (omp_is_reference (ovar))
9625 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9626 else
9627 talign = DECL_ALIGN_UNIT (ovar);
9628 var = lookup_decl_in_outer_ctx (ovar, ctx);
9629 x = build_sender_ref (ovar, ctx);
9630 tkind = GOMP_MAP_FIRSTPRIVATE;
9631 type = TREE_TYPE (ovar);
9632 if (omp_is_reference (ovar))
9633 type = TREE_TYPE (type);
9634 if ((INTEGRAL_TYPE_P (type)
9635 && TYPE_PRECISION (type) <= POINTER_SIZE)
9636 || TREE_CODE (type) == POINTER_TYPE)
9638 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
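/* GOMP_MAP_FIRSTPRIVATE_INT passes the value itself in the slot that
   would otherwise hold a host address: below, VAR is converted to a
   pointer-sized integer and stored directly into the sender ref, so
   no host memory needs to be mapped for small scalars.  */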
9639 tree t = var;
9640 if (omp_is_reference (var))
9641 t = build_simple_mem_ref (var);
9642 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9643 TREE_NO_WARNING (var) = 1;
9644 if (TREE_CODE (type) != POINTER_TYPE)
9645 t = fold_convert (pointer_sized_int_node, t);
9646 t = fold_convert (TREE_TYPE (x), t);
9647 gimplify_assign (x, t, &ilist);
9649 else if (omp_is_reference (var))
9650 gimplify_assign (x, var, &ilist);
9651 else if (is_gimple_reg (var))
9653 tree avar = create_tmp_var (TREE_TYPE (var));
9654 mark_addressable (avar);
9655 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9656 TREE_NO_WARNING (var) = 1;
9657 gimplify_assign (avar, var, &ilist);
9658 avar = build_fold_addr_expr (avar);
9659 gimplify_assign (x, avar, &ilist);
9661 else
9663 var = build_fold_addr_expr (var);
9664 gimplify_assign (x, var, &ilist);
9666 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
9667 s = size_int (0);
9668 else if (omp_is_reference (ovar))
9669 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9670 else
9671 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9672 s = fold_convert (size_type_node, s);
9673 purpose = size_int (map_idx++);
9674 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9675 if (TREE_CODE (s) != INTEGER_CST)
9676 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9678 gcc_checking_assert (tkind
9679 < (HOST_WIDE_INT_C (1U) << talign_shift));
9680 talign = ceil_log2 (talign);
9681 tkind |= talign << talign_shift;
9682 gcc_checking_assert (tkind
9683 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9684 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9685 build_int_cstu (tkind_type, tkind));
9686 break;
9688 case OMP_CLAUSE_USE_DEVICE_PTR:
9689 case OMP_CLAUSE_IS_DEVICE_PTR:
9690 ovar = OMP_CLAUSE_DECL (c);
9691 var = lookup_decl_in_outer_ctx (ovar, ctx);
9692 x = build_sender_ref (ovar, ctx);
9693 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9694 tkind = GOMP_MAP_USE_DEVICE_PTR;
9695 else
9696 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9697 type = TREE_TYPE (ovar);
9698 if (TREE_CODE (type) == ARRAY_TYPE)
9699 var = build_fold_addr_expr (var);
9700 else
9702 if (omp_is_reference (ovar))
9704 type = TREE_TYPE (type);
9705 if (TREE_CODE (type) != ARRAY_TYPE)
9706 var = build_simple_mem_ref (var);
9707 var = fold_convert (TREE_TYPE (x), var);
9710 gimplify_assign (x, var, &ilist);
9711 s = size_int (0);
9712 purpose = size_int (map_idx++);
9713 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9714 gcc_checking_assert (tkind
9715 < (HOST_WIDE_INT_C (1U) << talign_shift));
9716 gcc_checking_assert (tkind
9717 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9718 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9719 build_int_cstu (tkind_type, tkind));
9720 break;
9723 gcc_assert (map_idx == map_cnt);
9725 DECL_INITIAL (TREE_VEC_ELT (t, 1))
9726 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
9727 DECL_INITIAL (TREE_VEC_ELT (t, 2))
9728 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
9729 for (int i = 1; i <= 2; i++)
9730 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
9732 gimple_seq initlist = NULL;
9733 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
9734 TREE_VEC_ELT (t, i)),
9735 &initlist, true, NULL_TREE);
9736 gimple_seq_add_seq (&ilist, initlist);
9738 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
9739 NULL);
9740 TREE_THIS_VOLATILE (clobber) = 1;
9741 gimple_seq_add_stmt (&olist,
9742 gimple_build_assign (TREE_VEC_ELT (t, i),
9743 clobber));
9746 tree clobber = build_constructor (ctx->record_type, NULL);
9747 TREE_THIS_VOLATILE (clobber) = 1;
9748 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9749 clobber));
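/* These empty, TREE_THIS_VOLATILE CONSTRUCTORs are GIMPLE's clobber
   idiom: they mark .omp_data_arr (and the size/kind arrays when not
   static) as dead once the region is over, letting later passes
   reuse the stack slots.  */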
9752 /* Once all the expansions are done, sequence all the different
9753 fragments inside gimple_omp_body. */
9755 new_body = NULL;
9757 if (offloaded
9758 && ctx->record_type)
9760 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9761 /* fixup_child_record_type might have changed receiver_decl's type. */
9762 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9763 gimple_seq_add_stmt (&new_body,
9764 gimple_build_assign (ctx->receiver_decl, t));
9766 gimple_seq_add_seq (&new_body, fplist);
9768 if (offloaded || data_region)
9770 tree prev = NULL_TREE;
9771 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9772 switch (OMP_CLAUSE_CODE (c))
9774 tree var, x;
9775 default:
9776 break;
9777 case OMP_CLAUSE_FIRSTPRIVATE:
9778 if (is_gimple_omp_oacc (ctx->stmt))
9779 break;
9780 var = OMP_CLAUSE_DECL (c);
9781 if (omp_is_reference (var)
9782 || is_gimple_reg_type (TREE_TYPE (var)))
9784 tree new_var = lookup_decl (var, ctx);
9785 tree type;
9786 type = TREE_TYPE (var);
9787 if (omp_is_reference (var))
9788 type = TREE_TYPE (type);
9789 if ((INTEGRAL_TYPE_P (type)
9790 && TYPE_PRECISION (type) <= POINTER_SIZE)
9791 || TREE_CODE (type) == POINTER_TYPE)
9793 x = build_receiver_ref (var, false, ctx);
9794 if (TREE_CODE (type) != POINTER_TYPE)
9795 x = fold_convert (pointer_sized_int_node, x);
9796 x = fold_convert (type, x);
9797 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9798 fb_rvalue);
9799 if (omp_is_reference (var))
9801 tree v = create_tmp_var_raw (type, get_name (var));
9802 gimple_add_tmp_var (v);
9803 TREE_ADDRESSABLE (v) = 1;
9804 gimple_seq_add_stmt (&new_body,
9805 gimple_build_assign (v, x));
9806 x = build_fold_addr_expr (v);
9808 gimple_seq_add_stmt (&new_body,
9809 gimple_build_assign (new_var, x));
9811 else
9813 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
9814 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9815 fb_rvalue);
9816 gimple_seq_add_stmt (&new_body,
9817 gimple_build_assign (new_var, x));
9820 else if (is_variable_sized (var))
9822 tree pvar = DECL_VALUE_EXPR (var);
9823 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9824 pvar = TREE_OPERAND (pvar, 0);
9825 gcc_assert (DECL_P (pvar));
9826 tree new_var = lookup_decl (pvar, ctx);
9827 x = build_receiver_ref (var, false, ctx);
9828 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9829 gimple_seq_add_stmt (&new_body,
9830 gimple_build_assign (new_var, x));
9832 break;
9833 case OMP_CLAUSE_PRIVATE:
9834 if (is_gimple_omp_oacc (ctx->stmt))
9835 break;
9836 var = OMP_CLAUSE_DECL (c);
9837 if (omp_is_reference (var))
9839 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9840 tree new_var = lookup_decl (var, ctx);
9841 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
9842 if (TREE_CONSTANT (x))
9844 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
9845 get_name (var));
9846 gimple_add_tmp_var (x);
9847 TREE_ADDRESSABLE (x) = 1;
9848 x = build_fold_addr_expr_loc (clause_loc, x);
9850 else
9851 break;
9853 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
9854 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9855 gimple_seq_add_stmt (&new_body,
9856 gimple_build_assign (new_var, x));
9858 break;
9859 case OMP_CLAUSE_USE_DEVICE_PTR:
9860 case OMP_CLAUSE_IS_DEVICE_PTR:
9861 var = OMP_CLAUSE_DECL (c);
9862 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9863 x = build_sender_ref (var, ctx);
9864 else
9865 x = build_receiver_ref (var, false, ctx);
9866 if (is_variable_sized (var))
9868 tree pvar = DECL_VALUE_EXPR (var);
9869 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9870 pvar = TREE_OPERAND (pvar, 0);
9871 gcc_assert (DECL_P (pvar));
9872 tree new_var = lookup_decl (pvar, ctx);
9873 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9874 gimple_seq_add_stmt (&new_body,
9875 gimple_build_assign (new_var, x));
9877 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9879 tree new_var = lookup_decl (var, ctx);
9880 new_var = DECL_VALUE_EXPR (new_var);
9881 gcc_assert (TREE_CODE (new_var) == MEM_REF);
9882 new_var = TREE_OPERAND (new_var, 0);
9883 gcc_assert (DECL_P (new_var));
9884 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9885 gimple_seq_add_stmt (&new_body,
9886 gimple_build_assign (new_var, x));
9888 else
9890 tree type = TREE_TYPE (var);
9891 tree new_var = lookup_decl (var, ctx);
9892 if (omp_is_reference (var))
9894 type = TREE_TYPE (type);
9895 if (TREE_CODE (type) != ARRAY_TYPE)
9897 tree v = create_tmp_var_raw (type, get_name (var));
9898 gimple_add_tmp_var (v);
9899 TREE_ADDRESSABLE (v) = 1;
9900 x = fold_convert (type, x);
9901 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9902 fb_rvalue);
9903 gimple_seq_add_stmt (&new_body,
9904 gimple_build_assign (v, x));
9905 x = build_fold_addr_expr (v);
9908 new_var = DECL_VALUE_EXPR (new_var);
9909 x = fold_convert (TREE_TYPE (new_var), x);
9910 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9911 gimple_seq_add_stmt (&new_body,
9912 gimple_build_assign (new_var, x));
9914 break;
9916 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
9917 so that firstprivate vars that may be needed to hold the
9918 OMP_CLAUSE_SIZE have already been handled. Similarly for
9919 OMP_CLAUSE_PRIVATE on VLAs or references to VLAs. */
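/* A rough example (user code ours): for
     int *p;
     #pragma omp target map(p[2:8])
   P receives a GOMP_MAP_FIRSTPRIVATE_POINTER clause whose
   OMP_CLAUSE_SIZE holds the section bias (2 * sizeof (int)); the code
   below negates that bias and adds it to the received section address,
   so that P[2] inside the region addresses the first mapped element.  */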
9920 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9921 switch (OMP_CLAUSE_CODE (c))
9923 tree var;
9924 default:
9925 break;
9926 case OMP_CLAUSE_MAP:
9927 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9928 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9930 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9931 poly_int64 offset = 0;
9932 gcc_assert (prev);
9933 var = OMP_CLAUSE_DECL (c);
9934 if (DECL_P (var)
9935 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
9936 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
9937 ctx))
9938 && varpool_node::get_create (var)->offloadable)
9939 break;
9940 if (TREE_CODE (var) == INDIRECT_REF
9941 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
9942 var = TREE_OPERAND (var, 0);
9943 if (TREE_CODE (var) == COMPONENT_REF)
9945 var = get_addr_base_and_unit_offset (var, &offset);
9946 gcc_assert (var != NULL_TREE && DECL_P (var));
9948 else if (DECL_SIZE (var)
9949 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9951 tree var2 = DECL_VALUE_EXPR (var);
9952 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9953 var2 = TREE_OPERAND (var2, 0);
9954 gcc_assert (DECL_P (var2));
9955 var = var2;
9957 tree new_var = lookup_decl (var, ctx), x;
9958 tree type = TREE_TYPE (new_var);
9959 bool is_ref;
9960 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
9961 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9962 == COMPONENT_REF))
9964 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
9965 is_ref = true;
9966 new_var = build2 (MEM_REF, type,
9967 build_fold_addr_expr (new_var),
9968 build_int_cst (build_pointer_type (type),
9969 offset));
9971 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
9973 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
9974 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
9975 new_var = build2 (MEM_REF, type,
9976 build_fold_addr_expr (new_var),
9977 build_int_cst (build_pointer_type (type),
9978 offset));
9980 else
9981 is_ref = omp_is_reference (var);
9982 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9983 is_ref = false;
9984 bool ref_to_array = false;
9985 if (is_ref)
9987 type = TREE_TYPE (type);
9988 if (TREE_CODE (type) == ARRAY_TYPE)
9990 type = build_pointer_type (type);
9991 ref_to_array = true;
9994 else if (TREE_CODE (type) == ARRAY_TYPE)
9996 tree decl2 = DECL_VALUE_EXPR (new_var);
9997 gcc_assert (TREE_CODE (decl2) == MEM_REF);
9998 decl2 = TREE_OPERAND (decl2, 0);
9999 gcc_assert (DECL_P (decl2));
10000 new_var = decl2;
10001 type = TREE_TYPE (new_var);
10003 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
10004 x = fold_convert_loc (clause_loc, type, x);
10005 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
10007 tree bias = OMP_CLAUSE_SIZE (c);
10008 if (DECL_P (bias))
10009 bias = lookup_decl (bias, ctx);
10010 bias = fold_convert_loc (clause_loc, sizetype, bias);
10011 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
10012 bias);
10013 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
10014 TREE_TYPE (x), x, bias);
10016 if (ref_to_array)
10017 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10018 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10019 if (is_ref && !ref_to_array)
10021 tree t = create_tmp_var_raw (type, get_name (var));
10022 gimple_add_tmp_var (t);
10023 TREE_ADDRESSABLE (t) = 1;
10024 gimple_seq_add_stmt (&new_body,
10025 gimple_build_assign (t, x));
10026 x = build_fold_addr_expr_loc (clause_loc, t);
10028 gimple_seq_add_stmt (&new_body,
10029 gimple_build_assign (new_var, x));
10030 prev = NULL_TREE;
10032 else if (OMP_CLAUSE_CHAIN (c)
10033 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
10034 == OMP_CLAUSE_MAP
10035 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10036 == GOMP_MAP_FIRSTPRIVATE_POINTER
10037 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10038 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10039 prev = c;
10040 break;
10041 case OMP_CLAUSE_PRIVATE:
10042 var = OMP_CLAUSE_DECL (c);
10043 if (is_variable_sized (var))
10045 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10046 tree new_var = lookup_decl (var, ctx);
10047 tree pvar = DECL_VALUE_EXPR (var);
10048 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10049 pvar = TREE_OPERAND (pvar, 0);
10050 gcc_assert (DECL_P (pvar));
10051 tree new_pvar = lookup_decl (pvar, ctx);
10052 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10053 tree al = size_int (DECL_ALIGN (var));
10054 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
10055 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10056 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
10057 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10058 gimple_seq_add_stmt (&new_body,
10059 gimple_build_assign (new_pvar, x));
10061 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
10063 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10064 tree new_var = lookup_decl (var, ctx);
10065 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10066 if (TREE_CONSTANT (x))
10067 break;
10068 else
10070 tree atmp
10071 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10072 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
10073 tree al = size_int (TYPE_ALIGN (rtype));
10074 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10077 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10078 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10079 gimple_seq_add_stmt (&new_body,
10080 gimple_build_assign (new_var, x));
10082 break;
10085 gimple_seq fork_seq = NULL;
10086 gimple_seq join_seq = NULL;
10088 if (is_oacc_parallel (ctx))
10090 /* If there are reductions on the offloaded region itself, treat
10091 them as a dummy GANG loop. */
10092 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
10094 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
10095 false, NULL, NULL, &fork_seq, &join_seq, ctx);
10098 gimple_seq_add_seq (&new_body, fork_seq);
10099 gimple_seq_add_seq (&new_body, tgt_body);
10100 gimple_seq_add_seq (&new_body, join_seq);
10102 if (offloaded)
10103 new_body = maybe_catch_exception (new_body);
10105 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10106 gimple_omp_set_body (stmt, new_body);
10109 bind = gimple_build_bind (NULL, NULL,
10110 tgt_bind ? gimple_bind_block (tgt_bind)
10111 : NULL_TREE);
10112 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10113 gimple_bind_add_seq (bind, ilist);
10114 gimple_bind_add_stmt (bind, stmt);
10115 gimple_bind_add_seq (bind, olist);
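/* The resulting nesting is roughly
     dep_bind { dep_ilist; bind { ilist; STMT; olist } dep_olist }
   where the outer dep_bind (attached just below) exists only when
   depend clauses were present.  */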
10117 pop_gimplify_context (NULL);
10119 if (dep_bind)
10121 gimple_bind_add_seq (dep_bind, dep_ilist);
10122 gimple_bind_add_stmt (dep_bind, bind);
10123 gimple_bind_add_seq (dep_bind, dep_olist);
10124 pop_gimplify_context (dep_bind);
10128 /* Expand code for an OpenMP teams directive. */
10130 static void
10131 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10133 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
10134 push_gimplify_context ();
10136 tree block = make_node (BLOCK);
10137 gbind *bind = gimple_build_bind (NULL, NULL, block);
10138 gsi_replace (gsi_p, bind, true);
10139 gimple_seq bind_body = NULL;
10140 gimple_seq dlist = NULL;
10141 gimple_seq olist = NULL;
10143 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10144 OMP_CLAUSE_NUM_TEAMS);
10145 if (num_teams == NULL_TREE)
10146 num_teams = build_int_cst (unsigned_type_node, 0);
10147 else
10149 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
10150 num_teams = fold_convert (unsigned_type_node, num_teams);
10151 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
10153 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10154 OMP_CLAUSE_THREAD_LIMIT);
10155 if (thread_limit == NULL_TREE)
10156 thread_limit = build_int_cst (unsigned_type_node, 0);
10157 else
10159 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
10160 thread_limit = fold_convert (unsigned_type_node, thread_limit);
10161 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
10162 fb_rvalue);
10165 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
10166 &bind_body, &dlist, ctx, NULL);
10167 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
10168 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
10169 if (!gimple_omp_teams_grid_phony (teams_stmt))
10171 gimple_seq_add_stmt (&bind_body, teams_stmt);
10172 location_t loc = gimple_location (teams_stmt);
10173 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
10174 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
10175 gimple_set_location (call, loc);
10176 gimple_seq_add_stmt (&bind_body, call);
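/* E.g. (illustrative) "#pragma omp teams num_teams(4) thread_limit(64)"
   becomes a call GOMP_teams (4, 64) emitted just ahead of the teams
   body.  */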
10179 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
10180 gimple_omp_set_body (teams_stmt, NULL);
10181 gimple_seq_add_seq (&bind_body, olist);
10182 gimple_seq_add_seq (&bind_body, dlist);
10183 if (!gimple_omp_teams_grid_phony (teams_stmt))
10184 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
10185 gimple_bind_set_body (bind, bind_body);
10187 pop_gimplify_context (bind);
10189 gimple_bind_append_vars (bind, ctx->block_vars);
10190 BLOCK_VARS (block) = ctx->block_vars;
10191 if (BLOCK_VARS (block))
10192 TREE_USED (block) = 1;
10195 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
10197 static void
10198 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10200 gimple *stmt = gsi_stmt (*gsi_p);
10201 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10202 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
10203 gimple_build_omp_return (false));
10207 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
10208 regimplified. If DATA is non-NULL, lower_omp_1 is being invoked
10209 outside of an OMP context, but with task_shared_vars set. */
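/* For instance, a variable whose uses were rewritten during lowering
   to go through a DECL_VALUE_EXPR must have any statement that still
   mentions it regimplified, since the value expression is not
   necessarily a valid GIMPLE operand on its own.  (Illustrative
   summary; see the walkers below.)  */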
10211 static tree
10212 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
10213 void *data)
10215 tree t = *tp;
10217 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
10218 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
10219 return t;
10221 if (task_shared_vars
10222 && DECL_P (t)
10223 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
10224 return t;
10226 /* If a global variable has been privatized, TREE_CONSTANT on
10227 ADDR_EXPR might be wrong. */
10228 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
10229 recompute_tree_invariant_for_addr_expr (t);
10231 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
10232 return NULL_TREE;
10235 /* Data to be communicated between lower_omp_regimplify_operands and
10236 lower_omp_regimplify_operands_p. */
10238 struct lower_omp_regimplify_operands_data
10240 omp_context *ctx;
10241 vec<tree> *decls;
10244 /* Helper function for lower_omp_regimplify_operands. Find
10245 omp_member_access_dummy_var vars and temporarily adjust their
10246 DECL_VALUE_EXPRs if needed. */
10248 static tree
10249 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
10250 void *data)
10252 tree t = omp_member_access_dummy_var (*tp);
10253 if (t)
10255 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10256 lower_omp_regimplify_operands_data *ldata
10257 = (lower_omp_regimplify_operands_data *) wi->info;
10258 tree o = maybe_lookup_decl (t, ldata->ctx);
10259 if (o != t)
10261 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
10262 ldata->decls->safe_push (*tp);
10263 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
10264 SET_DECL_VALUE_EXPR (*tp, v);
10267 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
10268 return NULL_TREE;
10271 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
10272 of omp_member_access_dummy_var vars during regimplification. */
10274 static void
10275 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
10276 gimple_stmt_iterator *gsi_p)
10278 auto_vec<tree, 10> decls;
10279 if (ctx)
10281 struct walk_stmt_info wi;
10282 memset (&wi, '\0', sizeof (wi));
10283 struct lower_omp_regimplify_operands_data data;
10284 data.ctx = ctx;
10285 data.decls = &decls;
10286 wi.info = &data;
10287 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
10289 gimple_regimplify_operands (stmt, gsi_p);
10290 while (!decls.is_empty ())
10292 tree t = decls.pop ();
10293 tree v = decls.pop ();
10294 SET_DECL_VALUE_EXPR (t, v);
10298 static void
10299 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10301 gimple *stmt = gsi_stmt (*gsi_p);
10302 struct walk_stmt_info wi;
10303 gcall *call_stmt;
10305 if (gimple_has_location (stmt))
10306 input_location = gimple_location (stmt);
10308 if (task_shared_vars)
10309 memset (&wi, '\0', sizeof (wi));
10311 /* If we have issued syntax errors, avoid doing any heavy lifting.
10312 Just replace the OMP directives with a NOP to avoid
10313 confusing RTL expansion. */
10314 if (seen_error () && is_gimple_omp (stmt))
10316 gsi_replace (gsi_p, gimple_build_nop (), true);
10317 return;
10320 switch (gimple_code (stmt))
10322 case GIMPLE_COND:
10324 gcond *cond_stmt = as_a <gcond *> (stmt);
10325 if ((ctx || task_shared_vars)
10326 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
10327 lower_omp_regimplify_p,
10328 ctx ? NULL : &wi, NULL)
10329 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
10330 lower_omp_regimplify_p,
10331 ctx ? NULL : &wi, NULL)))
10332 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
10334 break;
10335 case GIMPLE_CATCH:
10336 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
10337 break;
10338 case GIMPLE_EH_FILTER:
10339 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
10340 break;
10341 case GIMPLE_TRY:
10342 lower_omp (gimple_try_eval_ptr (stmt), ctx);
10343 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
10344 break;
10345 case GIMPLE_TRANSACTION:
10346 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
10347 ctx);
10348 break;
10349 case GIMPLE_BIND:
10350 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
10351 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
10352 break;
10353 case GIMPLE_OMP_PARALLEL:
10354 case GIMPLE_OMP_TASK:
10355 ctx = maybe_lookup_ctx (stmt);
10356 gcc_assert (ctx);
10357 if (ctx->cancellable)
10358 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10359 lower_omp_taskreg (gsi_p, ctx);
10360 break;
10361 case GIMPLE_OMP_FOR:
10362 ctx = maybe_lookup_ctx (stmt);
10363 gcc_assert (ctx);
10364 if (ctx->cancellable)
10365 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10366 lower_omp_for (gsi_p, ctx);
10367 break;
10368 case GIMPLE_OMP_SECTIONS:
10369 ctx = maybe_lookup_ctx (stmt);
10370 gcc_assert (ctx);
10371 if (ctx->cancellable)
10372 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10373 lower_omp_sections (gsi_p, ctx);
10374 break;
10375 case GIMPLE_OMP_SINGLE:
10376 ctx = maybe_lookup_ctx (stmt);
10377 gcc_assert (ctx);
10378 lower_omp_single (gsi_p, ctx);
10379 break;
10380 case GIMPLE_OMP_MASTER:
10381 ctx = maybe_lookup_ctx (stmt);
10382 gcc_assert (ctx);
10383 lower_omp_master (gsi_p, ctx);
10384 break;
10385 case GIMPLE_OMP_TASKGROUP:
10386 ctx = maybe_lookup_ctx (stmt);
10387 gcc_assert (ctx);
10388 lower_omp_taskgroup (gsi_p, ctx);
10389 break;
10390 case GIMPLE_OMP_ORDERED:
10391 ctx = maybe_lookup_ctx (stmt);
10392 gcc_assert (ctx);
10393 lower_omp_ordered (gsi_p, ctx);
10394 break;
10395 case GIMPLE_OMP_CRITICAL:
10396 ctx = maybe_lookup_ctx (stmt);
10397 gcc_assert (ctx);
10398 lower_omp_critical (gsi_p, ctx);
10399 break;
10400 case GIMPLE_OMP_ATOMIC_LOAD:
10401 if ((ctx || task_shared_vars)
10402 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
10403 as_a <gomp_atomic_load *> (stmt)),
10404 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
10405 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
10406 break;
10407 case GIMPLE_OMP_TARGET:
10408 ctx = maybe_lookup_ctx (stmt);
10409 gcc_assert (ctx);
10410 lower_omp_target (gsi_p, ctx);
10411 break;
10412 case GIMPLE_OMP_TEAMS:
10413 ctx = maybe_lookup_ctx (stmt);
10414 gcc_assert (ctx);
10415 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
10416 lower_omp_taskreg (gsi_p, ctx);
10417 else
10418 lower_omp_teams (gsi_p, ctx);
10419 break;
10420 case GIMPLE_OMP_GRID_BODY:
10421 ctx = maybe_lookup_ctx (stmt);
10422 gcc_assert (ctx);
10423 lower_omp_grid_body (gsi_p, ctx);
10424 break;
10425 case GIMPLE_CALL:
10426 tree fndecl;
10427 call_stmt = as_a <gcall *> (stmt);
10428 fndecl = gimple_call_fndecl (call_stmt);
10429 if (fndecl
10430 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10431 switch (DECL_FUNCTION_CODE (fndecl))
10433 case BUILT_IN_GOMP_BARRIER:
10434 if (ctx == NULL)
10435 break;
10436 /* FALLTHRU */
10437 case BUILT_IN_GOMP_CANCEL:
10438 case BUILT_IN_GOMP_CANCELLATION_POINT:
10439 omp_context *cctx;
10440 cctx = ctx;
10441 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
10442 cctx = cctx->outer;
10443 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
10444 if (!cctx->cancellable)
10446 if (DECL_FUNCTION_CODE (fndecl)
10447 == BUILT_IN_GOMP_CANCELLATION_POINT)
10449 stmt = gimple_build_nop ();
10450 gsi_replace (gsi_p, stmt, false);
10452 break;
10454 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
10456 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
10457 gimple_call_set_fndecl (call_stmt, fndecl);
10458 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
10460 tree lhs;
10461 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
10462 gimple_call_set_lhs (call_stmt, lhs);
10463 tree fallthru_label;
10464 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
10465 gimple *g;
10466 g = gimple_build_label (fallthru_label);
10467 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10468 g = gimple_build_cond (NE_EXPR, lhs,
10469 fold_convert (TREE_TYPE (lhs),
10470 boolean_false_node),
10471 cctx->cancel_label, fallthru_label);
10472 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10473 break;
10474 default:
10475 break;
10477 /* FALLTHRU */
10478 default:
10479 if ((ctx || task_shared_vars)
10480 && walk_gimple_op (stmt, lower_omp_regimplify_p,
10481 ctx ? NULL : &wi))
10483 /* Just remove clobbers; this should happen only if we have
10484 "privatized" local addressable variables in SIMD regions.
10485 The clobber isn't needed in that case, and gimplifying the address
10486 of the ARRAY_REF into a pointer and creating a MEM_REF based
10487 clobber would create worse code than we get with the clobber
10488 dropped. */
10489 if (gimple_clobber_p (stmt))
10491 gsi_replace (gsi_p, gimple_build_nop (), true);
10492 break;
10494 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
10496 break;
10500 static void
10501 lower_omp (gimple_seq *body, omp_context *ctx)
10503 location_t saved_location = input_location;
10504 gimple_stmt_iterator gsi;
10505 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10506 lower_omp_1 (&gsi, ctx);
10507 /* During gimplification, we haven't folded statements inside offloading
10508 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
10509 if (target_nesting_level || taskreg_nesting_level)
10510 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10511 fold_stmt (&gsi);
10512 input_location = saved_location;
10515 /* Main entry point. */
10517 static unsigned int
10518 execute_lower_omp (void)
10520 gimple_seq body;
10521 int i;
10522 omp_context *ctx;
10524 /* This pass always runs, to provide PROP_gimple_lomp.
10525 But often, there is nothing to do. */
10526 if (flag_openacc == 0 && flag_openmp == 0
10527 && flag_openmp_simd == 0)
10528 return 0;
10530 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
10531 delete_omp_context);
10533 body = gimple_body (current_function_decl);
10535 if (hsa_gen_requested_p ())
10536 omp_grid_gridify_all_targets (&body);
10538 scan_omp (&body, NULL);
10539 gcc_assert (taskreg_nesting_level == 0);
10540 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
10541 finish_taskreg_scan (ctx);
10542 taskreg_contexts.release ();
10544 if (all_contexts->root)
10546 if (task_shared_vars)
10547 push_gimplify_context ();
10548 lower_omp (&body, NULL);
10549 if (task_shared_vars)
10550 pop_gimplify_context (NULL);
10553 if (all_contexts)
10555 splay_tree_delete (all_contexts);
10556 all_contexts = NULL;
10558 BITMAP_FREE (task_shared_vars);
10560 /* If the current function is a method, remove the artificial dummy
10561 VAR_DECLs created for non-static data member privatization; they
10562 aren't needed for debuginfo or anything else, have already been
10563 replaced everywhere in the IL, and cause problems with LTO. */
10564 if (DECL_ARGUMENTS (current_function_decl)
10565 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
10566 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
10567 == POINTER_TYPE))
10568 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
10569 return 0;
10572 namespace {
10574 const pass_data pass_data_lower_omp =
10576 GIMPLE_PASS, /* type */
10577 "omplower", /* name */
10578 OPTGROUP_OMP, /* optinfo_flags */
10579 TV_NONE, /* tv_id */
10580 PROP_gimple_any, /* properties_required */
10581 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
10582 0, /* properties_destroyed */
10583 0, /* todo_flags_start */
10584 0, /* todo_flags_finish */
10587 class pass_lower_omp : public gimple_opt_pass
10589 public:
10590 pass_lower_omp (gcc::context *ctxt)
10591 : gimple_opt_pass (pass_data_lower_omp, ctxt)
10594 /* opt_pass methods: */
10595 virtual unsigned int execute (function *) { return execute_lower_omp (); }
10597 }; // class pass_lower_omp
10599 } // anon namespace
10601 gimple_opt_pass *
10602 make_pass_lower_omp (gcc::context *ctxt)
10604 return new pass_lower_omp (ctxt);
10607 /* The following is a utility to diagnose structured block violations.
10608 It is not part of the "omplower" pass, as that's invoked too late. It
10609 should be invoked by the respective front ends after gimplification. */
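/* For example (code ours), given
     #pragma omp parallel
     { goto out; }
     out:;
   diagnose_sb_2 below reports "invalid branch to/from OpenMP
   structured block" and replaces the offending jump with a nop.  */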
10611 static splay_tree all_labels;
10613 /* Check for mismatched contexts and generate an error if needed. Return
10614 true if an error is detected. */
10616 static bool
10617 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
10618 gimple *branch_ctx, gimple *label_ctx)
10620 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
10621 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
10623 if (label_ctx == branch_ctx)
10624 return false;
10626 const char* kind = NULL;
10628 if (flag_openacc)
10630 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
10631 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
10633 gcc_checking_assert (kind == NULL);
10634 kind = "OpenACC";
10637 if (kind == NULL)
10639 gcc_checking_assert (flag_openmp || flag_openmp_simd);
10640 kind = "OpenMP";
10643 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
10644 so we could traverse it and issue a correct "exit" or "enter" error
10645 message upon a structured block violation.
10647 We built the context by tree_cons'ing up a list, but there is no
10648 easy counterpart in gimple tuples. It seems like far too much work
10649 for issuing exit/enter error messages. If someone really misses the
10650 distinct error message... patches welcome. */
10652 #if 0
10653 /* Try to avoid confusing the user by producing an error message
10654 with correct "exit" or "enter" verbiage. We prefer "exit"
10655 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
10656 if (branch_ctx == NULL)
10657 exit_p = false;
10658 else
10660 while (label_ctx)
10662 if (TREE_VALUE (label_ctx) == branch_ctx)
10664 exit_p = false;
10665 break;
10667 label_ctx = TREE_CHAIN (label_ctx);
10671 if (exit_p)
10672 error ("invalid exit from %s structured block", kind);
10673 else
10674 error ("invalid entry to %s structured block", kind);
10675 #endif
10677 /* If it's obvious we have an invalid entry, be specific about the error. */
10678 if (branch_ctx == NULL)
10679 error ("invalid entry to %s structured block", kind);
10680 else
10682 /* Otherwise, be vague and lazy, but efficient. */
10683 error ("invalid branch to/from %s structured block", kind);
10686 gsi_replace (gsi_p, gimple_build_nop (), false);
10687 return true;
10690 /* Pass 1: Create a minimal tree of structured blocks, and record
10691 where each label is found. */
10693 static tree
10694 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10695 struct walk_stmt_info *wi)
10697 gimple *context = (gimple *) wi->info;
10698 gimple *inner_context;
10699 gimple *stmt = gsi_stmt (*gsi_p);
10701 *handled_ops_p = true;
10703 switch (gimple_code (stmt))
10705 WALK_SUBSTMTS;
10707 case GIMPLE_OMP_PARALLEL:
10708 case GIMPLE_OMP_TASK:
10709 case GIMPLE_OMP_SECTIONS:
10710 case GIMPLE_OMP_SINGLE:
10711 case GIMPLE_OMP_SECTION:
10712 case GIMPLE_OMP_MASTER:
10713 case GIMPLE_OMP_ORDERED:
10714 case GIMPLE_OMP_CRITICAL:
10715 case GIMPLE_OMP_TARGET:
10716 case GIMPLE_OMP_TEAMS:
10717 case GIMPLE_OMP_TASKGROUP:
10718 /* The minimal context here is just the current OMP construct. */
10719 inner_context = stmt;
10720 wi->info = inner_context;
10721 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10722 wi->info = context;
10723 break;
10725 case GIMPLE_OMP_FOR:
10726 inner_context = stmt;
10727 wi->info = inner_context;
10728 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10729 walk them. */
10730 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10731 diagnose_sb_1, NULL, wi);
10732 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10733 wi->info = context;
10734 break;
10736 case GIMPLE_LABEL:
10737 splay_tree_insert (all_labels,
10738 (splay_tree_key) gimple_label_label (
10739 as_a <glabel *> (stmt)),
10740 (splay_tree_value) context);
10741 break;
10743 default:
10744 break;
10747 return NULL_TREE;
10750 /* Pass 2: Check each branch and see if its context differs from
10751 the destination label's context. */
10753 static tree
10754 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10755 struct walk_stmt_info *wi)
10757 gimple *context = (gimple *) wi->info;
10758 splay_tree_node n;
10759 gimple *stmt = gsi_stmt (*gsi_p);
10761 *handled_ops_p = true;
10763 switch (gimple_code (stmt))
10765 WALK_SUBSTMTS;
10767 case GIMPLE_OMP_PARALLEL:
10768 case GIMPLE_OMP_TASK:
10769 case GIMPLE_OMP_SECTIONS:
10770 case GIMPLE_OMP_SINGLE:
10771 case GIMPLE_OMP_SECTION:
10772 case GIMPLE_OMP_MASTER:
10773 case GIMPLE_OMP_ORDERED:
10774 case GIMPLE_OMP_CRITICAL:
10775 case GIMPLE_OMP_TARGET:
10776 case GIMPLE_OMP_TEAMS:
10777 case GIMPLE_OMP_TASKGROUP:
10778 wi->info = stmt;
10779 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10780 wi->info = context;
10781 break;
10783 case GIMPLE_OMP_FOR:
10784 wi->info = stmt;
10785 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10786 walk them. */
10787 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
10788 diagnose_sb_2, NULL, wi);
10789 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10790 wi->info = context;
10791 break;
10793 case GIMPLE_COND:
10795 gcond *cond_stmt = as_a <gcond *> (stmt);
10796 tree lab = gimple_cond_true_label (cond_stmt);
10797 if (lab)
10799 n = splay_tree_lookup (all_labels,
10800 (splay_tree_key) lab);
10801 diagnose_sb_0 (gsi_p, context,
10802 n ? (gimple *) n->value : NULL);
10804 lab = gimple_cond_false_label (cond_stmt);
10805 if (lab)
10807 n = splay_tree_lookup (all_labels,
10808 (splay_tree_key) lab);
10809 diagnose_sb_0 (gsi_p, context,
10810 n ? (gimple *) n->value : NULL);
10813 break;
10815 case GIMPLE_GOTO:
10817 tree lab = gimple_goto_dest (stmt);
10818 if (TREE_CODE (lab) != LABEL_DECL)
10819 break;
10821 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10822 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
10824 break;
10826 case GIMPLE_SWITCH:
10828 gswitch *switch_stmt = as_a <gswitch *> (stmt);
10829 unsigned int i;
10830 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
10832 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
10833 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10834 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
10835 break;
10838 break;
10840 case GIMPLE_RETURN:
10841 diagnose_sb_0 (gsi_p, context, NULL);
10842 break;
10844 default:
10845 break;
10848 return NULL_TREE;
10851 static unsigned int
10852 diagnose_omp_structured_block_errors (void)
10854 struct walk_stmt_info wi;
10855 gimple_seq body = gimple_body (current_function_decl);
10857 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
10859 memset (&wi, 0, sizeof (wi));
10860 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
10862 memset (&wi, 0, sizeof (wi));
10863 wi.want_locations = true;
10864 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
10866 gimple_set_body (current_function_decl, body);
10868 splay_tree_delete (all_labels);
10869 all_labels = NULL;
10871 return 0;
10874 namespace {
10876 const pass_data pass_data_diagnose_omp_blocks =
10878 GIMPLE_PASS, /* type */
10879 "*diagnose_omp_blocks", /* name */
10880 OPTGROUP_OMP, /* optinfo_flags */
10881 TV_NONE, /* tv_id */
10882 PROP_gimple_any, /* properties_required */
10883 0, /* properties_provided */
10884 0, /* properties_destroyed */
10885 0, /* todo_flags_start */
10886 0, /* todo_flags_finish */
10889 class pass_diagnose_omp_blocks : public gimple_opt_pass
10891 public:
10892 pass_diagnose_omp_blocks (gcc::context *ctxt)
10893 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
10896 /* opt_pass methods: */
10897 virtual bool gate (function *)
10899 return flag_openacc || flag_openmp || flag_openmp_simd;
10901 virtual unsigned int execute (function *)
10903 return diagnose_omp_structured_block_errors ();
10906 }; // class pass_diagnose_omp_blocks
10908 } // anon namespace
10910 gimple_opt_pass *
10911 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
10913 return new pass_diagnose_omp_blocks (ctxt);
10917 #include "gt-omp-low.h"