typeck.c (cp_truthvalue_conversion): Add tsubst_flags_t parameter and use it in calls...
[official-gcc.git] / gcc / omp-low.c
blob3e470afe32b018770bb9470f9ff71136d4b63108
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "alloc-pool.h"
56 #include "symbol-summary.h"
57 #include "tree-nested.h"
58 #include "context.h"
59 #include "gomp-constants.h"
60 #include "gimple-pretty-print.h"
61 #include "hsa-common.h"
62 #include "stringpool.h"
63 #include "attribs.h"
65 /* Lowering of OMP parallel and workshare constructs proceeds in two
66 phases. The first phase scans the function looking for OMP statements
67 and then for variables that must be replaced to satisfy data sharing
68 clauses. The second phase expands code for the constructs, as well as
69 re-gimplifying things when variables have been replaced with complex
70 expressions.
72 Final code generation is done by pass_expand_omp. The flowgraph is
73 scanned for regions which are then moved to a new
74 function, to be invoked by the thread library, or offloaded. */
76 /* Context structure. Used to store information about each parallel
77 directive in the code. */
79 struct omp_context
81 /* This field must be at the beginning, as we do "inheritance": Some
82 callback functions for tree-inline.c (e.g., omp_copy_decl)
83 receive a copy_body_data pointer that is up-casted to an
84 omp_context pointer. */
85 copy_body_data cb;
87 /* The tree of contexts corresponding to the encountered constructs. */
88 struct omp_context *outer;
89 gimple *stmt;
91 /* Map variables to fields in a structure that allows communication
92 between sending and receiving threads. */
93 splay_tree field_map;
94 tree record_type;
95 tree sender_decl;
96 tree receiver_decl;
98 /* These are used just by task contexts, if task firstprivate fn is
99 needed. srecord_type is used to communicate from the thread
100 that encountered the task construct to task firstprivate fn,
101 record_type is allocated by GOMP_task, initialized by task firstprivate
102 fn and passed to the task body fn. */
103 splay_tree sfield_map;
104 tree srecord_type;
106 /* A chain of variables to add to the top-level block surrounding the
107 construct. In the case of a parallel, this is in the child function. */
108 tree block_vars;
110 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
111 barriers should jump to during omplower pass. */
112 tree cancel_label;
114 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
115 otherwise. */
116 gimple *simt_stmt;
118 /* For task reductions registered in this context, a vector containing
119 the length of the private copies block (if constant, otherwise NULL)
120 and then offsets (if constant, otherwise NULL) for each entry. */
121 vec<tree> task_reductions;
123 /* A hash map from the reduction clauses to the registered array
124 elts. */
125 hash_map<tree, unsigned> *task_reduction_map;
127 /* And a hash map from the lastprivate(conditional:) variables to their
128 corresponding tracking loop iteration variables. */
129 hash_map<tree, tree> *lastprivate_conditional_map;
131 /* A tree_list of the reduction clauses in this context. */
132 tree local_reduction_clauses;
134 /* A tree_list of the reduction clauses in outer contexts. */
135 tree outer_reduction_clauses;
137 /* Nesting depth of this context. Used to beautify error messages re
138 invalid gotos. The outermost ctx is depth 1, with depth 0 being
139 reserved for the main body of the function. */
140 int depth;
142 /* True if this parallel directive is nested within another. */
143 bool is_nested;
145 /* True if this construct can be cancelled. */
146 bool cancellable;
148 /* True if lower_omp_1 should look up lastprivate conditional in parent
149 context. */
150 bool combined_into_simd_safelen1;
152 /* True if there is nested scan context with inclusive clause. */
153 bool scan_inclusive;
155 /* True if there is nested scan context with exclusive clause. */
156 bool scan_exclusive;
158 /* True in the second simd loop of for simd with inscan reductions. */
159 bool for_simd_scan_phase;
161 /* True if there is order(concurrent) clause on the construct. */
162 bool order_concurrent;
164 /* True if there is bind clause on the construct (i.e. a loop construct). */
165 bool loop_p;
168 static splay_tree all_contexts;
169 static int taskreg_nesting_level;
170 static int target_nesting_level;
171 static bitmap task_shared_vars;
172 static bitmap global_nonaddressable_vars;
173 static vec<omp_context *> taskreg_contexts;
175 static void scan_omp (gimple_seq *, omp_context *);
176 static tree scan_omp_1_op (tree *, int *, void *);
178 #define WALK_SUBSTMTS \
179 case GIMPLE_BIND: \
180 case GIMPLE_TRY: \
181 case GIMPLE_CATCH: \
182 case GIMPLE_EH_FILTER: \
183 case GIMPLE_TRANSACTION: \
184 /* The sub-statements for these should be walked. */ \
185 *handled_ops_p = false; \
186 break;
188 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
189 region. */
191 static bool
192 is_oacc_parallel_or_serial (omp_context *ctx)
194 enum gimple_code outer_type = gimple_code (ctx->stmt);
195 return ((outer_type == GIMPLE_OMP_TARGET)
196 && ((gimple_omp_target_kind (ctx->stmt)
197 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
198 || (gimple_omp_target_kind (ctx->stmt)
199 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
202 /* Return true if CTX corresponds to an oacc kernels region. */
204 static bool
205 is_oacc_kernels (omp_context *ctx)
207 enum gimple_code outer_type = gimple_code (ctx->stmt);
208 return ((outer_type == GIMPLE_OMP_TARGET)
209 && (gimple_omp_target_kind (ctx->stmt)
210 == GF_OMP_TARGET_KIND_OACC_KERNELS));
213 /* If DECL is the artificial dummy VAR_DECL created for non-static
214 data member privatization, return the underlying "this" parameter,
215 otherwise return NULL. */
217 tree
218 omp_member_access_dummy_var (tree decl)
220 if (!VAR_P (decl)
221 || !DECL_ARTIFICIAL (decl)
222 || !DECL_IGNORED_P (decl)
223 || !DECL_HAS_VALUE_EXPR_P (decl)
224 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
225 return NULL_TREE;
227 tree v = DECL_VALUE_EXPR (decl);
228 if (TREE_CODE (v) != COMPONENT_REF)
229 return NULL_TREE;
231 while (1)
232 switch (TREE_CODE (v))
234 case COMPONENT_REF:
235 case MEM_REF:
236 case INDIRECT_REF:
237 CASE_CONVERT:
238 case POINTER_PLUS_EXPR:
239 v = TREE_OPERAND (v, 0);
240 continue;
241 case PARM_DECL:
242 if (DECL_CONTEXT (v) == current_function_decl
243 && DECL_ARTIFICIAL (v)
244 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
245 return v;
246 return NULL_TREE;
247 default:
248 return NULL_TREE;
252 /* Helper for unshare_and_remap, called through walk_tree. */
254 static tree
255 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
257 tree *pair = (tree *) data;
258 if (*tp == pair[0])
260 *tp = unshare_expr (pair[1]);
261 *walk_subtrees = 0;
263 else if (IS_TYPE_OR_DECL_P (*tp))
264 *walk_subtrees = 0;
265 return NULL_TREE;
268 /* Return unshare_expr (X) with all occurrences of FROM
269 replaced with TO. */
271 static tree
272 unshare_and_remap (tree x, tree from, tree to)
274 tree pair[2] = { from, to };
275 x = unshare_expr (x);
276 walk_tree (&x, unshare_and_remap_1, pair, NULL);
277 return x;
280 /* Convenience function for calling scan_omp_1_op on tree operands. */
282 static inline tree
283 scan_omp_op (tree *tp, omp_context *ctx)
285 struct walk_stmt_info wi;
287 memset (&wi, 0, sizeof (wi));
288 wi.info = ctx;
289 wi.want_locations = true;
291 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
294 static void lower_omp (gimple_seq *, omp_context *);
295 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
296 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
298 /* Return true if CTX is for an omp parallel. */
300 static inline bool
301 is_parallel_ctx (omp_context *ctx)
303 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
307 /* Return true if CTX is for an omp task. */
309 static inline bool
310 is_task_ctx (omp_context *ctx)
312 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
316 /* Return true if CTX is for an omp taskloop. */
318 static inline bool
319 is_taskloop_ctx (omp_context *ctx)
321 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
322 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
326 /* Return true if CTX is for a host omp teams. */
328 static inline bool
329 is_host_teams_ctx (omp_context *ctx)
331 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
332 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
335 /* Return true if CTX is for an omp parallel or omp task or host omp teams
336 (the last one is strictly not a task region in OpenMP speak, but we
337 need to treat it similarly). */
339 static inline bool
340 is_taskreg_ctx (omp_context *ctx)
342 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
345 /* Return true if EXPR is variable sized. */
347 static inline bool
348 is_variable_sized (const_tree expr)
350 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
353 /* Lookup variables. The "maybe" form
354 allows for the variable form to not have been entered, otherwise we
355 assert that the variable must have been entered. */
357 static inline tree
358 lookup_decl (tree var, omp_context *ctx)
360 tree *n = ctx->cb.decl_map->get (var);
361 return *n;
364 static inline tree
365 maybe_lookup_decl (const_tree var, omp_context *ctx)
367 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
368 return n ? *n : NULL_TREE;
371 static inline tree
372 lookup_field (tree var, omp_context *ctx)
374 splay_tree_node n;
375 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
376 return (tree) n->value;
379 static inline tree
380 lookup_sfield (splay_tree_key key, omp_context *ctx)
382 splay_tree_node n;
383 n = splay_tree_lookup (ctx->sfield_map
384 ? ctx->sfield_map : ctx->field_map, key);
385 return (tree) n->value;
388 static inline tree
389 lookup_sfield (tree var, omp_context *ctx)
391 return lookup_sfield ((splay_tree_key) var, ctx);
394 static inline tree
395 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
397 splay_tree_node n;
398 n = splay_tree_lookup (ctx->field_map, key);
399 return n ? (tree) n->value : NULL_TREE;
402 static inline tree
403 maybe_lookup_field (tree var, omp_context *ctx)
405 return maybe_lookup_field ((splay_tree_key) var, ctx);
408 /* Return true if DECL should be copied by pointer. SHARED_CTX is
409 the parallel context if DECL is to be shared. */
411 static bool
412 use_pointer_for_field (tree decl, omp_context *shared_ctx)
414 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
415 || TYPE_ATOMIC (TREE_TYPE (decl)))
416 return true;
418 /* We can only use copy-in/copy-out semantics for shared variables
419 when we know the value is not accessible from an outer scope. */
420 if (shared_ctx)
422 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
424 /* ??? Trivially accessible from anywhere. But why would we even
425 be passing an address in this case? Should we simply assert
426 this to be false, or should we have a cleanup pass that removes
427 these from the list of mappings? */
428 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
429 return true;
431 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
432 without analyzing the expression whether or not its location
433 is accessible to anyone else. In the case of nested parallel
434 regions it certainly may be. */
435 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
436 return true;
438 /* Do not use copy-in/copy-out for variables that have their
439 address taken. */
440 if (is_global_var (decl))
442 /* For file scope vars, track whether we've seen them as
443 non-addressable initially and in that case, keep the same
444 answer for the duration of the pass, even when they are made
445 addressable later on e.g. through reduction expansion. Global
446 variables which weren't addressable before the pass will not
447 have their privatized copies address taken. See PR91216. */
448 if (!TREE_ADDRESSABLE (decl))
450 if (!global_nonaddressable_vars)
451 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
452 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
454 else if (!global_nonaddressable_vars
455 || !bitmap_bit_p (global_nonaddressable_vars,
456 DECL_UID (decl)))
457 return true;
459 else if (TREE_ADDRESSABLE (decl))
460 return true;
462 /* lower_send_shared_vars only uses copy-in, but not copy-out
463 for these. */
464 if (TREE_READONLY (decl)
465 || ((TREE_CODE (decl) == RESULT_DECL
466 || TREE_CODE (decl) == PARM_DECL)
467 && DECL_BY_REFERENCE (decl)))
468 return false;
470 /* Disallow copy-in/out in nested parallel if
471 decl is shared in outer parallel, otherwise
472 each thread could store the shared variable
473 in its own copy-in location, making the
474 variable no longer really shared. */
475 if (shared_ctx->is_nested)
477 omp_context *up;
479 for (up = shared_ctx->outer; up; up = up->outer)
480 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
481 break;
483 if (up)
485 tree c;
487 for (c = gimple_omp_taskreg_clauses (up->stmt);
488 c; c = OMP_CLAUSE_CHAIN (c))
489 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
490 && OMP_CLAUSE_DECL (c) == decl)
491 break;
493 if (c)
494 goto maybe_mark_addressable_and_ret;
498 /* For tasks avoid using copy-in/out. As tasks can be
499 deferred or executed in different thread, when GOMP_task
500 returns, the task hasn't necessarily terminated. */
501 if (is_task_ctx (shared_ctx))
503 tree outer;
504 maybe_mark_addressable_and_ret:
505 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
506 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
508 /* Taking address of OUTER in lower_send_shared_vars
509 might need regimplification of everything that uses the
510 variable. */
511 if (!task_shared_vars)
512 task_shared_vars = BITMAP_ALLOC (NULL);
513 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
514 TREE_ADDRESSABLE (outer) = 1;
516 return true;
520 return false;
523 /* Construct a new automatic decl similar to VAR. */
525 static tree
526 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
528 tree copy = copy_var_decl (var, name, type);
530 DECL_CONTEXT (copy) = current_function_decl;
531 DECL_CHAIN (copy) = ctx->block_vars;
532 /* If VAR is listed in task_shared_vars, it means it wasn't
533 originally addressable and is just because task needs to take
534 it's address. But we don't need to take address of privatizations
535 from that var. */
536 if (TREE_ADDRESSABLE (var)
537 && ((task_shared_vars
538 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
539 || (global_nonaddressable_vars
540 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
541 TREE_ADDRESSABLE (copy) = 0;
542 ctx->block_vars = copy;
544 return copy;
547 static tree
548 omp_copy_decl_1 (tree var, omp_context *ctx)
550 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
553 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
554 as appropriate. */
555 static tree
556 omp_build_component_ref (tree obj, tree field)
558 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
559 if (TREE_THIS_VOLATILE (field))
560 TREE_THIS_VOLATILE (ret) |= 1;
561 if (TREE_READONLY (field))
562 TREE_READONLY (ret) |= 1;
563 return ret;
566 /* Build tree nodes to access the field for VAR on the receiver side. */
568 static tree
569 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
571 tree x, field = lookup_field (var, ctx);
573 /* If the receiver record type was remapped in the child function,
574 remap the field into the new record type. */
575 x = maybe_lookup_field (field, ctx);
576 if (x != NULL)
577 field = x;
579 x = build_simple_mem_ref (ctx->receiver_decl);
580 TREE_THIS_NOTRAP (x) = 1;
581 x = omp_build_component_ref (x, field);
582 if (by_ref)
584 x = build_simple_mem_ref (x);
585 TREE_THIS_NOTRAP (x) = 1;
588 return x;
591 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
592 of a parallel, this is a component reference; for workshare constructs
593 this is some variable. */
595 static tree
596 build_outer_var_ref (tree var, omp_context *ctx,
597 enum omp_clause_code code = OMP_CLAUSE_ERROR)
599 tree x;
600 omp_context *outer = ctx->outer;
601 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
602 outer = outer->outer;
604 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
605 x = var;
606 else if (is_variable_sized (var))
608 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
609 x = build_outer_var_ref (x, ctx, code);
610 x = build_simple_mem_ref (x);
612 else if (is_taskreg_ctx (ctx))
614 bool by_ref = use_pointer_for_field (var, NULL);
615 x = build_receiver_ref (var, by_ref, ctx);
617 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
618 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
619 || ctx->loop_p
620 || (code == OMP_CLAUSE_PRIVATE
621 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
622 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
623 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
625 /* #pragma omp simd isn't a worksharing construct, and can reference
626 even private vars in its linear etc. clauses.
627 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
628 to private vars in all worksharing constructs. */
629 x = NULL_TREE;
630 if (outer && is_taskreg_ctx (outer))
631 x = lookup_decl (var, outer);
632 else if (outer)
633 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
634 if (x == NULL_TREE)
635 x = var;
637 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
639 gcc_assert (outer);
640 splay_tree_node n
641 = splay_tree_lookup (outer->field_map,
642 (splay_tree_key) &DECL_UID (var));
643 if (n == NULL)
645 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
646 x = var;
647 else
648 x = lookup_decl (var, outer);
650 else
652 tree field = (tree) n->value;
653 /* If the receiver record type was remapped in the child function,
654 remap the field into the new record type. */
655 x = maybe_lookup_field (field, outer);
656 if (x != NULL)
657 field = x;
659 x = build_simple_mem_ref (outer->receiver_decl);
660 x = omp_build_component_ref (x, field);
661 if (use_pointer_for_field (var, outer))
662 x = build_simple_mem_ref (x);
665 else if (outer)
667 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
669 outer = outer->outer;
670 gcc_assert (outer
671 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
673 x = lookup_decl (var, outer);
675 else if (omp_is_reference (var))
676 /* This can happen with orphaned constructs. If var is reference, it is
677 possible it is shared and as such valid. */
678 x = var;
679 else if (omp_member_access_dummy_var (var))
680 x = var;
681 else
682 gcc_unreachable ();
684 if (x == var)
686 tree t = omp_member_access_dummy_var (var);
687 if (t)
689 x = DECL_VALUE_EXPR (var);
690 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
691 if (o != t)
692 x = unshare_and_remap (x, t, o);
693 else
694 x = unshare_expr (x);
698 if (omp_is_reference (var))
699 x = build_simple_mem_ref (x);
701 return x;
704 /* Build tree nodes to access the field for VAR on the sender side. */
706 static tree
707 build_sender_ref (splay_tree_key key, omp_context *ctx)
709 tree field = lookup_sfield (key, ctx);
710 return omp_build_component_ref (ctx->sender_decl, field);
713 static tree
714 build_sender_ref (tree var, omp_context *ctx)
716 return build_sender_ref ((splay_tree_key) var, ctx);
719 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
720 BASE_POINTERS_RESTRICT, declare the field with restrict. */
722 static void
723 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
725 tree field, type, sfield = NULL_TREE;
726 splay_tree_key key = (splay_tree_key) var;
728 if ((mask & 16) != 0)
730 key = (splay_tree_key) &DECL_NAME (var);
731 gcc_checking_assert (key != (splay_tree_key) var);
733 if ((mask & 8) != 0)
735 key = (splay_tree_key) &DECL_UID (var);
736 gcc_checking_assert (key != (splay_tree_key) var);
738 gcc_assert ((mask & 1) == 0
739 || !splay_tree_lookup (ctx->field_map, key));
740 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
741 || !splay_tree_lookup (ctx->sfield_map, key));
742 gcc_assert ((mask & 3) == 3
743 || !is_gimple_omp_oacc (ctx->stmt));
745 type = TREE_TYPE (var);
746 if ((mask & 16) != 0)
747 type = lang_hooks.decls.omp_array_data (var, true);
749 /* Prevent redeclaring the var in the split-off function with a restrict
750 pointer type. Note that we only clear type itself, restrict qualifiers in
751 the pointed-to type will be ignored by points-to analysis. */
752 if (POINTER_TYPE_P (type)
753 && TYPE_RESTRICT (type))
754 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
756 if (mask & 4)
758 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
759 type = build_pointer_type (build_pointer_type (type));
761 else if (by_ref)
762 type = build_pointer_type (type);
763 else if ((mask & 3) == 1 && omp_is_reference (var))
764 type = TREE_TYPE (type);
766 field = build_decl (DECL_SOURCE_LOCATION (var),
767 FIELD_DECL, DECL_NAME (var), type);
769 /* Remember what variable this field was created for. This does have a
770 side effect of making dwarf2out ignore this member, so for helpful
771 debugging we clear it later in delete_omp_context. */
772 DECL_ABSTRACT_ORIGIN (field) = var;
773 if ((mask & 16) == 0 && type == TREE_TYPE (var))
775 SET_DECL_ALIGN (field, DECL_ALIGN (var));
776 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
777 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
779 else
780 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
782 if ((mask & 3) == 3)
784 insert_field_into_struct (ctx->record_type, field);
785 if (ctx->srecord_type)
787 sfield = build_decl (DECL_SOURCE_LOCATION (var),
788 FIELD_DECL, DECL_NAME (var), type);
789 DECL_ABSTRACT_ORIGIN (sfield) = var;
790 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
791 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
792 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
793 insert_field_into_struct (ctx->srecord_type, sfield);
796 else
798 if (ctx->srecord_type == NULL_TREE)
800 tree t;
802 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
803 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
804 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
806 sfield = build_decl (DECL_SOURCE_LOCATION (t),
807 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
808 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
809 insert_field_into_struct (ctx->srecord_type, sfield);
810 splay_tree_insert (ctx->sfield_map,
811 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
812 (splay_tree_value) sfield);
815 sfield = field;
816 insert_field_into_struct ((mask & 1) ? ctx->record_type
817 : ctx->srecord_type, field);
820 if (mask & 1)
821 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
822 if ((mask & 2) && ctx->sfield_map)
823 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
826 static tree
827 install_var_local (tree var, omp_context *ctx)
829 tree new_var = omp_copy_decl_1 (var, ctx);
830 insert_decl_map (&ctx->cb, var, new_var);
831 return new_var;
834 /* Adjust the replacement for DECL in CTX for the new context. This means
835 copying the DECL_VALUE_EXPR, and fixing up the type. */
837 static void
838 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
840 tree new_decl, size;
842 new_decl = lookup_decl (decl, ctx);
844 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
846 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
847 && DECL_HAS_VALUE_EXPR_P (decl))
849 tree ve = DECL_VALUE_EXPR (decl);
850 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
851 SET_DECL_VALUE_EXPR (new_decl, ve);
852 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
855 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
857 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
858 if (size == error_mark_node)
859 size = TYPE_SIZE (TREE_TYPE (new_decl));
860 DECL_SIZE (new_decl) = size;
862 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
863 if (size == error_mark_node)
864 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
865 DECL_SIZE_UNIT (new_decl) = size;
869 /* The callback for remap_decl. Search all containing contexts for a
870 mapping of the variable; this avoids having to duplicate the splay
871 tree ahead of time. We know a mapping doesn't already exist in the
872 given context. Create new mappings to implement default semantics. */
874 static tree
875 omp_copy_decl (tree var, copy_body_data *cb)
877 omp_context *ctx = (omp_context *) cb;
878 tree new_var;
880 if (TREE_CODE (var) == LABEL_DECL)
882 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
883 return var;
884 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
885 DECL_CONTEXT (new_var) = current_function_decl;
886 insert_decl_map (&ctx->cb, var, new_var);
887 return new_var;
890 while (!is_taskreg_ctx (ctx))
892 ctx = ctx->outer;
893 if (ctx == NULL)
894 return var;
895 new_var = maybe_lookup_decl (var, ctx);
896 if (new_var)
897 return new_var;
900 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
901 return var;
903 return error_mark_node;
906 /* Create a new context, with OUTER_CTX being the surrounding context. */
908 static omp_context *
909 new_omp_context (gimple *stmt, omp_context *outer_ctx)
911 omp_context *ctx = XCNEW (omp_context);
913 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
914 (splay_tree_value) ctx);
915 ctx->stmt = stmt;
917 if (outer_ctx)
919 ctx->outer = outer_ctx;
920 ctx->cb = outer_ctx->cb;
921 ctx->cb.block = NULL;
922 ctx->local_reduction_clauses = NULL;
923 ctx->outer_reduction_clauses = ctx->outer_reduction_clauses;
924 ctx->depth = outer_ctx->depth + 1;
926 else
928 ctx->cb.src_fn = current_function_decl;
929 ctx->cb.dst_fn = current_function_decl;
930 ctx->cb.src_node = cgraph_node::get (current_function_decl);
931 gcc_checking_assert (ctx->cb.src_node);
932 ctx->cb.dst_node = ctx->cb.src_node;
933 ctx->cb.src_cfun = cfun;
934 ctx->cb.copy_decl = omp_copy_decl;
935 ctx->cb.eh_lp_nr = 0;
936 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
937 ctx->cb.adjust_array_error_bounds = true;
938 ctx->cb.dont_remap_vla_if_no_change = true;
939 ctx->local_reduction_clauses = NULL;
940 ctx->outer_reduction_clauses = NULL;
941 ctx->depth = 1;
944 ctx->cb.decl_map = new hash_map<tree, tree>;
946 return ctx;
949 static gimple_seq maybe_catch_exception (gimple_seq);
951 /* Finalize task copyfn. */
953 static void
954 finalize_task_copyfn (gomp_task *task_stmt)
956 struct function *child_cfun;
957 tree child_fn;
958 gimple_seq seq = NULL, new_seq;
959 gbind *bind;
961 child_fn = gimple_omp_task_copy_fn (task_stmt);
962 if (child_fn == NULL_TREE)
963 return;
965 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
966 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
968 push_cfun (child_cfun);
969 bind = gimplify_body (child_fn, false);
970 gimple_seq_add_stmt (&seq, bind);
971 new_seq = maybe_catch_exception (seq);
972 if (new_seq != seq)
974 bind = gimple_build_bind (NULL, new_seq, NULL);
975 seq = NULL;
976 gimple_seq_add_stmt (&seq, bind);
978 gimple_set_body (child_fn, seq);
979 pop_cfun ();
981 /* Inform the callgraph about the new function. */
982 cgraph_node *node = cgraph_node::get_create (child_fn);
983 node->parallelized_function = 1;
984 cgraph_node::add_new_function (child_fn, false);
987 /* Destroy a omp_context data structures. Called through the splay tree
988 value delete callback. */
990 static void
991 delete_omp_context (splay_tree_value value)
993 omp_context *ctx = (omp_context *) value;
995 delete ctx->cb.decl_map;
997 if (ctx->field_map)
998 splay_tree_delete (ctx->field_map);
999 if (ctx->sfield_map)
1000 splay_tree_delete (ctx->sfield_map);
1002 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1003 it produces corrupt debug information. */
1004 if (ctx->record_type)
1006 tree t;
1007 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1008 DECL_ABSTRACT_ORIGIN (t) = NULL;
1010 if (ctx->srecord_type)
1012 tree t;
1013 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1014 DECL_ABSTRACT_ORIGIN (t) = NULL;
1017 if (is_task_ctx (ctx))
1018 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1020 if (ctx->task_reduction_map)
1022 ctx->task_reductions.release ();
1023 delete ctx->task_reduction_map;
1026 delete ctx->lastprivate_conditional_map;
1028 XDELETE (ctx);
1031 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1032 context. */
1034 static void
1035 fixup_child_record_type (omp_context *ctx)
1037 tree f, type = ctx->record_type;
1039 if (!ctx->receiver_decl)
1040 return;
1041 /* ??? It isn't sufficient to just call remap_type here, because
1042 variably_modified_type_p doesn't work the way we expect for
1043 record types. Testing each field for whether it needs remapping
1044 and creating a new record by hand works, however. */
1045 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1046 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1047 break;
1048 if (f)
1050 tree name, new_fields = NULL;
1052 type = lang_hooks.types.make_type (RECORD_TYPE);
1053 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1054 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1055 TYPE_DECL, name, type);
1056 TYPE_NAME (type) = name;
1058 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1060 tree new_f = copy_node (f);
1061 DECL_CONTEXT (new_f) = type;
1062 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1063 DECL_CHAIN (new_f) = new_fields;
1064 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1065 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1066 &ctx->cb, NULL);
1067 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1068 &ctx->cb, NULL);
1069 new_fields = new_f;
1071 /* Arrange to be able to look up the receiver field
1072 given the sender field. */
1073 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1074 (splay_tree_value) new_f);
1076 TYPE_FIELDS (type) = nreverse (new_fields);
1077 layout_type (type);
1080 /* In a target region we never modify any of the pointers in *.omp_data_i,
1081 so attempt to help the optimizers. */
1082 if (is_gimple_omp_offloaded (ctx->stmt))
1083 type = build_qualified_type (type, TYPE_QUAL_CONST);
1085 TREE_TYPE (ctx->receiver_decl)
1086 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1089 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1090 specified by CLAUSES. */
1092 static void
1093 scan_sharing_clauses (tree clauses, omp_context *ctx)
1095 tree c, decl;
1096 bool scan_array_reductions = false;
1098 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1100 bool by_ref;
1102 switch (OMP_CLAUSE_CODE (c))
1104 case OMP_CLAUSE_PRIVATE:
1105 decl = OMP_CLAUSE_DECL (c);
1106 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1107 goto do_private;
1108 else if (!is_variable_sized (decl))
1109 install_var_local (decl, ctx);
1110 break;
1112 case OMP_CLAUSE_SHARED:
1113 decl = OMP_CLAUSE_DECL (c);
1114 /* Ignore shared directives in teams construct inside of
1115 target construct. */
1116 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1117 && !is_host_teams_ctx (ctx))
1119 /* Global variables don't need to be copied,
1120 the receiver side will use them directly. */
1121 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1122 if (is_global_var (odecl))
1123 break;
1124 insert_decl_map (&ctx->cb, decl, odecl);
1125 break;
1127 gcc_assert (is_taskreg_ctx (ctx));
1128 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1129 || !is_variable_sized (decl));
1130 /* Global variables don't need to be copied,
1131 the receiver side will use them directly. */
1132 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1133 break;
1134 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1136 use_pointer_for_field (decl, ctx);
1137 break;
1139 by_ref = use_pointer_for_field (decl, NULL);
1140 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1141 || TREE_ADDRESSABLE (decl)
1142 || by_ref
1143 || omp_is_reference (decl))
1145 by_ref = use_pointer_for_field (decl, ctx);
1146 install_var_field (decl, by_ref, 3, ctx);
1147 install_var_local (decl, ctx);
1148 break;
1150 /* We don't need to copy const scalar vars back. */
1151 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1152 goto do_private;
1154 case OMP_CLAUSE_REDUCTION:
1155 if (is_oacc_parallel_or_serial (ctx) || is_oacc_kernels (ctx))
1156 ctx->local_reduction_clauses
1157 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1158 /* FALLTHRU */
1160 case OMP_CLAUSE_IN_REDUCTION:
1161 decl = OMP_CLAUSE_DECL (c);
1162 if (TREE_CODE (decl) == MEM_REF)
1164 tree t = TREE_OPERAND (decl, 0);
1165 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1166 t = TREE_OPERAND (t, 0);
1167 if (TREE_CODE (t) == INDIRECT_REF
1168 || TREE_CODE (t) == ADDR_EXPR)
1169 t = TREE_OPERAND (t, 0);
1170 install_var_local (t, ctx);
1171 if (is_taskreg_ctx (ctx)
1172 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1173 || (is_task_ctx (ctx)
1174 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1175 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1176 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1177 == POINTER_TYPE)))))
1178 && !is_variable_sized (t)
1179 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1180 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1181 && !is_task_ctx (ctx))))
1183 by_ref = use_pointer_for_field (t, NULL);
1184 if (is_task_ctx (ctx)
1185 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1186 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1188 install_var_field (t, false, 1, ctx);
1189 install_var_field (t, by_ref, 2, ctx);
1191 else
1192 install_var_field (t, by_ref, 3, ctx);
1194 break;
1196 if (is_task_ctx (ctx)
1197 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1198 && OMP_CLAUSE_REDUCTION_TASK (c)
1199 && is_parallel_ctx (ctx)))
1201 /* Global variables don't need to be copied,
1202 the receiver side will use them directly. */
1203 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1205 by_ref = use_pointer_for_field (decl, ctx);
1206 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1207 install_var_field (decl, by_ref, 3, ctx);
1209 install_var_local (decl, ctx);
1210 break;
1212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1213 && OMP_CLAUSE_REDUCTION_TASK (c))
1215 install_var_local (decl, ctx);
1216 break;
1218 goto do_private;
1220 case OMP_CLAUSE_LASTPRIVATE:
1221 /* Let the corresponding firstprivate clause create
1222 the variable. */
1223 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1224 break;
1225 /* FALLTHRU */
1227 case OMP_CLAUSE_FIRSTPRIVATE:
1228 case OMP_CLAUSE_LINEAR:
1229 decl = OMP_CLAUSE_DECL (c);
1230 do_private:
1231 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1232 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1233 && is_gimple_omp_offloaded (ctx->stmt))
1235 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1236 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1237 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1238 install_var_field (decl, true, 3, ctx);
1239 else
1240 install_var_field (decl, false, 3, ctx);
1242 if (is_variable_sized (decl))
1244 if (is_task_ctx (ctx))
1245 install_var_field (decl, false, 1, ctx);
1246 break;
1248 else if (is_taskreg_ctx (ctx))
1250 bool global
1251 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1252 by_ref = use_pointer_for_field (decl, NULL);
1254 if (is_task_ctx (ctx)
1255 && (global || by_ref || omp_is_reference (decl)))
1257 install_var_field (decl, false, 1, ctx);
1258 if (!global)
1259 install_var_field (decl, by_ref, 2, ctx);
1261 else if (!global)
1262 install_var_field (decl, by_ref, 3, ctx);
1264 install_var_local (decl, ctx);
1265 break;
1267 case OMP_CLAUSE_USE_DEVICE_PTR:
1268 case OMP_CLAUSE_USE_DEVICE_ADDR:
1269 decl = OMP_CLAUSE_DECL (c);
1271 /* Fortran array descriptors. */
1272 if (lang_hooks.decls.omp_array_data (decl, true))
1273 install_var_field (decl, false, 19, ctx);
1274 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1275 && !omp_is_reference (decl)
1276 && !omp_is_allocatable_or_ptr (decl))
1277 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1278 install_var_field (decl, true, 11, ctx);
1279 else
1280 install_var_field (decl, false, 11, ctx);
1281 if (DECL_SIZE (decl)
1282 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1284 tree decl2 = DECL_VALUE_EXPR (decl);
1285 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1286 decl2 = TREE_OPERAND (decl2, 0);
1287 gcc_assert (DECL_P (decl2));
1288 install_var_local (decl2, ctx);
1290 install_var_local (decl, ctx);
1291 break;
1293 case OMP_CLAUSE_IS_DEVICE_PTR:
1294 decl = OMP_CLAUSE_DECL (c);
1295 goto do_private;
1297 case OMP_CLAUSE__LOOPTEMP_:
1298 case OMP_CLAUSE__REDUCTEMP_:
1299 gcc_assert (is_taskreg_ctx (ctx));
1300 decl = OMP_CLAUSE_DECL (c);
1301 install_var_field (decl, false, 3, ctx);
1302 install_var_local (decl, ctx);
1303 break;
1305 case OMP_CLAUSE_COPYPRIVATE:
1306 case OMP_CLAUSE_COPYIN:
1307 decl = OMP_CLAUSE_DECL (c);
1308 by_ref = use_pointer_for_field (decl, NULL);
1309 install_var_field (decl, by_ref, 3, ctx);
1310 break;
1312 case OMP_CLAUSE_FINAL:
1313 case OMP_CLAUSE_IF:
1314 case OMP_CLAUSE_NUM_THREADS:
1315 case OMP_CLAUSE_NUM_TEAMS:
1316 case OMP_CLAUSE_THREAD_LIMIT:
1317 case OMP_CLAUSE_DEVICE:
1318 case OMP_CLAUSE_SCHEDULE:
1319 case OMP_CLAUSE_DIST_SCHEDULE:
1320 case OMP_CLAUSE_DEPEND:
1321 case OMP_CLAUSE_PRIORITY:
1322 case OMP_CLAUSE_GRAINSIZE:
1323 case OMP_CLAUSE_NUM_TASKS:
1324 case OMP_CLAUSE_NUM_GANGS:
1325 case OMP_CLAUSE_NUM_WORKERS:
1326 case OMP_CLAUSE_VECTOR_LENGTH:
1327 if (ctx->outer)
1328 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1329 break;
1331 case OMP_CLAUSE_TO:
1332 case OMP_CLAUSE_FROM:
1333 case OMP_CLAUSE_MAP:
1334 if (ctx->outer)
1335 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1336 decl = OMP_CLAUSE_DECL (c);
1337 /* Global variables with "omp declare target" attribute
1338 don't need to be copied, the receiver side will use them
1339 directly. However, global variables with "omp declare target link"
1340 attribute need to be copied. Or when ALWAYS modifier is used. */
1341 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1342 && DECL_P (decl)
1343 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1344 && (OMP_CLAUSE_MAP_KIND (c)
1345 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1346 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1347 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1348 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1349 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1350 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1351 && varpool_node::get_create (decl)->offloadable
1352 && !lookup_attribute ("omp declare target link",
1353 DECL_ATTRIBUTES (decl)))
1354 break;
1355 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1356 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1358 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1359 not offloaded; there is nothing to map for those. */
1360 if (!is_gimple_omp_offloaded (ctx->stmt)
1361 && !POINTER_TYPE_P (TREE_TYPE (decl))
1362 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1363 break;
1365 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1366 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1367 || (OMP_CLAUSE_MAP_KIND (c)
1368 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1370 if (TREE_CODE (decl) == COMPONENT_REF
1371 || (TREE_CODE (decl) == INDIRECT_REF
1372 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1373 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1374 == REFERENCE_TYPE)))
1375 break;
1376 if (DECL_SIZE (decl)
1377 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1379 tree decl2 = DECL_VALUE_EXPR (decl);
1380 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1381 decl2 = TREE_OPERAND (decl2, 0);
1382 gcc_assert (DECL_P (decl2));
1383 install_var_local (decl2, ctx);
1385 install_var_local (decl, ctx);
1386 break;
1388 if (DECL_P (decl))
1390 if (DECL_SIZE (decl)
1391 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1393 tree decl2 = DECL_VALUE_EXPR (decl);
1394 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1395 decl2 = TREE_OPERAND (decl2, 0);
1396 gcc_assert (DECL_P (decl2));
1397 install_var_field (decl2, true, 3, ctx);
1398 install_var_local (decl2, ctx);
1399 install_var_local (decl, ctx);
1401 else
1403 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1404 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1405 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1406 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1407 install_var_field (decl, true, 7, ctx);
1408 else
1409 install_var_field (decl, true, 3, ctx);
1410 if (is_gimple_omp_offloaded (ctx->stmt)
1411 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1412 install_var_local (decl, ctx);
1415 else
1417 tree base = get_base_address (decl);
1418 tree nc = OMP_CLAUSE_CHAIN (c);
1419 if (DECL_P (base)
1420 && nc != NULL_TREE
1421 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1422 && OMP_CLAUSE_DECL (nc) == base
1423 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1424 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1426 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1427 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1429 else
1431 if (ctx->outer)
1433 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1434 decl = OMP_CLAUSE_DECL (c);
1436 gcc_assert (!splay_tree_lookup (ctx->field_map,
1437 (splay_tree_key) decl));
1438 tree field
1439 = build_decl (OMP_CLAUSE_LOCATION (c),
1440 FIELD_DECL, NULL_TREE, ptr_type_node);
1441 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1442 insert_field_into_struct (ctx->record_type, field);
1443 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1444 (splay_tree_value) field);
1447 break;
1449 case OMP_CLAUSE__GRIDDIM_:
1450 if (ctx->outer)
1452 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1453 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1455 break;
1457 case OMP_CLAUSE_ORDER:
1458 ctx->order_concurrent = true;
1459 break;
1461 case OMP_CLAUSE_BIND:
1462 ctx->loop_p = true;
1463 break;
1465 case OMP_CLAUSE_NOWAIT:
1466 case OMP_CLAUSE_ORDERED:
1467 case OMP_CLAUSE_COLLAPSE:
1468 case OMP_CLAUSE_UNTIED:
1469 case OMP_CLAUSE_MERGEABLE:
1470 case OMP_CLAUSE_PROC_BIND:
1471 case OMP_CLAUSE_SAFELEN:
1472 case OMP_CLAUSE_SIMDLEN:
1473 case OMP_CLAUSE_THREADS:
1474 case OMP_CLAUSE_SIMD:
1475 case OMP_CLAUSE_NOGROUP:
1476 case OMP_CLAUSE_DEFAULTMAP:
1477 case OMP_CLAUSE_ASYNC:
1478 case OMP_CLAUSE_WAIT:
1479 case OMP_CLAUSE_GANG:
1480 case OMP_CLAUSE_WORKER:
1481 case OMP_CLAUSE_VECTOR:
1482 case OMP_CLAUSE_INDEPENDENT:
1483 case OMP_CLAUSE_AUTO:
1484 case OMP_CLAUSE_SEQ:
1485 case OMP_CLAUSE_TILE:
1486 case OMP_CLAUSE__SIMT_:
1487 case OMP_CLAUSE_DEFAULT:
1488 case OMP_CLAUSE_NONTEMPORAL:
1489 case OMP_CLAUSE_IF_PRESENT:
1490 case OMP_CLAUSE_FINALIZE:
1491 case OMP_CLAUSE_TASK_REDUCTION:
1492 break;
1494 case OMP_CLAUSE_ALIGNED:
1495 decl = OMP_CLAUSE_DECL (c);
1496 if (is_global_var (decl)
1497 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1498 install_var_local (decl, ctx);
1499 break;
1501 case OMP_CLAUSE__CONDTEMP_:
1502 decl = OMP_CLAUSE_DECL (c);
1503 if (is_parallel_ctx (ctx))
1505 install_var_field (decl, false, 3, ctx);
1506 install_var_local (decl, ctx);
1508 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1509 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1510 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1511 install_var_local (decl, ctx);
1512 break;
1514 case OMP_CLAUSE__CACHE_:
1515 default:
1516 gcc_unreachable ();
1520 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1522 switch (OMP_CLAUSE_CODE (c))
1524 case OMP_CLAUSE_LASTPRIVATE:
1525 /* Let the corresponding firstprivate clause create
1526 the variable. */
1527 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1528 scan_array_reductions = true;
1529 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1530 break;
1531 /* FALLTHRU */
1533 case OMP_CLAUSE_FIRSTPRIVATE:
1534 case OMP_CLAUSE_PRIVATE:
1535 case OMP_CLAUSE_LINEAR:
1536 case OMP_CLAUSE_IS_DEVICE_PTR:
1537 decl = OMP_CLAUSE_DECL (c);
1538 if (is_variable_sized (decl))
1540 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1541 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1542 && is_gimple_omp_offloaded (ctx->stmt))
1544 tree decl2 = DECL_VALUE_EXPR (decl);
1545 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1546 decl2 = TREE_OPERAND (decl2, 0);
1547 gcc_assert (DECL_P (decl2));
1548 install_var_local (decl2, ctx);
1549 fixup_remapped_decl (decl2, ctx, false);
1551 install_var_local (decl, ctx);
1553 fixup_remapped_decl (decl, ctx,
1554 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1555 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1556 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1557 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1558 scan_array_reductions = true;
1559 break;
1561 case OMP_CLAUSE_REDUCTION:
1562 case OMP_CLAUSE_IN_REDUCTION:
1563 decl = OMP_CLAUSE_DECL (c);
1564 if (TREE_CODE (decl) != MEM_REF)
1566 if (is_variable_sized (decl))
1567 install_var_local (decl, ctx);
1568 fixup_remapped_decl (decl, ctx, false);
1570 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1571 scan_array_reductions = true;
1572 break;
1574 case OMP_CLAUSE_TASK_REDUCTION:
1575 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1576 scan_array_reductions = true;
1577 break;
1579 case OMP_CLAUSE_SHARED:
1580 /* Ignore shared directives in teams construct inside of
1581 target construct. */
1582 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1583 && !is_host_teams_ctx (ctx))
1584 break;
1585 decl = OMP_CLAUSE_DECL (c);
1586 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1587 break;
1588 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1590 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1591 ctx->outer)))
1592 break;
1593 bool by_ref = use_pointer_for_field (decl, ctx);
1594 install_var_field (decl, by_ref, 11, ctx);
1595 break;
1597 fixup_remapped_decl (decl, ctx, false);
1598 break;
1600 case OMP_CLAUSE_MAP:
1601 if (!is_gimple_omp_offloaded (ctx->stmt))
1602 break;
1603 decl = OMP_CLAUSE_DECL (c);
1604 if (DECL_P (decl)
1605 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1606 && (OMP_CLAUSE_MAP_KIND (c)
1607 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1608 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1609 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1610 && varpool_node::get_create (decl)->offloadable)
1611 break;
1612 if (DECL_P (decl))
1614 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1615 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1616 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1617 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1619 tree new_decl = lookup_decl (decl, ctx);
1620 TREE_TYPE (new_decl)
1621 = remap_type (TREE_TYPE (decl), &ctx->cb);
1623 else if (DECL_SIZE (decl)
1624 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1626 tree decl2 = DECL_VALUE_EXPR (decl);
1627 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1628 decl2 = TREE_OPERAND (decl2, 0);
1629 gcc_assert (DECL_P (decl2));
1630 fixup_remapped_decl (decl2, ctx, false);
1631 fixup_remapped_decl (decl, ctx, true);
1633 else
1634 fixup_remapped_decl (decl, ctx, false);
1636 break;
1638 case OMP_CLAUSE_COPYPRIVATE:
1639 case OMP_CLAUSE_COPYIN:
1640 case OMP_CLAUSE_DEFAULT:
1641 case OMP_CLAUSE_IF:
1642 case OMP_CLAUSE_NUM_THREADS:
1643 case OMP_CLAUSE_NUM_TEAMS:
1644 case OMP_CLAUSE_THREAD_LIMIT:
1645 case OMP_CLAUSE_DEVICE:
1646 case OMP_CLAUSE_SCHEDULE:
1647 case OMP_CLAUSE_DIST_SCHEDULE:
1648 case OMP_CLAUSE_NOWAIT:
1649 case OMP_CLAUSE_ORDERED:
1650 case OMP_CLAUSE_COLLAPSE:
1651 case OMP_CLAUSE_UNTIED:
1652 case OMP_CLAUSE_FINAL:
1653 case OMP_CLAUSE_MERGEABLE:
1654 case OMP_CLAUSE_PROC_BIND:
1655 case OMP_CLAUSE_SAFELEN:
1656 case OMP_CLAUSE_SIMDLEN:
1657 case OMP_CLAUSE_ALIGNED:
1658 case OMP_CLAUSE_DEPEND:
1659 case OMP_CLAUSE__LOOPTEMP_:
1660 case OMP_CLAUSE__REDUCTEMP_:
1661 case OMP_CLAUSE_TO:
1662 case OMP_CLAUSE_FROM:
1663 case OMP_CLAUSE_PRIORITY:
1664 case OMP_CLAUSE_GRAINSIZE:
1665 case OMP_CLAUSE_NUM_TASKS:
1666 case OMP_CLAUSE_THREADS:
1667 case OMP_CLAUSE_SIMD:
1668 case OMP_CLAUSE_NOGROUP:
1669 case OMP_CLAUSE_DEFAULTMAP:
1670 case OMP_CLAUSE_ORDER:
1671 case OMP_CLAUSE_BIND:
1672 case OMP_CLAUSE_USE_DEVICE_PTR:
1673 case OMP_CLAUSE_USE_DEVICE_ADDR:
1674 case OMP_CLAUSE_NONTEMPORAL:
1675 case OMP_CLAUSE_ASYNC:
1676 case OMP_CLAUSE_WAIT:
1677 case OMP_CLAUSE_NUM_GANGS:
1678 case OMP_CLAUSE_NUM_WORKERS:
1679 case OMP_CLAUSE_VECTOR_LENGTH:
1680 case OMP_CLAUSE_GANG:
1681 case OMP_CLAUSE_WORKER:
1682 case OMP_CLAUSE_VECTOR:
1683 case OMP_CLAUSE_INDEPENDENT:
1684 case OMP_CLAUSE_AUTO:
1685 case OMP_CLAUSE_SEQ:
1686 case OMP_CLAUSE_TILE:
1687 case OMP_CLAUSE__GRIDDIM_:
1688 case OMP_CLAUSE__SIMT_:
1689 case OMP_CLAUSE_IF_PRESENT:
1690 case OMP_CLAUSE_FINALIZE:
1691 case OMP_CLAUSE__CONDTEMP_:
1692 break;
1694 case OMP_CLAUSE__CACHE_:
1695 default:
1696 gcc_unreachable ();
1700 gcc_checking_assert (!scan_array_reductions
1701 || !is_gimple_omp_oacc (ctx->stmt));
1702 if (scan_array_reductions)
1704 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1705 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1706 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1707 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1708 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1710 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1711 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1713 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1714 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1715 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1716 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1717 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1718 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1722 /* Create a new name for omp child function. Returns an identifier. */
1724 static tree
1725 create_omp_child_function_name (bool task_copy)
1727 return clone_function_name_numbered (current_function_decl,
1728 task_copy ? "_omp_cpyfn" : "_omp_fn");
1731 /* Return true if CTX may belong to offloaded code: either if current function
1732 is offloaded, or any enclosing context corresponds to a target region. */
1734 static bool
1735 omp_maybe_offloaded_ctx (omp_context *ctx)
1737 if (cgraph_node::get (current_function_decl)->offloadable)
1738 return true;
1739 for (; ctx; ctx = ctx->outer)
1740 if (is_gimple_omp_offloaded (ctx->stmt))
1741 return true;
1742 return false;
1745 /* Build a decl for the omp child function. It'll not contain a body
1746 yet, just the bare decl. */
1748 static void
1749 create_omp_child_function (omp_context *ctx, bool task_copy)
1751 tree decl, type, name, t;
1753 name = create_omp_child_function_name (task_copy);
1754 if (task_copy)
1755 type = build_function_type_list (void_type_node, ptr_type_node,
1756 ptr_type_node, NULL_TREE);
1757 else
1758 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1760 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1762 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1763 || !task_copy);
1764 if (!task_copy)
1765 ctx->cb.dst_fn = decl;
1766 else
1767 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1769 TREE_STATIC (decl) = 1;
1770 TREE_USED (decl) = 1;
1771 DECL_ARTIFICIAL (decl) = 1;
1772 DECL_IGNORED_P (decl) = 0;
1773 TREE_PUBLIC (decl) = 0;
1774 DECL_UNINLINABLE (decl) = 1;
1775 DECL_EXTERNAL (decl) = 0;
1776 DECL_CONTEXT (decl) = NULL_TREE;
1777 DECL_INITIAL (decl) = make_node (BLOCK);
1778 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1779 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1780 /* Remove omp declare simd attribute from the new attributes. */
1781 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1783 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1784 a = a2;
1785 a = TREE_CHAIN (a);
1786 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1787 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1788 *p = TREE_CHAIN (*p);
1789 else
1791 tree chain = TREE_CHAIN (*p);
1792 *p = copy_node (*p);
1793 p = &TREE_CHAIN (*p);
1794 *p = chain;
1797 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1798 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1799 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1800 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1801 DECL_FUNCTION_VERSIONED (decl)
1802 = DECL_FUNCTION_VERSIONED (current_function_decl);
1804 if (omp_maybe_offloaded_ctx (ctx))
1806 cgraph_node::get_create (decl)->offloadable = 1;
1807 if (ENABLE_OFFLOADING)
1808 g->have_offload = true;
1811 if (cgraph_node::get_create (decl)->offloadable
1812 && !lookup_attribute ("omp declare target",
1813 DECL_ATTRIBUTES (current_function_decl)))
1815 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1816 ? "omp target entrypoint"
1817 : "omp declare target");
1818 DECL_ATTRIBUTES (decl)
1819 = tree_cons (get_identifier (target_attr),
1820 NULL_TREE, DECL_ATTRIBUTES (decl));
1823 t = build_decl (DECL_SOURCE_LOCATION (decl),
1824 RESULT_DECL, NULL_TREE, void_type_node);
1825 DECL_ARTIFICIAL (t) = 1;
1826 DECL_IGNORED_P (t) = 1;
1827 DECL_CONTEXT (t) = decl;
1828 DECL_RESULT (decl) = t;
1830 tree data_name = get_identifier (".omp_data_i");
1831 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1832 ptr_type_node);
1833 DECL_ARTIFICIAL (t) = 1;
1834 DECL_NAMELESS (t) = 1;
1835 DECL_ARG_TYPE (t) = ptr_type_node;
1836 DECL_CONTEXT (t) = current_function_decl;
1837 TREE_USED (t) = 1;
1838 TREE_READONLY (t) = 1;
1839 DECL_ARGUMENTS (decl) = t;
1840 if (!task_copy)
1841 ctx->receiver_decl = t;
1842 else
1844 t = build_decl (DECL_SOURCE_LOCATION (decl),
1845 PARM_DECL, get_identifier (".omp_data_o"),
1846 ptr_type_node);
1847 DECL_ARTIFICIAL (t) = 1;
1848 DECL_NAMELESS (t) = 1;
1849 DECL_ARG_TYPE (t) = ptr_type_node;
1850 DECL_CONTEXT (t) = current_function_decl;
1851 TREE_USED (t) = 1;
1852 TREE_ADDRESSABLE (t) = 1;
1853 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1854 DECL_ARGUMENTS (decl) = t;
1857 /* Allocate memory for the function structure. The call to
1858 allocate_struct_function clobbers CFUN, so we need to restore
1859 it afterward. */
1860 push_struct_function (decl);
1861 cfun->function_end_locus = gimple_location (ctx->stmt);
1862 init_tree_ssa (cfun);
1863 pop_cfun ();
1866 /* Callback for walk_gimple_seq. Check if combined parallel
1867 contains gimple_omp_for_combined_into_p OMP_FOR. */
1869 tree
1870 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1871 bool *handled_ops_p,
1872 struct walk_stmt_info *wi)
1874 gimple *stmt = gsi_stmt (*gsi_p);
1876 *handled_ops_p = true;
1877 switch (gimple_code (stmt))
1879 WALK_SUBSTMTS;
1881 case GIMPLE_OMP_FOR:
1882 if (gimple_omp_for_combined_into_p (stmt)
1883 && gimple_omp_for_kind (stmt)
1884 == *(const enum gf_mask *) (wi->info))
1886 wi->info = stmt;
1887 return integer_zero_node;
1889 break;
1890 default:
1891 break;
1893 return NULL;
1896 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1898 static void
1899 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1900 omp_context *outer_ctx)
1902 struct walk_stmt_info wi;
1904 memset (&wi, 0, sizeof (wi));
1905 wi.val_only = true;
1906 wi.info = (void *) &msk;
1907 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1908 if (wi.info != (void *) &msk)
1910 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1911 struct omp_for_data fd;
1912 omp_extract_for_data (for_stmt, &fd, NULL);
1913 /* We need two temporaries with fd.loop.v type (istart/iend)
1914 and then (fd.collapse - 1) temporaries with the same
1915 type for count2 ... countN-1 vars if not constant. */
1916 size_t count = 2, i;
1917 tree type = fd.iter_type;
1918 if (fd.collapse > 1
1919 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1921 count += fd.collapse - 1;
1922 /* If there are lastprivate clauses on the inner
1923 GIMPLE_OMP_FOR, add one more temporaries for the total number
1924 of iterations (product of count1 ... countN-1). */
1925 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1926 OMP_CLAUSE_LASTPRIVATE))
1927 count++;
1928 else if (msk == GF_OMP_FOR_KIND_FOR
1929 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1930 OMP_CLAUSE_LASTPRIVATE))
1931 count++;
1933 for (i = 0; i < count; i++)
1935 tree temp = create_tmp_var (type);
1936 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1937 insert_decl_map (&outer_ctx->cb, temp, temp);
1938 OMP_CLAUSE_DECL (c) = temp;
1939 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1940 gimple_omp_taskreg_set_clauses (stmt, c);
1943 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1944 && omp_find_clause (gimple_omp_task_clauses (stmt),
1945 OMP_CLAUSE_REDUCTION))
1947 tree type = build_pointer_type (pointer_sized_int_node);
1948 tree temp = create_tmp_var (type);
1949 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1950 insert_decl_map (&outer_ctx->cb, temp, temp);
1951 OMP_CLAUSE_DECL (c) = temp;
1952 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1953 gimple_omp_task_set_clauses (stmt, c);
1957 /* Scan an OpenMP parallel directive. */
1959 static void
1960 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1962 omp_context *ctx;
1963 tree name;
1964 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1966 /* Ignore parallel directives with empty bodies, unless there
1967 are copyin clauses. */
1968 if (optimize > 0
1969 && empty_body_p (gimple_omp_body (stmt))
1970 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1971 OMP_CLAUSE_COPYIN) == NULL)
1973 gsi_replace (gsi, gimple_build_nop (), false);
1974 return;
1977 if (gimple_omp_parallel_combined_p (stmt))
1978 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1979 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1980 OMP_CLAUSE_REDUCTION);
1981 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1982 if (OMP_CLAUSE_REDUCTION_TASK (c))
1984 tree type = build_pointer_type (pointer_sized_int_node);
1985 tree temp = create_tmp_var (type);
1986 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1987 if (outer_ctx)
1988 insert_decl_map (&outer_ctx->cb, temp, temp);
1989 OMP_CLAUSE_DECL (c) = temp;
1990 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1991 gimple_omp_parallel_set_clauses (stmt, c);
1992 break;
1994 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1995 break;
1997 ctx = new_omp_context (stmt, outer_ctx);
1998 taskreg_contexts.safe_push (ctx);
1999 if (taskreg_nesting_level > 1)
2000 ctx->is_nested = true;
2001 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2002 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2003 name = create_tmp_var_name (".omp_data_s");
2004 name = build_decl (gimple_location (stmt),
2005 TYPE_DECL, name, ctx->record_type);
2006 DECL_ARTIFICIAL (name) = 1;
2007 DECL_NAMELESS (name) = 1;
2008 TYPE_NAME (ctx->record_type) = name;
2009 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2010 if (!gimple_omp_parallel_grid_phony (stmt))
2012 create_omp_child_function (ctx, false);
2013 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2016 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2017 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2019 if (TYPE_FIELDS (ctx->record_type) == NULL)
2020 ctx->record_type = ctx->receiver_decl = NULL;
2023 /* Scan an OpenMP task directive. */
2025 static void
2026 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2028 omp_context *ctx;
2029 tree name, t;
2030 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2032 /* Ignore task directives with empty bodies, unless they have depend
2033 clause. */
2034 if (optimize > 0
2035 && gimple_omp_body (stmt)
2036 && empty_body_p (gimple_omp_body (stmt))
2037 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2039 gsi_replace (gsi, gimple_build_nop (), false);
2040 return;
2043 if (gimple_omp_task_taskloop_p (stmt))
2044 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2046 ctx = new_omp_context (stmt, outer_ctx);
2048 if (gimple_omp_task_taskwait_p (stmt))
2050 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2051 return;
2054 taskreg_contexts.safe_push (ctx);
2055 if (taskreg_nesting_level > 1)
2056 ctx->is_nested = true;
2057 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2058 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2059 name = create_tmp_var_name (".omp_data_s");
2060 name = build_decl (gimple_location (stmt),
2061 TYPE_DECL, name, ctx->record_type);
2062 DECL_ARTIFICIAL (name) = 1;
2063 DECL_NAMELESS (name) = 1;
2064 TYPE_NAME (ctx->record_type) = name;
2065 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2066 create_omp_child_function (ctx, false);
2067 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2069 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2071 if (ctx->srecord_type)
2073 name = create_tmp_var_name (".omp_data_a");
2074 name = build_decl (gimple_location (stmt),
2075 TYPE_DECL, name, ctx->srecord_type);
2076 DECL_ARTIFICIAL (name) = 1;
2077 DECL_NAMELESS (name) = 1;
2078 TYPE_NAME (ctx->srecord_type) = name;
2079 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2080 create_omp_child_function (ctx, true);
2083 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2085 if (TYPE_FIELDS (ctx->record_type) == NULL)
2087 ctx->record_type = ctx->receiver_decl = NULL;
2088 t = build_int_cst (long_integer_type_node, 0);
2089 gimple_omp_task_set_arg_size (stmt, t);
2090 t = build_int_cst (long_integer_type_node, 1);
2091 gimple_omp_task_set_arg_align (stmt, t);
2095 /* Helper function for finish_taskreg_scan, called through walk_tree.
2096 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2097 tree, replace it in the expression. */
2099 static tree
2100 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2102 if (VAR_P (*tp))
2104 omp_context *ctx = (omp_context *) data;
2105 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2106 if (t != *tp)
2108 if (DECL_HAS_VALUE_EXPR_P (t))
2109 t = unshare_expr (DECL_VALUE_EXPR (t));
2110 *tp = t;
2112 *walk_subtrees = 0;
2114 else if (IS_TYPE_OR_DECL_P (*tp))
2115 *walk_subtrees = 0;
2116 return NULL_TREE;
2119 /* If any decls have been made addressable during scan_omp,
2120 adjust their fields if needed, and layout record types
2121 of parallel/task constructs. */
2123 static void
2124 finish_taskreg_scan (omp_context *ctx)
2126 if (ctx->record_type == NULL_TREE)
2127 return;
2129 /* If any task_shared_vars were needed, verify all
2130 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2131 statements if use_pointer_for_field hasn't changed
2132 because of that. If it did, update field types now. */
2133 if (task_shared_vars)
2135 tree c;
2137 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2138 c; c = OMP_CLAUSE_CHAIN (c))
2139 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2140 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2142 tree decl = OMP_CLAUSE_DECL (c);
2144 /* Global variables don't need to be copied,
2145 the receiver side will use them directly. */
2146 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2147 continue;
2148 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2149 || !use_pointer_for_field (decl, ctx))
2150 continue;
2151 tree field = lookup_field (decl, ctx);
2152 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2153 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2154 continue;
2155 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2156 TREE_THIS_VOLATILE (field) = 0;
2157 DECL_USER_ALIGN (field) = 0;
2158 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2159 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2160 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2161 if (ctx->srecord_type)
2163 tree sfield = lookup_sfield (decl, ctx);
2164 TREE_TYPE (sfield) = TREE_TYPE (field);
2165 TREE_THIS_VOLATILE (sfield) = 0;
2166 DECL_USER_ALIGN (sfield) = 0;
2167 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2168 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2169 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2174 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2176 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2177 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2178 if (c)
2180 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2181 expects to find it at the start of data. */
2182 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2183 tree *p = &TYPE_FIELDS (ctx->record_type);
2184 while (*p)
2185 if (*p == f)
2187 *p = DECL_CHAIN (*p);
2188 break;
2190 else
2191 p = &DECL_CHAIN (*p);
2192 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2193 TYPE_FIELDS (ctx->record_type) = f;
2195 layout_type (ctx->record_type);
2196 fixup_child_record_type (ctx);
2198 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2200 layout_type (ctx->record_type);
2201 fixup_child_record_type (ctx);
2203 else
2205 location_t loc = gimple_location (ctx->stmt);
2206 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2207 /* Move VLA fields to the end. */
2208 p = &TYPE_FIELDS (ctx->record_type);
2209 while (*p)
2210 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2211 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2213 *q = *p;
2214 *p = TREE_CHAIN (*p);
2215 TREE_CHAIN (*q) = NULL_TREE;
2216 q = &TREE_CHAIN (*q);
2218 else
2219 p = &DECL_CHAIN (*p);
2220 *p = vla_fields;
2221 if (gimple_omp_task_taskloop_p (ctx->stmt))
2223 /* Move fields corresponding to first and second _looptemp_
2224 clause first. There are filled by GOMP_taskloop
2225 and thus need to be in specific positions. */
2226 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2227 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2228 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2229 OMP_CLAUSE__LOOPTEMP_);
2230 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2231 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2232 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2233 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2234 p = &TYPE_FIELDS (ctx->record_type);
2235 while (*p)
2236 if (*p == f1 || *p == f2 || *p == f3)
2237 *p = DECL_CHAIN (*p);
2238 else
2239 p = &DECL_CHAIN (*p);
2240 DECL_CHAIN (f1) = f2;
2241 if (c3)
2243 DECL_CHAIN (f2) = f3;
2244 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2246 else
2247 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2248 TYPE_FIELDS (ctx->record_type) = f1;
2249 if (ctx->srecord_type)
2251 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2252 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2253 if (c3)
2254 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2255 p = &TYPE_FIELDS (ctx->srecord_type);
2256 while (*p)
2257 if (*p == f1 || *p == f2 || *p == f3)
2258 *p = DECL_CHAIN (*p);
2259 else
2260 p = &DECL_CHAIN (*p);
2261 DECL_CHAIN (f1) = f2;
2262 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2263 if (c3)
2265 DECL_CHAIN (f2) = f3;
2266 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2268 else
2269 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2270 TYPE_FIELDS (ctx->srecord_type) = f1;
2273 layout_type (ctx->record_type);
2274 fixup_child_record_type (ctx);
2275 if (ctx->srecord_type)
2276 layout_type (ctx->srecord_type);
2277 tree t = fold_convert_loc (loc, long_integer_type_node,
2278 TYPE_SIZE_UNIT (ctx->record_type));
2279 if (TREE_CODE (t) != INTEGER_CST)
2281 t = unshare_expr (t);
2282 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2284 gimple_omp_task_set_arg_size (ctx->stmt, t);
2285 t = build_int_cst (long_integer_type_node,
2286 TYPE_ALIGN_UNIT (ctx->record_type));
2287 gimple_omp_task_set_arg_align (ctx->stmt, t);
2291 /* Find the enclosing offload context. */
2293 static omp_context *
2294 enclosing_target_ctx (omp_context *ctx)
2296 for (; ctx; ctx = ctx->outer)
2297 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2298 break;
2300 return ctx;
2303 /* Return true if ctx is part of an oacc kernels region. */
2305 static bool
2306 ctx_in_oacc_kernels_region (omp_context *ctx)
2308 for (;ctx != NULL; ctx = ctx->outer)
2310 gimple *stmt = ctx->stmt;
2311 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2312 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2313 return true;
2316 return false;
2319 /* Check the parallelism clauses inside a kernels regions.
2320 Until kernels handling moves to use the same loop indirection
2321 scheme as parallel, we need to do this checking early. */
2323 static unsigned
2324 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2326 bool checking = true;
2327 unsigned outer_mask = 0;
2328 unsigned this_mask = 0;
2329 bool has_seq = false, has_auto = false;
2331 if (ctx->outer)
2332 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2333 if (!stmt)
2335 checking = false;
2336 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2337 return outer_mask;
2338 stmt = as_a <gomp_for *> (ctx->stmt);
2341 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2343 switch (OMP_CLAUSE_CODE (c))
2345 case OMP_CLAUSE_GANG:
2346 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2347 break;
2348 case OMP_CLAUSE_WORKER:
2349 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2350 break;
2351 case OMP_CLAUSE_VECTOR:
2352 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2353 break;
2354 case OMP_CLAUSE_SEQ:
2355 has_seq = true;
2356 break;
2357 case OMP_CLAUSE_AUTO:
2358 has_auto = true;
2359 break;
2360 default:
2361 break;
2365 if (checking)
2367 if (has_seq && (this_mask || has_auto))
2368 error_at (gimple_location (stmt), "%<seq%> overrides other"
2369 " OpenACC loop specifiers");
2370 else if (has_auto && this_mask)
2371 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2372 " OpenACC loop specifiers");
2374 if (this_mask & outer_mask)
2375 error_at (gimple_location (stmt), "inner loop uses same"
2376 " OpenACC parallelism as containing loop");
2379 return outer_mask | this_mask;
2382 /* Scan a GIMPLE_OMP_FOR. */
2384 static omp_context *
2385 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2387 omp_context *ctx;
2388 size_t i;
2389 tree clauses = gimple_omp_for_clauses (stmt);
2391 ctx = new_omp_context (stmt, outer_ctx);
2393 if (is_gimple_omp_oacc (stmt))
2395 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2397 if (!tgt || is_oacc_parallel_or_serial (tgt))
2398 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2400 char const *check = NULL;
2402 switch (OMP_CLAUSE_CODE (c))
2404 case OMP_CLAUSE_GANG:
2405 check = "gang";
2406 break;
2408 case OMP_CLAUSE_WORKER:
2409 check = "worker";
2410 break;
2412 case OMP_CLAUSE_VECTOR:
2413 check = "vector";
2414 break;
2416 default:
2417 break;
2420 if (check && OMP_CLAUSE_OPERAND (c, 0))
2421 error_at (gimple_location (stmt),
2422 "argument not permitted on %qs clause in"
2423 " OpenACC %<parallel%> or %<serial%>", check);
2426 if (tgt && is_oacc_kernels (tgt))
2428 /* Strip out reductions, as they are not handled yet. */
2429 tree *prev_ptr = &clauses;
2431 while (tree probe = *prev_ptr)
2433 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2435 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2436 *prev_ptr = *next_ptr;
2437 else
2438 prev_ptr = next_ptr;
2441 gimple_omp_for_set_clauses (stmt, clauses);
2442 check_oacc_kernel_gwv (stmt, ctx);
2445 /* Collect all variables named in reductions on this loop. Ensure
2446 that, if this loop has a reduction on some variable v, and there is
2447 a reduction on v somewhere in an outer context, then there is a
2448 reduction on v on all intervening loops as well. */
2449 tree local_reduction_clauses = NULL;
2450 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2452 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2453 local_reduction_clauses
2454 = tree_cons (NULL, c, local_reduction_clauses);
2456 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2457 ctx->outer_reduction_clauses
2458 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2459 ctx->outer->outer_reduction_clauses);
2460 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2461 tree local_iter = local_reduction_clauses;
2462 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2464 tree local_clause = TREE_VALUE (local_iter);
2465 tree local_var = OMP_CLAUSE_DECL (local_clause);
2466 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2467 bool have_outer_reduction = false;
2468 tree ctx_iter = outer_reduction_clauses;
2469 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2471 tree outer_clause = TREE_VALUE (ctx_iter);
2472 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2473 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2474 if (outer_var == local_var && outer_op != local_op)
2476 warning_at (gimple_location (stmt), 0,
2477 "conflicting reduction operations for %qE",
2478 local_var);
2479 inform (OMP_CLAUSE_LOCATION (outer_clause),
2480 "location of the previous reduction for %qE",
2481 outer_var);
2483 if (outer_var == local_var)
2485 have_outer_reduction = true;
2486 break;
2489 if (have_outer_reduction)
2491 /* There is a reduction on outer_var both on this loop and on
2492 some enclosing loop. Walk up the context tree until such a
2493 loop with a reduction on outer_var is found, and complain
2494 about all intervening loops that do not have such a
2495 reduction. */
2496 struct omp_context *curr_loop = ctx->outer;
2497 bool found = false;
2498 while (curr_loop != NULL)
2500 tree curr_iter = curr_loop->local_reduction_clauses;
2501 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2503 tree curr_clause = TREE_VALUE (curr_iter);
2504 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2505 if (curr_var == local_var)
2507 found = true;
2508 break;
2511 if (!found)
2512 warning_at (gimple_location (curr_loop->stmt), 0,
2513 "nested loop in reduction needs "
2514 "reduction clause for %qE",
2515 local_var);
2516 else
2517 break;
2518 curr_loop = curr_loop->outer;
2522 ctx->local_reduction_clauses = local_reduction_clauses;
2523 ctx->outer_reduction_clauses
2524 = chainon (unshare_expr (ctx->local_reduction_clauses),
2525 ctx->outer_reduction_clauses);
2528 scan_sharing_clauses (clauses, ctx);
2530 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2531 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2533 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2534 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2535 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2536 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2538 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2539 return ctx;
2542 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2544 static void
2545 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2546 omp_context *outer_ctx)
2548 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2549 gsi_replace (gsi, bind, false);
2550 gimple_seq seq = NULL;
2551 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2552 tree cond = create_tmp_var_raw (integer_type_node);
2553 DECL_CONTEXT (cond) = current_function_decl;
2554 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2555 gimple_bind_set_vars (bind, cond);
2556 gimple_call_set_lhs (g, cond);
2557 gimple_seq_add_stmt (&seq, g);
2558 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2559 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2560 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2561 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2562 gimple_seq_add_stmt (&seq, g);
2563 g = gimple_build_label (lab1);
2564 gimple_seq_add_stmt (&seq, g);
2565 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2566 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2567 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2568 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2569 gimple_omp_for_set_clauses (new_stmt, clause);
2570 gimple_seq_add_stmt (&seq, new_stmt);
2571 g = gimple_build_goto (lab3);
2572 gimple_seq_add_stmt (&seq, g);
2573 g = gimple_build_label (lab2);
2574 gimple_seq_add_stmt (&seq, g);
2575 gimple_seq_add_stmt (&seq, stmt);
2576 g = gimple_build_label (lab3);
2577 gimple_seq_add_stmt (&seq, g);
2578 gimple_bind_set_body (bind, seq);
2579 update_stmt (bind);
2580 scan_omp_for (new_stmt, outer_ctx);
2581 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2584 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2585 struct walk_stmt_info *);
2586 static omp_context *maybe_lookup_ctx (gimple *);
2588 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2589 for scan phase loop. */
2591 static void
2592 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2593 omp_context *outer_ctx)
2595 /* The only change between inclusive and exclusive scan will be
2596 within the first simd loop, so just use inclusive in the
2597 worksharing loop. */
2598 outer_ctx->scan_inclusive = true;
2599 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2600 OMP_CLAUSE_DECL (c) = integer_zero_node;
2602 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2603 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2604 gsi_replace (gsi, input_stmt, false);
2605 gimple_seq input_body = NULL;
2606 gimple_seq_add_stmt (&input_body, stmt);
2607 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2609 gimple_stmt_iterator input1_gsi = gsi_none ();
2610 struct walk_stmt_info wi;
2611 memset (&wi, 0, sizeof (wi));
2612 wi.val_only = true;
2613 wi.info = (void *) &input1_gsi;
2614 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2615 gcc_assert (!gsi_end_p (input1_gsi));
2617 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2618 gsi_next (&input1_gsi);
2619 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2620 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2621 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2622 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2623 std::swap (input_stmt1, scan_stmt1);
2625 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2626 gimple_omp_set_body (input_stmt1, NULL);
2628 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2629 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2631 gimple_omp_set_body (input_stmt1, input_body1);
2632 gimple_omp_set_body (scan_stmt1, NULL);
2634 gimple_stmt_iterator input2_gsi = gsi_none ();
2635 memset (&wi, 0, sizeof (wi));
2636 wi.val_only = true;
2637 wi.info = (void *) &input2_gsi;
2638 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2639 NULL, &wi);
2640 gcc_assert (!gsi_end_p (input2_gsi));
2642 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2643 gsi_next (&input2_gsi);
2644 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2645 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2646 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2647 std::swap (input_stmt2, scan_stmt2);
2649 gimple_omp_set_body (input_stmt2, NULL);
2651 gimple_omp_set_body (input_stmt, input_body);
2652 gimple_omp_set_body (scan_stmt, scan_body);
2654 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2655 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2657 ctx = new_omp_context (scan_stmt, outer_ctx);
2658 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2660 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2663 /* Scan an OpenMP sections directive. */
2665 static void
2666 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2668 omp_context *ctx;
2670 ctx = new_omp_context (stmt, outer_ctx);
2671 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2672 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2675 /* Scan an OpenMP single directive. */
2677 static void
2678 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2680 omp_context *ctx;
2681 tree name;
2683 ctx = new_omp_context (stmt, outer_ctx);
2684 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2685 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2686 name = create_tmp_var_name (".omp_copy_s");
2687 name = build_decl (gimple_location (stmt),
2688 TYPE_DECL, name, ctx->record_type);
2689 TYPE_NAME (ctx->record_type) = name;
2691 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2692 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2694 if (TYPE_FIELDS (ctx->record_type) == NULL)
2695 ctx->record_type = NULL;
2696 else
2697 layout_type (ctx->record_type);
2700 /* Scan a GIMPLE_OMP_TARGET. */
2702 static void
2703 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2705 omp_context *ctx;
2706 tree name;
2707 bool offloaded = is_gimple_omp_offloaded (stmt);
2708 tree clauses = gimple_omp_target_clauses (stmt);
2710 ctx = new_omp_context (stmt, outer_ctx);
2711 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2712 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2713 name = create_tmp_var_name (".omp_data_t");
2714 name = build_decl (gimple_location (stmt),
2715 TYPE_DECL, name, ctx->record_type);
2716 DECL_ARTIFICIAL (name) = 1;
2717 DECL_NAMELESS (name) = 1;
2718 TYPE_NAME (ctx->record_type) = name;
2719 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2721 if (offloaded)
2723 create_omp_child_function (ctx, false);
2724 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2727 scan_sharing_clauses (clauses, ctx);
2728 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2730 if (TYPE_FIELDS (ctx->record_type) == NULL)
2731 ctx->record_type = ctx->receiver_decl = NULL;
2732 else
2734 TYPE_FIELDS (ctx->record_type)
2735 = nreverse (TYPE_FIELDS (ctx->record_type));
2736 if (flag_checking)
2738 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2739 for (tree field = TYPE_FIELDS (ctx->record_type);
2740 field;
2741 field = DECL_CHAIN (field))
2742 gcc_assert (DECL_ALIGN (field) == align);
2744 layout_type (ctx->record_type);
2745 if (offloaded)
2746 fixup_child_record_type (ctx);
2750 /* Scan an OpenMP teams directive. */
2752 static void
2753 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2755 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2757 if (!gimple_omp_teams_host (stmt))
2759 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2760 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2761 return;
2763 taskreg_contexts.safe_push (ctx);
2764 gcc_assert (taskreg_nesting_level == 1);
2765 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2766 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2767 tree name = create_tmp_var_name (".omp_data_s");
2768 name = build_decl (gimple_location (stmt),
2769 TYPE_DECL, name, ctx->record_type);
2770 DECL_ARTIFICIAL (name) = 1;
2771 DECL_NAMELESS (name) = 1;
2772 TYPE_NAME (ctx->record_type) = name;
2773 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2774 create_omp_child_function (ctx, false);
2775 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2777 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2778 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2780 if (TYPE_FIELDS (ctx->record_type) == NULL)
2781 ctx->record_type = ctx->receiver_decl = NULL;
2784 /* Check nesting restrictions. */
2785 static bool
2786 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2788 tree c;
2790 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2791 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2792 the original copy of its contents. */
2793 return true;
2795 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2796 inside an OpenACC CTX. */
2797 if (!(is_gimple_omp (stmt)
2798 && is_gimple_omp_oacc (stmt))
2799 /* Except for atomic codes that we share with OpenMP. */
2800 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2801 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2803 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2805 error_at (gimple_location (stmt),
2806 "non-OpenACC construct inside of OpenACC routine");
2807 return false;
2809 else
2810 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2811 if (is_gimple_omp (octx->stmt)
2812 && is_gimple_omp_oacc (octx->stmt))
2814 error_at (gimple_location (stmt),
2815 "non-OpenACC construct inside of OpenACC region");
2816 return false;
2820 if (ctx != NULL)
2822 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2823 && ctx->outer
2824 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2825 ctx = ctx->outer;
2826 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2827 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2828 && !ctx->loop_p)
2830 c = NULL_TREE;
2831 if (ctx->order_concurrent
2832 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2833 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2834 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2836 error_at (gimple_location (stmt),
2837 "OpenMP constructs other than %<parallel%>, %<loop%>"
2838 " or %<simd%> may not be nested inside a region with"
2839 " the %<order(concurrent)%> clause");
2840 return false;
2842 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2844 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2845 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2847 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2848 && (ctx->outer == NULL
2849 || !gimple_omp_for_combined_into_p (ctx->stmt)
2850 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2851 || (gimple_omp_for_kind (ctx->outer->stmt)
2852 != GF_OMP_FOR_KIND_FOR)
2853 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2855 error_at (gimple_location (stmt),
2856 "%<ordered simd threads%> must be closely "
2857 "nested inside of %<for simd%> region");
2858 return false;
2860 return true;
2863 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2864 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2865 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2866 return true;
2867 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2868 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2869 return true;
2870 error_at (gimple_location (stmt),
2871 "OpenMP constructs other than "
2872 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2873 "not be nested inside %<simd%> region");
2874 return false;
2876 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2878 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2879 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2880 && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
2881 && omp_find_clause (gimple_omp_for_clauses (stmt),
2882 OMP_CLAUSE_BIND) == NULL_TREE))
2883 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2885 error_at (gimple_location (stmt),
2886 "only %<distribute%>, %<parallel%> or %<loop%> "
2887 "regions are allowed to be strictly nested inside "
2888 "%<teams%> region");
2889 return false;
2892 else if (ctx->order_concurrent
2893 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2894 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2895 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2896 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2898 if (ctx->loop_p)
2899 error_at (gimple_location (stmt),
2900 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2901 "%<simd%> may not be nested inside a %<loop%> region");
2902 else
2903 error_at (gimple_location (stmt),
2904 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2905 "%<simd%> may not be nested inside a region with "
2906 "the %<order(concurrent)%> clause");
2907 return false;
2910 switch (gimple_code (stmt))
2912 case GIMPLE_OMP_FOR:
2913 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2914 return true;
2915 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2917 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2919 error_at (gimple_location (stmt),
2920 "%<distribute%> region must be strictly nested "
2921 "inside %<teams%> construct");
2922 return false;
2924 return true;
2926 /* We split taskloop into task and nested taskloop in it. */
2927 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2928 return true;
2929 /* For now, hope this will change and loop bind(parallel) will not
2930 be allowed in lots of contexts. */
2931 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2932 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2933 return true;
2934 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2936 bool ok = false;
2938 if (ctx)
2939 switch (gimple_code (ctx->stmt))
2941 case GIMPLE_OMP_FOR:
2942 ok = (gimple_omp_for_kind (ctx->stmt)
2943 == GF_OMP_FOR_KIND_OACC_LOOP);
2944 break;
2946 case GIMPLE_OMP_TARGET:
2947 switch (gimple_omp_target_kind (ctx->stmt))
2949 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2950 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2951 case GF_OMP_TARGET_KIND_OACC_SERIAL:
2952 ok = true;
2953 break;
2955 default:
2956 break;
2959 default:
2960 break;
2962 else if (oacc_get_fn_attrib (current_function_decl))
2963 ok = true;
2964 if (!ok)
2966 error_at (gimple_location (stmt),
2967 "OpenACC loop directive must be associated with"
2968 " an OpenACC compute region");
2969 return false;
2972 /* FALLTHRU */
2973 case GIMPLE_CALL:
2974 if (is_gimple_call (stmt)
2975 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2976 == BUILT_IN_GOMP_CANCEL
2977 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2978 == BUILT_IN_GOMP_CANCELLATION_POINT))
2980 const char *bad = NULL;
2981 const char *kind = NULL;
2982 const char *construct
2983 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2984 == BUILT_IN_GOMP_CANCEL)
2985 ? "cancel"
2986 : "cancellation point";
2987 if (ctx == NULL)
2989 error_at (gimple_location (stmt), "orphaned %qs construct",
2990 construct);
2991 return false;
2993 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2994 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2995 : 0)
2997 case 1:
2998 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2999 bad = "parallel";
3000 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3001 == BUILT_IN_GOMP_CANCEL
3002 && !integer_zerop (gimple_call_arg (stmt, 1)))
3003 ctx->cancellable = true;
3004 kind = "parallel";
3005 break;
3006 case 2:
3007 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3008 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3009 bad = "for";
3010 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3011 == BUILT_IN_GOMP_CANCEL
3012 && !integer_zerop (gimple_call_arg (stmt, 1)))
3014 ctx->cancellable = true;
3015 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3016 OMP_CLAUSE_NOWAIT))
3017 warning_at (gimple_location (stmt), 0,
3018 "%<cancel for%> inside "
3019 "%<nowait%> for construct");
3020 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3021 OMP_CLAUSE_ORDERED))
3022 warning_at (gimple_location (stmt), 0,
3023 "%<cancel for%> inside "
3024 "%<ordered%> for construct");
3026 kind = "for";
3027 break;
3028 case 4:
3029 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3030 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3031 bad = "sections";
3032 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3033 == BUILT_IN_GOMP_CANCEL
3034 && !integer_zerop (gimple_call_arg (stmt, 1)))
3036 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3038 ctx->cancellable = true;
3039 if (omp_find_clause (gimple_omp_sections_clauses
3040 (ctx->stmt),
3041 OMP_CLAUSE_NOWAIT))
3042 warning_at (gimple_location (stmt), 0,
3043 "%<cancel sections%> inside "
3044 "%<nowait%> sections construct");
3046 else
3048 gcc_assert (ctx->outer
3049 && gimple_code (ctx->outer->stmt)
3050 == GIMPLE_OMP_SECTIONS);
3051 ctx->outer->cancellable = true;
3052 if (omp_find_clause (gimple_omp_sections_clauses
3053 (ctx->outer->stmt),
3054 OMP_CLAUSE_NOWAIT))
3055 warning_at (gimple_location (stmt), 0,
3056 "%<cancel sections%> inside "
3057 "%<nowait%> sections construct");
3060 kind = "sections";
3061 break;
3062 case 8:
3063 if (!is_task_ctx (ctx)
3064 && (!is_taskloop_ctx (ctx)
3065 || ctx->outer == NULL
3066 || !is_task_ctx (ctx->outer)))
3067 bad = "task";
3068 else
3070 for (omp_context *octx = ctx->outer;
3071 octx; octx = octx->outer)
3073 switch (gimple_code (octx->stmt))
3075 case GIMPLE_OMP_TASKGROUP:
3076 break;
3077 case GIMPLE_OMP_TARGET:
3078 if (gimple_omp_target_kind (octx->stmt)
3079 != GF_OMP_TARGET_KIND_REGION)
3080 continue;
3081 /* FALLTHRU */
3082 case GIMPLE_OMP_PARALLEL:
3083 case GIMPLE_OMP_TEAMS:
3084 error_at (gimple_location (stmt),
3085 "%<%s taskgroup%> construct not closely "
3086 "nested inside of %<taskgroup%> region",
3087 construct);
3088 return false;
3089 case GIMPLE_OMP_TASK:
3090 if (gimple_omp_task_taskloop_p (octx->stmt)
3091 && octx->outer
3092 && is_taskloop_ctx (octx->outer))
3094 tree clauses
3095 = gimple_omp_for_clauses (octx->outer->stmt);
3096 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3097 break;
3099 continue;
3100 default:
3101 continue;
3103 break;
3105 ctx->cancellable = true;
3107 kind = "taskgroup";
3108 break;
3109 default:
3110 error_at (gimple_location (stmt), "invalid arguments");
3111 return false;
3113 if (bad)
3115 error_at (gimple_location (stmt),
3116 "%<%s %s%> construct not closely nested inside of %qs",
3117 construct, kind, bad);
3118 return false;
3121 /* FALLTHRU */
3122 case GIMPLE_OMP_SECTIONS:
3123 case GIMPLE_OMP_SINGLE:
3124 for (; ctx != NULL; ctx = ctx->outer)
3125 switch (gimple_code (ctx->stmt))
3127 case GIMPLE_OMP_FOR:
3128 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3129 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3130 break;
3131 /* FALLTHRU */
3132 case GIMPLE_OMP_SECTIONS:
3133 case GIMPLE_OMP_SINGLE:
3134 case GIMPLE_OMP_ORDERED:
3135 case GIMPLE_OMP_MASTER:
3136 case GIMPLE_OMP_TASK:
3137 case GIMPLE_OMP_CRITICAL:
3138 if (is_gimple_call (stmt))
3140 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3141 != BUILT_IN_GOMP_BARRIER)
3142 return true;
3143 error_at (gimple_location (stmt),
3144 "barrier region may not be closely nested inside "
3145 "of work-sharing, %<loop%>, %<critical%>, "
3146 "%<ordered%>, %<master%>, explicit %<task%> or "
3147 "%<taskloop%> region");
3148 return false;
3150 error_at (gimple_location (stmt),
3151 "work-sharing region may not be closely nested inside "
3152 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3153 "%<master%>, explicit %<task%> or %<taskloop%> region");
3154 return false;
3155 case GIMPLE_OMP_PARALLEL:
3156 case GIMPLE_OMP_TEAMS:
3157 return true;
3158 case GIMPLE_OMP_TARGET:
3159 if (gimple_omp_target_kind (ctx->stmt)
3160 == GF_OMP_TARGET_KIND_REGION)
3161 return true;
3162 break;
3163 default:
3164 break;
3166 break;
3167 case GIMPLE_OMP_MASTER:
3168 for (; ctx != NULL; ctx = ctx->outer)
3169 switch (gimple_code (ctx->stmt))
3171 case GIMPLE_OMP_FOR:
3172 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3173 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3174 break;
3175 /* FALLTHRU */
3176 case GIMPLE_OMP_SECTIONS:
3177 case GIMPLE_OMP_SINGLE:
3178 case GIMPLE_OMP_TASK:
3179 error_at (gimple_location (stmt),
3180 "%<master%> region may not be closely nested inside "
3181 "of work-sharing, %<loop%>, explicit %<task%> or "
3182 "%<taskloop%> region");
3183 return false;
3184 case GIMPLE_OMP_PARALLEL:
3185 case GIMPLE_OMP_TEAMS:
3186 return true;
3187 case GIMPLE_OMP_TARGET:
3188 if (gimple_omp_target_kind (ctx->stmt)
3189 == GF_OMP_TARGET_KIND_REGION)
3190 return true;
3191 break;
3192 default:
3193 break;
3195 break;
3196 case GIMPLE_OMP_TASK:
3197 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3198 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3199 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3200 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3202 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3203 error_at (OMP_CLAUSE_LOCATION (c),
3204 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3205 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3206 return false;
3208 break;
3209 case GIMPLE_OMP_ORDERED:
3210 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3211 c; c = OMP_CLAUSE_CHAIN (c))
3213 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3215 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3216 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3217 continue;
3219 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3220 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3221 || kind == OMP_CLAUSE_DEPEND_SINK)
3223 tree oclause;
3224 /* Look for containing ordered(N) loop. */
3225 if (ctx == NULL
3226 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3227 || (oclause
3228 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3229 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3231 error_at (OMP_CLAUSE_LOCATION (c),
3232 "%<ordered%> construct with %<depend%> clause "
3233 "must be closely nested inside an %<ordered%> "
3234 "loop");
3235 return false;
3237 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3239 error_at (OMP_CLAUSE_LOCATION (c),
3240 "%<ordered%> construct with %<depend%> clause "
3241 "must be closely nested inside a loop with "
3242 "%<ordered%> clause with a parameter");
3243 return false;
3246 else
3248 error_at (OMP_CLAUSE_LOCATION (c),
3249 "invalid depend kind in omp %<ordered%> %<depend%>");
3250 return false;
3253 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3254 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3256 /* ordered simd must be closely nested inside of simd region,
3257 and simd region must not encounter constructs other than
3258 ordered simd, therefore ordered simd may be either orphaned,
3259 or ctx->stmt must be simd. The latter case is handled already
3260 earlier. */
3261 if (ctx != NULL)
3263 error_at (gimple_location (stmt),
3264 "%<ordered%> %<simd%> must be closely nested inside "
3265 "%<simd%> region");
3266 return false;
3269 for (; ctx != NULL; ctx = ctx->outer)
3270 switch (gimple_code (ctx->stmt))
3272 case GIMPLE_OMP_CRITICAL:
3273 case GIMPLE_OMP_TASK:
3274 case GIMPLE_OMP_ORDERED:
3275 ordered_in_taskloop:
3276 error_at (gimple_location (stmt),
3277 "%<ordered%> region may not be closely nested inside "
3278 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3279 "%<taskloop%> region");
3280 return false;
3281 case GIMPLE_OMP_FOR:
3282 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3283 goto ordered_in_taskloop;
3284 tree o;
3285 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3286 OMP_CLAUSE_ORDERED);
3287 if (o == NULL)
3289 error_at (gimple_location (stmt),
3290 "%<ordered%> region must be closely nested inside "
3291 "a loop region with an %<ordered%> clause");
3292 return false;
3294 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3295 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3297 error_at (gimple_location (stmt),
3298 "%<ordered%> region without %<depend%> clause may "
3299 "not be closely nested inside a loop region with "
3300 "an %<ordered%> clause with a parameter");
3301 return false;
3303 return true;
3304 case GIMPLE_OMP_TARGET:
3305 if (gimple_omp_target_kind (ctx->stmt)
3306 != GF_OMP_TARGET_KIND_REGION)
3307 break;
3308 /* FALLTHRU */
3309 case GIMPLE_OMP_PARALLEL:
3310 case GIMPLE_OMP_TEAMS:
3311 error_at (gimple_location (stmt),
3312 "%<ordered%> region must be closely nested inside "
3313 "a loop region with an %<ordered%> clause");
3314 return false;
3315 default:
3316 break;
3318 break;
3319 case GIMPLE_OMP_CRITICAL:
3321 tree this_stmt_name
3322 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3323 for (; ctx != NULL; ctx = ctx->outer)
3324 if (gomp_critical *other_crit
3325 = dyn_cast <gomp_critical *> (ctx->stmt))
3326 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3328 error_at (gimple_location (stmt),
3329 "%<critical%> region may not be nested inside "
3330 "a %<critical%> region with the same name");
3331 return false;
3334 break;
3335 case GIMPLE_OMP_TEAMS:
3336 if (ctx == NULL)
3337 break;
3338 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3339 || (gimple_omp_target_kind (ctx->stmt)
3340 != GF_OMP_TARGET_KIND_REGION))
3342 /* Teams construct can appear either strictly nested inside of
3343 target construct with no intervening stmts, or can be encountered
3344 only by initial task (so must not appear inside any OpenMP
3345 construct. */
3346 error_at (gimple_location (stmt),
3347 "%<teams%> construct must be closely nested inside of "
3348 "%<target%> construct or not nested in any OpenMP "
3349 "construct");
3350 return false;
3352 break;
3353 case GIMPLE_OMP_TARGET:
3354 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3355 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3356 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3357 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3359 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3360 error_at (OMP_CLAUSE_LOCATION (c),
3361 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3362 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3363 return false;
3365 if (is_gimple_omp_offloaded (stmt)
3366 && oacc_get_fn_attrib (cfun->decl) != NULL)
3368 error_at (gimple_location (stmt),
3369 "OpenACC region inside of OpenACC routine, nested "
3370 "parallelism not supported yet");
3371 return false;
3373 for (; ctx != NULL; ctx = ctx->outer)
3375 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3377 if (is_gimple_omp (stmt)
3378 && is_gimple_omp_oacc (stmt)
3379 && is_gimple_omp (ctx->stmt))
3381 error_at (gimple_location (stmt),
3382 "OpenACC construct inside of non-OpenACC region");
3383 return false;
3385 continue;
3388 const char *stmt_name, *ctx_stmt_name;
3389 switch (gimple_omp_target_kind (stmt))
3391 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3392 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3393 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3394 case GF_OMP_TARGET_KIND_ENTER_DATA:
3395 stmt_name = "target enter data"; break;
3396 case GF_OMP_TARGET_KIND_EXIT_DATA:
3397 stmt_name = "target exit data"; break;
3398 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3399 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3400 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3401 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3402 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3403 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3404 stmt_name = "enter/exit data"; break;
3405 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3406 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3407 break;
3408 default: gcc_unreachable ();
3410 switch (gimple_omp_target_kind (ctx->stmt))
3412 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3413 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3414 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3415 ctx_stmt_name = "parallel"; break;
3416 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3417 ctx_stmt_name = "kernels"; break;
3418 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3419 ctx_stmt_name = "serial"; break;
3420 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3421 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3422 ctx_stmt_name = "host_data"; break;
3423 default: gcc_unreachable ();
3426 /* OpenACC/OpenMP mismatch? */
3427 if (is_gimple_omp_oacc (stmt)
3428 != is_gimple_omp_oacc (ctx->stmt))
3430 error_at (gimple_location (stmt),
3431 "%s %qs construct inside of %s %qs region",
3432 (is_gimple_omp_oacc (stmt)
3433 ? "OpenACC" : "OpenMP"), stmt_name,
3434 (is_gimple_omp_oacc (ctx->stmt)
3435 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3436 return false;
3438 if (is_gimple_omp_offloaded (ctx->stmt))
3440 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3441 if (is_gimple_omp_oacc (ctx->stmt))
3443 error_at (gimple_location (stmt),
3444 "%qs construct inside of %qs region",
3445 stmt_name, ctx_stmt_name);
3446 return false;
3448 else
3450 warning_at (gimple_location (stmt), 0,
3451 "%qs construct inside of %qs region",
3452 stmt_name, ctx_stmt_name);
3456 break;
3457 default:
3458 break;
3460 return true;
3464 /* Helper function scan_omp.
3466 Callback for walk_tree or operators in walk_gimple_stmt used to
3467 scan for OMP directives in TP. */
3469 static tree
3470 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3472 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3473 omp_context *ctx = (omp_context *) wi->info;
3474 tree t = *tp;
3476 switch (TREE_CODE (t))
3478 case VAR_DECL:
3479 case PARM_DECL:
3480 case LABEL_DECL:
3481 case RESULT_DECL:
3482 if (ctx)
3484 tree repl = remap_decl (t, &ctx->cb);
3485 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3486 *tp = repl;
3488 break;
3490 default:
3491 if (ctx && TYPE_P (t))
3492 *tp = remap_type (t, &ctx->cb);
3493 else if (!DECL_P (t))
3495 *walk_subtrees = 1;
3496 if (ctx)
3498 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3499 if (tem != TREE_TYPE (t))
3501 if (TREE_CODE (t) == INTEGER_CST)
3502 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3503 else
3504 TREE_TYPE (t) = tem;
3508 break;
3511 return NULL_TREE;
3514 /* Return true if FNDECL is a setjmp or a longjmp. */
3516 static bool
3517 setjmp_or_longjmp_p (const_tree fndecl)
3519 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3520 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3521 return true;
3523 tree declname = DECL_NAME (fndecl);
3524 if (!declname
3525 || (DECL_CONTEXT (fndecl) != NULL_TREE
3526 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3527 || !TREE_PUBLIC (fndecl))
3528 return false;
3530 const char *name = IDENTIFIER_POINTER (declname);
3531 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3534 /* Return true if FNDECL is an omp_* runtime API call. */
3536 static bool
3537 omp_runtime_api_call (const_tree fndecl)
3539 tree declname = DECL_NAME (fndecl);
3540 if (!declname
3541 || (DECL_CONTEXT (fndecl) != NULL_TREE
3542 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3543 || !TREE_PUBLIC (fndecl))
3544 return false;
3546 const char *name = IDENTIFIER_POINTER (declname);
3547 if (strncmp (name, "omp_", 4) != 0)
3548 return false;
3550 static const char *omp_runtime_apis[] =
3552 /* This array has 3 sections. First omp_* calls that don't
3553 have any suffixes. */
3554 "target_alloc",
3555 "target_associate_ptr",
3556 "target_disassociate_ptr",
3557 "target_free",
3558 "target_is_present",
3559 "target_memcpy",
3560 "target_memcpy_rect",
3561 NULL,
3562 /* Now omp_* calls that are available as omp_* and omp_*_. */
3563 "capture_affinity",
3564 "destroy_lock",
3565 "destroy_nest_lock",
3566 "display_affinity",
3567 "get_active_level",
3568 "get_affinity_format",
3569 "get_cancellation",
3570 "get_default_device",
3571 "get_dynamic",
3572 "get_initial_device",
3573 "get_level",
3574 "get_max_active_levels",
3575 "get_max_task_priority",
3576 "get_max_threads",
3577 "get_nested",
3578 "get_num_devices",
3579 "get_num_places",
3580 "get_num_procs",
3581 "get_num_teams",
3582 "get_num_threads",
3583 "get_partition_num_places",
3584 "get_place_num",
3585 "get_proc_bind",
3586 "get_team_num",
3587 "get_thread_limit",
3588 "get_thread_num",
3589 "get_wtick",
3590 "get_wtime",
3591 "in_final",
3592 "in_parallel",
3593 "init_lock",
3594 "init_nest_lock",
3595 "is_initial_device",
3596 "pause_resource",
3597 "pause_resource_all",
3598 "set_affinity_format",
3599 "set_lock",
3600 "set_nest_lock",
3601 "test_lock",
3602 "test_nest_lock",
3603 "unset_lock",
3604 "unset_nest_lock",
3605 NULL,
3606 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3607 "get_ancestor_thread_num",
3608 "get_partition_place_nums",
3609 "get_place_num_procs",
3610 "get_place_proc_ids",
3611 "get_schedule",
3612 "get_team_size",
3613 "set_default_device",
3614 "set_dynamic",
3615 "set_max_active_levels",
3616 "set_nested",
3617 "set_num_threads",
3618 "set_schedule"
3621 int mode = 0;
3622 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3624 if (omp_runtime_apis[i] == NULL)
3626 mode++;
3627 continue;
3629 size_t len = strlen (omp_runtime_apis[i]);
3630 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3631 && (name[4 + len] == '\0'
3632 || (mode > 0
3633 && name[4 + len] == '_'
3634 && (name[4 + len + 1] == '\0'
3635 || (mode > 1
3636 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3637 return true;
3639 return false;
3642 /* Helper function for scan_omp.
3644 Callback for walk_gimple_stmt used to scan for OMP directives in
3645 the current statement in GSI. */
3647 static tree
3648 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3649 struct walk_stmt_info *wi)
3651 gimple *stmt = gsi_stmt (*gsi);
3652 omp_context *ctx = (omp_context *) wi->info;
3654 if (gimple_has_location (stmt))
3655 input_location = gimple_location (stmt);
3657 /* Check the nesting restrictions. */
3658 bool remove = false;
3659 if (is_gimple_omp (stmt))
3660 remove = !check_omp_nesting_restrictions (stmt, ctx);
3661 else if (is_gimple_call (stmt))
3663 tree fndecl = gimple_call_fndecl (stmt);
3664 if (fndecl)
3666 if (ctx
3667 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3668 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3669 && setjmp_or_longjmp_p (fndecl)
3670 && !ctx->loop_p)
3672 remove = true;
3673 error_at (gimple_location (stmt),
3674 "setjmp/longjmp inside %<simd%> construct");
3676 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3677 switch (DECL_FUNCTION_CODE (fndecl))
3679 case BUILT_IN_GOMP_BARRIER:
3680 case BUILT_IN_GOMP_CANCEL:
3681 case BUILT_IN_GOMP_CANCELLATION_POINT:
3682 case BUILT_IN_GOMP_TASKYIELD:
3683 case BUILT_IN_GOMP_TASKWAIT:
3684 case BUILT_IN_GOMP_TASKGROUP_START:
3685 case BUILT_IN_GOMP_TASKGROUP_END:
3686 remove = !check_omp_nesting_restrictions (stmt, ctx);
3687 break;
3688 default:
3689 break;
3691 else if (ctx)
3693 omp_context *octx = ctx;
3694 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3695 octx = ctx->outer;
3696 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3698 remove = true;
3699 error_at (gimple_location (stmt),
3700 "OpenMP runtime API call %qD in a region with "
3701 "%<order(concurrent)%> clause", fndecl);
3706 if (remove)
3708 stmt = gimple_build_nop ();
3709 gsi_replace (gsi, stmt, false);
3712 *handled_ops_p = true;
3714 switch (gimple_code (stmt))
3716 case GIMPLE_OMP_PARALLEL:
3717 taskreg_nesting_level++;
3718 scan_omp_parallel (gsi, ctx);
3719 taskreg_nesting_level--;
3720 break;
3722 case GIMPLE_OMP_TASK:
3723 taskreg_nesting_level++;
3724 scan_omp_task (gsi, ctx);
3725 taskreg_nesting_level--;
3726 break;
3728 case GIMPLE_OMP_FOR:
3729 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3730 == GF_OMP_FOR_KIND_SIMD)
3731 && gimple_omp_for_combined_into_p (stmt)
3732 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3734 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3735 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3736 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3738 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3739 break;
3742 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3743 == GF_OMP_FOR_KIND_SIMD)
3744 && omp_maybe_offloaded_ctx (ctx)
3745 && omp_max_simt_vf ())
3746 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3747 else
3748 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3749 break;
3751 case GIMPLE_OMP_SECTIONS:
3752 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3753 break;
3755 case GIMPLE_OMP_SINGLE:
3756 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3757 break;
3759 case GIMPLE_OMP_SCAN:
3760 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3762 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3763 ctx->scan_inclusive = true;
3764 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3765 ctx->scan_exclusive = true;
3767 /* FALLTHRU */
3768 case GIMPLE_OMP_SECTION:
3769 case GIMPLE_OMP_MASTER:
3770 case GIMPLE_OMP_ORDERED:
3771 case GIMPLE_OMP_CRITICAL:
3772 case GIMPLE_OMP_GRID_BODY:
3773 ctx = new_omp_context (stmt, ctx);
3774 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3775 break;
3777 case GIMPLE_OMP_TASKGROUP:
3778 ctx = new_omp_context (stmt, ctx);
3779 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3780 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3781 break;
3783 case GIMPLE_OMP_TARGET:
3784 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3785 break;
3787 case GIMPLE_OMP_TEAMS:
3788 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3790 taskreg_nesting_level++;
3791 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3792 taskreg_nesting_level--;
3794 else
3795 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3796 break;
3798 case GIMPLE_BIND:
3800 tree var;
3802 *handled_ops_p = false;
3803 if (ctx)
3804 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3805 var ;
3806 var = DECL_CHAIN (var))
3807 insert_decl_map (&ctx->cb, var, var);
3809 break;
3810 default:
3811 *handled_ops_p = false;
3812 break;
3815 return NULL_TREE;
3819 /* Scan all the statements starting at the current statement. CTX
3820 contains context information about the OMP directives and
3821 clauses found during the scan. */
3823 static void
3824 scan_omp (gimple_seq *body_p, omp_context *ctx)
3826 location_t saved_location;
3827 struct walk_stmt_info wi;
3829 memset (&wi, 0, sizeof (wi));
3830 wi.info = ctx;
3831 wi.want_locations = true;
3833 saved_location = input_location;
3834 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3835 input_location = saved_location;
3838 /* Re-gimplification and code generation routines. */
3840 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3841 of BIND if in a method. */
3843 static void
3844 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3846 if (DECL_ARGUMENTS (current_function_decl)
3847 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3848 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3849 == POINTER_TYPE))
3851 tree vars = gimple_bind_vars (bind);
3852 for (tree *pvar = &vars; *pvar; )
3853 if (omp_member_access_dummy_var (*pvar))
3854 *pvar = DECL_CHAIN (*pvar);
3855 else
3856 pvar = &DECL_CHAIN (*pvar);
3857 gimple_bind_set_vars (bind, vars);
3861 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3862 block and its subblocks. */
3864 static void
3865 remove_member_access_dummy_vars (tree block)
3867 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3868 if (omp_member_access_dummy_var (*pvar))
3869 *pvar = DECL_CHAIN (*pvar);
3870 else
3871 pvar = &DECL_CHAIN (*pvar);
3873 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3874 remove_member_access_dummy_vars (block);
3877 /* If a context was created for STMT when it was scanned, return it. */
3879 static omp_context *
3880 maybe_lookup_ctx (gimple *stmt)
3882 splay_tree_node n;
3883 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3884 return n ? (omp_context *) n->value : NULL;
3888 /* Find the mapping for DECL in CTX or the immediately enclosing
3889 context that has a mapping for DECL.
3891 If CTX is a nested parallel directive, we may have to use the decl
3892 mappings created in CTX's parent context. Suppose that we have the
3893 following parallel nesting (variable UIDs showed for clarity):
3895 iD.1562 = 0;
3896 #omp parallel shared(iD.1562) -> outer parallel
3897 iD.1562 = iD.1562 + 1;
3899 #omp parallel shared (iD.1562) -> inner parallel
3900 iD.1562 = iD.1562 - 1;
3902 Each parallel structure will create a distinct .omp_data_s structure
3903 for copying iD.1562 in/out of the directive:
3905 outer parallel .omp_data_s.1.i -> iD.1562
3906 inner parallel .omp_data_s.2.i -> iD.1562
3908 A shared variable mapping will produce a copy-out operation before
3909 the parallel directive and a copy-in operation after it. So, in
3910 this case we would have:
3912 iD.1562 = 0;
3913 .omp_data_o.1.i = iD.1562;
3914 #omp parallel shared(iD.1562) -> outer parallel
3915 .omp_data_i.1 = &.omp_data_o.1
3916 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3918 .omp_data_o.2.i = iD.1562; -> **
3919 #omp parallel shared(iD.1562) -> inner parallel
3920 .omp_data_i.2 = &.omp_data_o.2
3921 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3924 ** This is a problem. The symbol iD.1562 cannot be referenced
3925 inside the body of the outer parallel region. But since we are
3926 emitting this copy operation while expanding the inner parallel
3927 directive, we need to access the CTX structure of the outer
3928 parallel directive to get the correct mapping:
3930 .omp_data_o.2.i = .omp_data_i.1->i
3932 Since there may be other workshare or parallel directives enclosing
3933 the parallel directive, it may be necessary to walk up the context
3934 parent chain. This is not a problem in general because nested
3935 parallelism happens only rarely. */
3937 static tree
3938 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3940 tree t;
3941 omp_context *up;
3943 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3944 t = maybe_lookup_decl (decl, up);
3946 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3948 return t ? t : decl;
3952 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3953 in outer contexts. */
3955 static tree
3956 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3958 tree t = NULL;
3959 omp_context *up;
3961 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3962 t = maybe_lookup_decl (decl, up);
3964 return t ? t : decl;
3968 /* Construct the initialization value for reduction operation OP. */
3970 tree
3971 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3973 switch (op)
3975 case PLUS_EXPR:
3976 case MINUS_EXPR:
3977 case BIT_IOR_EXPR:
3978 case BIT_XOR_EXPR:
3979 case TRUTH_OR_EXPR:
3980 case TRUTH_ORIF_EXPR:
3981 case TRUTH_XOR_EXPR:
3982 case NE_EXPR:
3983 return build_zero_cst (type);
3985 case MULT_EXPR:
3986 case TRUTH_AND_EXPR:
3987 case TRUTH_ANDIF_EXPR:
3988 case EQ_EXPR:
3989 return fold_convert_loc (loc, type, integer_one_node);
3991 case BIT_AND_EXPR:
3992 return fold_convert_loc (loc, type, integer_minus_one_node);
3994 case MAX_EXPR:
3995 if (SCALAR_FLOAT_TYPE_P (type))
3997 REAL_VALUE_TYPE max, min;
3998 if (HONOR_INFINITIES (type))
4000 real_inf (&max);
4001 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4003 else
4004 real_maxval (&min, 1, TYPE_MODE (type));
4005 return build_real (type, min);
4007 else if (POINTER_TYPE_P (type))
4009 wide_int min
4010 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4011 return wide_int_to_tree (type, min);
4013 else
4015 gcc_assert (INTEGRAL_TYPE_P (type));
4016 return TYPE_MIN_VALUE (type);
4019 case MIN_EXPR:
4020 if (SCALAR_FLOAT_TYPE_P (type))
4022 REAL_VALUE_TYPE max;
4023 if (HONOR_INFINITIES (type))
4024 real_inf (&max);
4025 else
4026 real_maxval (&max, 0, TYPE_MODE (type));
4027 return build_real (type, max);
4029 else if (POINTER_TYPE_P (type))
4031 wide_int max
4032 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4033 return wide_int_to_tree (type, max);
4035 else
4037 gcc_assert (INTEGRAL_TYPE_P (type));
4038 return TYPE_MAX_VALUE (type);
4041 default:
4042 gcc_unreachable ();
4046 /* Construct the initialization value for reduction CLAUSE. */
4048 tree
4049 omp_reduction_init (tree clause, tree type)
4051 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4052 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4055 /* Return alignment to be assumed for var in CLAUSE, which should be
4056 OMP_CLAUSE_ALIGNED. */
4058 static tree
4059 omp_clause_aligned_alignment (tree clause)
4061 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4062 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4064 /* Otherwise return implementation defined alignment. */
4065 unsigned int al = 1;
4066 opt_scalar_mode mode_iter;
4067 auto_vector_modes modes;
4068 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4069 static enum mode_class classes[]
4070 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4071 for (int i = 0; i < 4; i += 2)
4072 /* The for loop above dictates that we only walk through scalar classes. */
4073 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4075 scalar_mode mode = mode_iter.require ();
4076 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4077 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4078 continue;
4079 machine_mode alt_vmode;
4080 for (unsigned int j = 0; j < modes.length (); ++j)
4081 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4082 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4083 vmode = alt_vmode;
4085 tree type = lang_hooks.types.type_for_mode (mode, 1);
4086 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4087 continue;
4088 type = build_vector_type_for_mode (type, vmode);
4089 /* The functions above are not allowed to return invalid modes. */
4090 gcc_assert (TYPE_MODE (type) == vmode);
4091 if (TYPE_ALIGN_UNIT (type) > al)
4092 al = TYPE_ALIGN_UNIT (type);
4094 return build_int_cst (integer_type_node, al);
4098 /* This structure is part of the interface between lower_rec_simd_input_clauses
4099 and lower_rec_input_clauses. */
4101 class omplow_simd_context {
4102 public:
4103 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4104 tree idx;
4105 tree lane;
4106 tree lastlane;
4107 vec<tree, va_heap> simt_eargs;
4108 gimple_seq simt_dlist;
4109 poly_uint64_pod max_vf;
4110 bool is_simt;
4113 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4114 privatization. */
4116 static bool
4117 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4118 omplow_simd_context *sctx, tree &ivar,
4119 tree &lvar, tree *rvar = NULL,
4120 tree *rvar2 = NULL)
4122 if (known_eq (sctx->max_vf, 0U))
4124 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4125 if (maybe_gt (sctx->max_vf, 1U))
4127 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4128 OMP_CLAUSE_SAFELEN);
4129 if (c)
4131 poly_uint64 safe_len;
4132 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4133 || maybe_lt (safe_len, 1U))
4134 sctx->max_vf = 1;
4135 else
4136 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4139 if (maybe_gt (sctx->max_vf, 1U))
4141 sctx->idx = create_tmp_var (unsigned_type_node);
4142 sctx->lane = create_tmp_var (unsigned_type_node);
4145 if (known_eq (sctx->max_vf, 1U))
4146 return false;
4148 if (sctx->is_simt)
4150 if (is_gimple_reg (new_var))
4152 ivar = lvar = new_var;
4153 return true;
4155 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4156 ivar = lvar = create_tmp_var (type);
4157 TREE_ADDRESSABLE (ivar) = 1;
4158 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4159 NULL, DECL_ATTRIBUTES (ivar));
4160 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4161 tree clobber = build_clobber (type);
4162 gimple *g = gimple_build_assign (ivar, clobber);
4163 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4165 else
4167 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4168 tree avar = create_tmp_var_raw (atype);
4169 if (TREE_ADDRESSABLE (new_var))
4170 TREE_ADDRESSABLE (avar) = 1;
4171 DECL_ATTRIBUTES (avar)
4172 = tree_cons (get_identifier ("omp simd array"), NULL,
4173 DECL_ATTRIBUTES (avar));
4174 gimple_add_tmp_var (avar);
4175 tree iavar = avar;
4176 if (rvar && !ctx->for_simd_scan_phase)
4178 /* For inscan reductions, create another array temporary,
4179 which will hold the reduced value. */
4180 iavar = create_tmp_var_raw (atype);
4181 if (TREE_ADDRESSABLE (new_var))
4182 TREE_ADDRESSABLE (iavar) = 1;
4183 DECL_ATTRIBUTES (iavar)
4184 = tree_cons (get_identifier ("omp simd array"), NULL,
4185 tree_cons (get_identifier ("omp simd inscan"), NULL,
4186 DECL_ATTRIBUTES (iavar)));
4187 gimple_add_tmp_var (iavar);
4188 ctx->cb.decl_map->put (avar, iavar);
4189 if (sctx->lastlane == NULL_TREE)
4190 sctx->lastlane = create_tmp_var (unsigned_type_node);
4191 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4192 sctx->lastlane, NULL_TREE, NULL_TREE);
4193 TREE_THIS_NOTRAP (*rvar) = 1;
4195 if (ctx->scan_exclusive)
4197 /* And for exclusive scan yet another one, which will
4198 hold the value during the scan phase. */
4199 tree savar = create_tmp_var_raw (atype);
4200 if (TREE_ADDRESSABLE (new_var))
4201 TREE_ADDRESSABLE (savar) = 1;
4202 DECL_ATTRIBUTES (savar)
4203 = tree_cons (get_identifier ("omp simd array"), NULL,
4204 tree_cons (get_identifier ("omp simd inscan "
4205 "exclusive"), NULL,
4206 DECL_ATTRIBUTES (savar)));
4207 gimple_add_tmp_var (savar);
4208 ctx->cb.decl_map->put (iavar, savar);
4209 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4210 sctx->idx, NULL_TREE, NULL_TREE);
4211 TREE_THIS_NOTRAP (*rvar2) = 1;
4214 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4215 NULL_TREE, NULL_TREE);
4216 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4217 NULL_TREE, NULL_TREE);
4218 TREE_THIS_NOTRAP (ivar) = 1;
4219 TREE_THIS_NOTRAP (lvar) = 1;
4221 if (DECL_P (new_var))
4223 SET_DECL_VALUE_EXPR (new_var, lvar);
4224 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4226 return true;
4229 /* Helper function of lower_rec_input_clauses. For a reference
4230 in simd reduction, add an underlying variable it will reference. */
4232 static void
4233 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4235 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4236 if (TREE_CONSTANT (z))
4238 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4239 get_name (new_vard));
4240 gimple_add_tmp_var (z);
4241 TREE_ADDRESSABLE (z) = 1;
4242 z = build_fold_addr_expr_loc (loc, z);
4243 gimplify_assign (new_vard, z, ilist);
4247 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4248 code to emit (type) (tskred_temp[idx]). */
4250 static tree
4251 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4252 unsigned idx)
4254 unsigned HOST_WIDE_INT sz
4255 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4256 tree r = build2 (MEM_REF, pointer_sized_int_node,
4257 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4258 idx * sz));
4259 tree v = create_tmp_var (pointer_sized_int_node);
4260 gimple *g = gimple_build_assign (v, r);
4261 gimple_seq_add_stmt (ilist, g);
4262 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4264 v = create_tmp_var (type);
4265 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4266 gimple_seq_add_stmt (ilist, g);
4268 return v;
4271 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4272 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4273 private variables. Initialization statements go in ILIST, while calls
4274 to destructors go in DLIST. */
4276 static void
4277 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4278 omp_context *ctx, struct omp_for_data *fd)
4280 tree c, copyin_seq, x, ptr;
4281 bool copyin_by_ref = false;
4282 bool lastprivate_firstprivate = false;
4283 bool reduction_omp_orig_ref = false;
4284 int pass;
4285 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4286 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4287 omplow_simd_context sctx = omplow_simd_context ();
4288 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4289 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4290 gimple_seq llist[4] = { };
4291 tree nonconst_simd_if = NULL_TREE;
4293 copyin_seq = NULL;
4294 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4296 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4297 with data sharing clauses referencing variable sized vars. That
4298 is unnecessarily hard to support and very unlikely to result in
4299 vectorized code anyway. */
4300 if (is_simd)
4301 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4302 switch (OMP_CLAUSE_CODE (c))
4304 case OMP_CLAUSE_LINEAR:
4305 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4306 sctx.max_vf = 1;
4307 /* FALLTHRU */
4308 case OMP_CLAUSE_PRIVATE:
4309 case OMP_CLAUSE_FIRSTPRIVATE:
4310 case OMP_CLAUSE_LASTPRIVATE:
4311 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4312 sctx.max_vf = 1;
4313 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4315 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4316 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4317 sctx.max_vf = 1;
4319 break;
4320 case OMP_CLAUSE_REDUCTION:
4321 case OMP_CLAUSE_IN_REDUCTION:
4322 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4323 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4324 sctx.max_vf = 1;
4325 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4327 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4328 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4329 sctx.max_vf = 1;
4331 break;
4332 case OMP_CLAUSE_IF:
4333 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4334 sctx.max_vf = 1;
4335 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4336 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4337 break;
4338 case OMP_CLAUSE_SIMDLEN:
4339 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4340 sctx.max_vf = 1;
4341 break;
4342 case OMP_CLAUSE__CONDTEMP_:
4343 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4344 if (sctx.is_simt)
4345 sctx.max_vf = 1;
4346 break;
4347 default:
4348 continue;
4351 /* Add a placeholder for simduid. */
4352 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4353 sctx.simt_eargs.safe_push (NULL_TREE);
4355 unsigned task_reduction_cnt = 0;
4356 unsigned task_reduction_cntorig = 0;
4357 unsigned task_reduction_cnt_full = 0;
4358 unsigned task_reduction_cntorig_full = 0;
4359 unsigned task_reduction_other_cnt = 0;
4360 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4361 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4362 /* Do all the fixed sized types in the first pass, and the variable sized
4363 types in the second pass. This makes sure that the scalar arguments to
4364 the variable sized types are processed before we use them in the
4365 variable sized operations. For task reductions we use 4 passes, in the
4366 first two we ignore them, in the third one gather arguments for
4367 GOMP_task_reduction_remap call and in the last pass actually handle
4368 the task reductions. */
4369 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4370 ? 4 : 2); ++pass)
4372 if (pass == 2 && task_reduction_cnt)
4374 tskred_atype
4375 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4376 + task_reduction_cntorig);
4377 tskred_avar = create_tmp_var_raw (tskred_atype);
4378 gimple_add_tmp_var (tskred_avar);
4379 TREE_ADDRESSABLE (tskred_avar) = 1;
4380 task_reduction_cnt_full = task_reduction_cnt;
4381 task_reduction_cntorig_full = task_reduction_cntorig;
4383 else if (pass == 3 && task_reduction_cnt)
4385 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4386 gimple *g
4387 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4388 size_int (task_reduction_cntorig),
4389 build_fold_addr_expr (tskred_avar));
4390 gimple_seq_add_stmt (ilist, g);
4392 if (pass == 3 && task_reduction_other_cnt)
4394 /* For reduction clauses, build
4395 tskred_base = (void *) tskred_temp[2]
4396 + omp_get_thread_num () * tskred_temp[1]
4397 or if tskred_temp[1] is known to be constant, that constant
4398 directly. This is the start of the private reduction copy block
4399 for the current thread. */
4400 tree v = create_tmp_var (integer_type_node);
4401 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4402 gimple *g = gimple_build_call (x, 0);
4403 gimple_call_set_lhs (g, v);
4404 gimple_seq_add_stmt (ilist, g);
4405 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4406 tskred_temp = OMP_CLAUSE_DECL (c);
4407 if (is_taskreg_ctx (ctx))
4408 tskred_temp = lookup_decl (tskred_temp, ctx);
4409 tree v2 = create_tmp_var (sizetype);
4410 g = gimple_build_assign (v2, NOP_EXPR, v);
4411 gimple_seq_add_stmt (ilist, g);
4412 if (ctx->task_reductions[0])
4413 v = fold_convert (sizetype, ctx->task_reductions[0]);
4414 else
4415 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4416 tree v3 = create_tmp_var (sizetype);
4417 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4418 gimple_seq_add_stmt (ilist, g);
4419 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4420 tskred_base = create_tmp_var (ptr_type_node);
4421 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4422 gimple_seq_add_stmt (ilist, g);
4424 task_reduction_cnt = 0;
4425 task_reduction_cntorig = 0;
4426 task_reduction_other_cnt = 0;
4427 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4429 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4430 tree var, new_var;
4431 bool by_ref;
4432 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4433 bool task_reduction_p = false;
4434 bool task_reduction_needs_orig_p = false;
4435 tree cond = NULL_TREE;
4437 switch (c_kind)
4439 case OMP_CLAUSE_PRIVATE:
4440 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4441 continue;
4442 break;
4443 case OMP_CLAUSE_SHARED:
4444 /* Ignore shared directives in teams construct inside
4445 of target construct. */
4446 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4447 && !is_host_teams_ctx (ctx))
4448 continue;
4449 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4451 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4452 || is_global_var (OMP_CLAUSE_DECL (c)));
4453 continue;
4455 case OMP_CLAUSE_FIRSTPRIVATE:
4456 case OMP_CLAUSE_COPYIN:
4457 break;
4458 case OMP_CLAUSE_LINEAR:
4459 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4460 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4461 lastprivate_firstprivate = true;
4462 break;
4463 case OMP_CLAUSE_REDUCTION:
4464 case OMP_CLAUSE_IN_REDUCTION:
4465 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4467 task_reduction_p = true;
4468 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4470 task_reduction_other_cnt++;
4471 if (pass == 2)
4472 continue;
4474 else
4475 task_reduction_cnt++;
4476 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4478 var = OMP_CLAUSE_DECL (c);
4479 /* If var is a global variable that isn't privatized
4480 in outer contexts, we don't need to look up the
4481 original address, it is always the address of the
4482 global variable itself. */
4483 if (!DECL_P (var)
4484 || omp_is_reference (var)
4485 || !is_global_var
4486 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4488 task_reduction_needs_orig_p = true;
4489 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4490 task_reduction_cntorig++;
4494 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4495 reduction_omp_orig_ref = true;
4496 break;
4497 case OMP_CLAUSE__REDUCTEMP_:
4498 if (!is_taskreg_ctx (ctx))
4499 continue;
4500 /* FALLTHRU */
4501 case OMP_CLAUSE__LOOPTEMP_:
4502 /* Handle _looptemp_/_reductemp_ clauses only on
4503 parallel/task. */
4504 if (fd)
4505 continue;
4506 break;
4507 case OMP_CLAUSE_LASTPRIVATE:
4508 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4510 lastprivate_firstprivate = true;
4511 if (pass != 0 || is_taskloop_ctx (ctx))
4512 continue;
4514 /* Even without corresponding firstprivate, if
4515 decl is Fortran allocatable, it needs outer var
4516 reference. */
4517 else if (pass == 0
4518 && lang_hooks.decls.omp_private_outer_ref
4519 (OMP_CLAUSE_DECL (c)))
4520 lastprivate_firstprivate = true;
4521 break;
4522 case OMP_CLAUSE_ALIGNED:
4523 if (pass != 1)
4524 continue;
4525 var = OMP_CLAUSE_DECL (c);
4526 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4527 && !is_global_var (var))
4529 new_var = maybe_lookup_decl (var, ctx);
4530 if (new_var == NULL_TREE)
4531 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4532 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4533 tree alarg = omp_clause_aligned_alignment (c);
4534 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4535 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4536 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4537 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4538 gimplify_and_add (x, ilist);
4540 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4541 && is_global_var (var))
4543 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4544 new_var = lookup_decl (var, ctx);
4545 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4546 t = build_fold_addr_expr_loc (clause_loc, t);
4547 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4548 tree alarg = omp_clause_aligned_alignment (c);
4549 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4550 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4551 t = fold_convert_loc (clause_loc, ptype, t);
4552 x = create_tmp_var (ptype);
4553 t = build2 (MODIFY_EXPR, ptype, x, t);
4554 gimplify_and_add (t, ilist);
4555 t = build_simple_mem_ref_loc (clause_loc, x);
4556 SET_DECL_VALUE_EXPR (new_var, t);
4557 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4559 continue;
4560 case OMP_CLAUSE__CONDTEMP_:
4561 if (is_parallel_ctx (ctx)
4562 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4563 break;
4564 continue;
4565 default:
4566 continue;
4569 if (task_reduction_p != (pass >= 2))
4570 continue;
4572 new_var = var = OMP_CLAUSE_DECL (c);
4573 if ((c_kind == OMP_CLAUSE_REDUCTION
4574 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4575 && TREE_CODE (var) == MEM_REF)
4577 var = TREE_OPERAND (var, 0);
4578 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4579 var = TREE_OPERAND (var, 0);
4580 if (TREE_CODE (var) == INDIRECT_REF
4581 || TREE_CODE (var) == ADDR_EXPR)
4582 var = TREE_OPERAND (var, 0);
4583 if (is_variable_sized (var))
4585 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4586 var = DECL_VALUE_EXPR (var);
4587 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4588 var = TREE_OPERAND (var, 0);
4589 gcc_assert (DECL_P (var));
4591 new_var = var;
4593 if (c_kind != OMP_CLAUSE_COPYIN)
4594 new_var = lookup_decl (var, ctx);
4596 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4598 if (pass != 0)
4599 continue;
4601 /* C/C++ array section reductions. */
4602 else if ((c_kind == OMP_CLAUSE_REDUCTION
4603 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4604 && var != OMP_CLAUSE_DECL (c))
4606 if (pass == 0)
4607 continue;
4609 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4610 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4612 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4614 tree b = TREE_OPERAND (orig_var, 1);
4615 b = maybe_lookup_decl (b, ctx);
4616 if (b == NULL)
4618 b = TREE_OPERAND (orig_var, 1);
4619 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4621 if (integer_zerop (bias))
4622 bias = b;
4623 else
4625 bias = fold_convert_loc (clause_loc,
4626 TREE_TYPE (b), bias);
4627 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4628 TREE_TYPE (b), b, bias);
4630 orig_var = TREE_OPERAND (orig_var, 0);
4632 if (pass == 2)
4634 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4635 if (is_global_var (out)
4636 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4637 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4638 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4639 != POINTER_TYPE)))
4640 x = var;
4641 else
4643 bool by_ref = use_pointer_for_field (var, NULL);
4644 x = build_receiver_ref (var, by_ref, ctx);
4645 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4646 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4647 == POINTER_TYPE))
4648 x = build_fold_addr_expr (x);
4650 if (TREE_CODE (orig_var) == INDIRECT_REF)
4651 x = build_simple_mem_ref (x);
4652 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4654 if (var == TREE_OPERAND (orig_var, 0))
4655 x = build_fold_addr_expr (x);
4657 bias = fold_convert (sizetype, bias);
4658 x = fold_convert (ptr_type_node, x);
4659 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4660 TREE_TYPE (x), x, bias);
4661 unsigned cnt = task_reduction_cnt - 1;
4662 if (!task_reduction_needs_orig_p)
4663 cnt += (task_reduction_cntorig_full
4664 - task_reduction_cntorig);
4665 else
4666 cnt = task_reduction_cntorig - 1;
4667 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4668 size_int (cnt), NULL_TREE, NULL_TREE);
4669 gimplify_assign (r, x, ilist);
4670 continue;
4673 if (TREE_CODE (orig_var) == INDIRECT_REF
4674 || TREE_CODE (orig_var) == ADDR_EXPR)
4675 orig_var = TREE_OPERAND (orig_var, 0);
4676 tree d = OMP_CLAUSE_DECL (c);
4677 tree type = TREE_TYPE (d);
4678 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4679 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4680 const char *name = get_name (orig_var);
4681 if (pass == 3)
4683 tree xv = create_tmp_var (ptr_type_node);
4684 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4686 unsigned cnt = task_reduction_cnt - 1;
4687 if (!task_reduction_needs_orig_p)
4688 cnt += (task_reduction_cntorig_full
4689 - task_reduction_cntorig);
4690 else
4691 cnt = task_reduction_cntorig - 1;
4692 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4693 size_int (cnt), NULL_TREE, NULL_TREE);
4695 gimple *g = gimple_build_assign (xv, x);
4696 gimple_seq_add_stmt (ilist, g);
4698 else
4700 unsigned int idx = *ctx->task_reduction_map->get (c);
4701 tree off;
4702 if (ctx->task_reductions[1 + idx])
4703 off = fold_convert (sizetype,
4704 ctx->task_reductions[1 + idx]);
4705 else
4706 off = task_reduction_read (ilist, tskred_temp, sizetype,
4707 7 + 3 * idx + 1);
4708 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4709 tskred_base, off);
4710 gimple_seq_add_stmt (ilist, g);
4712 x = fold_convert (build_pointer_type (boolean_type_node),
4713 xv);
4714 if (TREE_CONSTANT (v))
4715 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4716 TYPE_SIZE_UNIT (type));
4717 else
4719 tree t = maybe_lookup_decl (v, ctx);
4720 if (t)
4721 v = t;
4722 else
4723 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4724 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4725 fb_rvalue);
4726 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4727 TREE_TYPE (v), v,
4728 build_int_cst (TREE_TYPE (v), 1));
4729 t = fold_build2_loc (clause_loc, MULT_EXPR,
4730 TREE_TYPE (v), t,
4731 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4732 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4734 cond = create_tmp_var (TREE_TYPE (x));
4735 gimplify_assign (cond, x, ilist);
4736 x = xv;
4738 else if (TREE_CONSTANT (v))
4740 x = create_tmp_var_raw (type, name);
4741 gimple_add_tmp_var (x);
4742 TREE_ADDRESSABLE (x) = 1;
4743 x = build_fold_addr_expr_loc (clause_loc, x);
4745 else
4747 tree atmp
4748 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4749 tree t = maybe_lookup_decl (v, ctx);
4750 if (t)
4751 v = t;
4752 else
4753 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4754 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4755 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4756 TREE_TYPE (v), v,
4757 build_int_cst (TREE_TYPE (v), 1));
4758 t = fold_build2_loc (clause_loc, MULT_EXPR,
4759 TREE_TYPE (v), t,
4760 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4761 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4762 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4765 tree ptype = build_pointer_type (TREE_TYPE (type));
4766 x = fold_convert_loc (clause_loc, ptype, x);
4767 tree y = create_tmp_var (ptype, name);
4768 gimplify_assign (y, x, ilist);
4769 x = y;
4770 tree yb = y;
4772 if (!integer_zerop (bias))
4774 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4775 bias);
4776 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4778 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4779 pointer_sized_int_node, yb, bias);
4780 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4781 yb = create_tmp_var (ptype, name);
4782 gimplify_assign (yb, x, ilist);
4783 x = yb;
4786 d = TREE_OPERAND (d, 0);
4787 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4788 d = TREE_OPERAND (d, 0);
4789 if (TREE_CODE (d) == ADDR_EXPR)
4791 if (orig_var != var)
4793 gcc_assert (is_variable_sized (orig_var));
4794 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4796 gimplify_assign (new_var, x, ilist);
4797 tree new_orig_var = lookup_decl (orig_var, ctx);
4798 tree t = build_fold_indirect_ref (new_var);
4799 DECL_IGNORED_P (new_var) = 0;
4800 TREE_THIS_NOTRAP (t) = 1;
4801 SET_DECL_VALUE_EXPR (new_orig_var, t);
4802 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4804 else
4806 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4807 build_int_cst (ptype, 0));
4808 SET_DECL_VALUE_EXPR (new_var, x);
4809 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4812 else
4814 gcc_assert (orig_var == var);
4815 if (TREE_CODE (d) == INDIRECT_REF)
4817 x = create_tmp_var (ptype, name);
4818 TREE_ADDRESSABLE (x) = 1;
4819 gimplify_assign (x, yb, ilist);
4820 x = build_fold_addr_expr_loc (clause_loc, x);
4822 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4823 gimplify_assign (new_var, x, ilist);
4825 /* GOMP_taskgroup_reduction_register memsets the whole
4826 array to zero. If the initializer is zero, we don't
4827 need to initialize it again, just mark it as ever
4828 used unconditionally, i.e. cond = true. */
4829 if (cond
4830 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4831 && initializer_zerop (omp_reduction_init (c,
4832 TREE_TYPE (type))))
4834 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4835 boolean_true_node);
4836 gimple_seq_add_stmt (ilist, g);
4837 continue;
4839 tree end = create_artificial_label (UNKNOWN_LOCATION);
4840 if (cond)
4842 gimple *g;
4843 if (!is_parallel_ctx (ctx))
4845 tree condv = create_tmp_var (boolean_type_node);
4846 g = gimple_build_assign (condv,
4847 build_simple_mem_ref (cond));
4848 gimple_seq_add_stmt (ilist, g);
4849 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4850 g = gimple_build_cond (NE_EXPR, condv,
4851 boolean_false_node, end, lab1);
4852 gimple_seq_add_stmt (ilist, g);
4853 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4855 g = gimple_build_assign (build_simple_mem_ref (cond),
4856 boolean_true_node);
4857 gimple_seq_add_stmt (ilist, g);
4860 tree y1 = create_tmp_var (ptype);
4861 gimplify_assign (y1, y, ilist);
4862 tree i2 = NULL_TREE, y2 = NULL_TREE;
4863 tree body2 = NULL_TREE, end2 = NULL_TREE;
4864 tree y3 = NULL_TREE, y4 = NULL_TREE;
4865 if (task_reduction_needs_orig_p)
4867 y3 = create_tmp_var (ptype);
4868 tree ref;
4869 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4870 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4871 size_int (task_reduction_cnt_full
4872 + task_reduction_cntorig - 1),
4873 NULL_TREE, NULL_TREE);
4874 else
4876 unsigned int idx = *ctx->task_reduction_map->get (c);
4877 ref = task_reduction_read (ilist, tskred_temp, ptype,
4878 7 + 3 * idx);
4880 gimplify_assign (y3, ref, ilist);
4882 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4884 if (pass != 3)
4886 y2 = create_tmp_var (ptype);
4887 gimplify_assign (y2, y, ilist);
4889 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4891 tree ref = build_outer_var_ref (var, ctx);
4892 /* For ref build_outer_var_ref already performs this. */
4893 if (TREE_CODE (d) == INDIRECT_REF)
4894 gcc_assert (omp_is_reference (var));
4895 else if (TREE_CODE (d) == ADDR_EXPR)
4896 ref = build_fold_addr_expr (ref);
4897 else if (omp_is_reference (var))
4898 ref = build_fold_addr_expr (ref);
4899 ref = fold_convert_loc (clause_loc, ptype, ref);
4900 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4901 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4903 y3 = create_tmp_var (ptype);
4904 gimplify_assign (y3, unshare_expr (ref), ilist);
4906 if (is_simd)
4908 y4 = create_tmp_var (ptype);
4909 gimplify_assign (y4, ref, dlist);
4913 tree i = create_tmp_var (TREE_TYPE (v));
4914 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4915 tree body = create_artificial_label (UNKNOWN_LOCATION);
4916 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4917 if (y2)
4919 i2 = create_tmp_var (TREE_TYPE (v));
4920 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4921 body2 = create_artificial_label (UNKNOWN_LOCATION);
4922 end2 = create_artificial_label (UNKNOWN_LOCATION);
4923 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4925 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4927 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4928 tree decl_placeholder
4929 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4930 SET_DECL_VALUE_EXPR (decl_placeholder,
4931 build_simple_mem_ref (y1));
4932 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4933 SET_DECL_VALUE_EXPR (placeholder,
4934 y3 ? build_simple_mem_ref (y3)
4935 : error_mark_node);
4936 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4937 x = lang_hooks.decls.omp_clause_default_ctor
4938 (c, build_simple_mem_ref (y1),
4939 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4940 if (x)
4941 gimplify_and_add (x, ilist);
4942 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4944 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4945 lower_omp (&tseq, ctx);
4946 gimple_seq_add_seq (ilist, tseq);
4948 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4949 if (is_simd)
4951 SET_DECL_VALUE_EXPR (decl_placeholder,
4952 build_simple_mem_ref (y2));
4953 SET_DECL_VALUE_EXPR (placeholder,
4954 build_simple_mem_ref (y4));
4955 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4956 lower_omp (&tseq, ctx);
4957 gimple_seq_add_seq (dlist, tseq);
4958 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4960 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4961 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4962 if (y2)
4964 x = lang_hooks.decls.omp_clause_dtor
4965 (c, build_simple_mem_ref (y2));
4966 if (x)
4967 gimplify_and_add (x, dlist);
4970 else
4972 x = omp_reduction_init (c, TREE_TYPE (type));
4973 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4975 /* reduction(-:var) sums up the partial results, so it
4976 acts identically to reduction(+:var). */
4977 if (code == MINUS_EXPR)
4978 code = PLUS_EXPR;
4980 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4981 if (is_simd)
4983 x = build2 (code, TREE_TYPE (type),
4984 build_simple_mem_ref (y4),
4985 build_simple_mem_ref (y2));
4986 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4989 gimple *g
4990 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4991 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4992 gimple_seq_add_stmt (ilist, g);
4993 if (y3)
4995 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4996 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4997 gimple_seq_add_stmt (ilist, g);
4999 g = gimple_build_assign (i, PLUS_EXPR, i,
5000 build_int_cst (TREE_TYPE (i), 1));
5001 gimple_seq_add_stmt (ilist, g);
5002 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5003 gimple_seq_add_stmt (ilist, g);
5004 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5005 if (y2)
5007 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5008 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5009 gimple_seq_add_stmt (dlist, g);
5010 if (y4)
5012 g = gimple_build_assign
5013 (y4, POINTER_PLUS_EXPR, y4,
5014 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5015 gimple_seq_add_stmt (dlist, g);
5017 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5018 build_int_cst (TREE_TYPE (i2), 1));
5019 gimple_seq_add_stmt (dlist, g);
5020 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5021 gimple_seq_add_stmt (dlist, g);
5022 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5024 continue;
5026 else if (pass == 2)
5028 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5029 x = var;
5030 else
5032 bool by_ref = use_pointer_for_field (var, ctx);
5033 x = build_receiver_ref (var, by_ref, ctx);
5035 if (!omp_is_reference (var))
5036 x = build_fold_addr_expr (x);
5037 x = fold_convert (ptr_type_node, x);
5038 unsigned cnt = task_reduction_cnt - 1;
5039 if (!task_reduction_needs_orig_p)
5040 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5041 else
5042 cnt = task_reduction_cntorig - 1;
5043 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5044 size_int (cnt), NULL_TREE, NULL_TREE);
5045 gimplify_assign (r, x, ilist);
5046 continue;
5048 else if (pass == 3)
5050 tree type = TREE_TYPE (new_var);
5051 if (!omp_is_reference (var))
5052 type = build_pointer_type (type);
5053 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5055 unsigned cnt = task_reduction_cnt - 1;
5056 if (!task_reduction_needs_orig_p)
5057 cnt += (task_reduction_cntorig_full
5058 - task_reduction_cntorig);
5059 else
5060 cnt = task_reduction_cntorig - 1;
5061 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5062 size_int (cnt), NULL_TREE, NULL_TREE);
5064 else
5066 unsigned int idx = *ctx->task_reduction_map->get (c);
5067 tree off;
5068 if (ctx->task_reductions[1 + idx])
5069 off = fold_convert (sizetype,
5070 ctx->task_reductions[1 + idx]);
5071 else
5072 off = task_reduction_read (ilist, tskred_temp, sizetype,
5073 7 + 3 * idx + 1);
5074 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5075 tskred_base, off);
5077 x = fold_convert (type, x);
5078 tree t;
5079 if (omp_is_reference (var))
5081 gimplify_assign (new_var, x, ilist);
5082 t = new_var;
5083 new_var = build_simple_mem_ref (new_var);
5085 else
5087 t = create_tmp_var (type);
5088 gimplify_assign (t, x, ilist);
5089 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5090 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5092 t = fold_convert (build_pointer_type (boolean_type_node), t);
5093 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5094 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5095 cond = create_tmp_var (TREE_TYPE (t));
5096 gimplify_assign (cond, t, ilist);
5098 else if (is_variable_sized (var))
5100 /* For variable sized types, we need to allocate the
5101 actual storage here. Call alloca and store the
5102 result in the pointer decl that we created elsewhere. */
5103 if (pass == 0)
5104 continue;
5106 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5108 gcall *stmt;
5109 tree tmp, atmp;
5111 ptr = DECL_VALUE_EXPR (new_var);
5112 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5113 ptr = TREE_OPERAND (ptr, 0);
5114 gcc_assert (DECL_P (ptr));
5115 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5117 /* void *tmp = __builtin_alloca */
5118 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5119 stmt = gimple_build_call (atmp, 2, x,
5120 size_int (DECL_ALIGN (var)));
5121 tmp = create_tmp_var_raw (ptr_type_node);
5122 gimple_add_tmp_var (tmp);
5123 gimple_call_set_lhs (stmt, tmp);
5125 gimple_seq_add_stmt (ilist, stmt);
5127 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5128 gimplify_assign (ptr, x, ilist);
5131 else if (omp_is_reference (var)
5132 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5133 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5135 /* For references that are being privatized for Fortran,
5136 allocate new backing storage for the new pointer
5137 variable. This allows us to avoid changing all the
5138 code that expects a pointer to something that expects
5139 a direct variable. */
5140 if (pass == 0)
5141 continue;
5143 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5144 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5146 x = build_receiver_ref (var, false, ctx);
5147 x = build_fold_addr_expr_loc (clause_loc, x);
5149 else if (TREE_CONSTANT (x))
5151 /* For reduction in SIMD loop, defer adding the
5152 initialization of the reference, because if we decide
5153 to use SIMD array for it, the initilization could cause
5154 expansion ICE. Ditto for other privatization clauses. */
5155 if (is_simd)
5156 x = NULL_TREE;
5157 else
5159 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5160 get_name (var));
5161 gimple_add_tmp_var (x);
5162 TREE_ADDRESSABLE (x) = 1;
5163 x = build_fold_addr_expr_loc (clause_loc, x);
5166 else
5168 tree atmp
5169 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5170 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5171 tree al = size_int (TYPE_ALIGN (rtype));
5172 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5175 if (x)
5177 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5178 gimplify_assign (new_var, x, ilist);
5181 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5183 else if ((c_kind == OMP_CLAUSE_REDUCTION
5184 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5185 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5187 if (pass == 0)
5188 continue;
5190 else if (pass != 0)
5191 continue;
5193 switch (OMP_CLAUSE_CODE (c))
5195 case OMP_CLAUSE_SHARED:
5196 /* Ignore shared directives in teams construct inside
5197 target construct. */
5198 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5199 && !is_host_teams_ctx (ctx))
5200 continue;
5201 /* Shared global vars are just accessed directly. */
5202 if (is_global_var (new_var))
5203 break;
5204 /* For taskloop firstprivate/lastprivate, represented
5205 as firstprivate and shared clause on the task, new_var
5206 is the firstprivate var. */
5207 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5208 break;
5209 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5210 needs to be delayed until after fixup_child_record_type so
5211 that we get the correct type during the dereference. */
5212 by_ref = use_pointer_for_field (var, ctx);
5213 x = build_receiver_ref (var, by_ref, ctx);
5214 SET_DECL_VALUE_EXPR (new_var, x);
5215 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5217 /* ??? If VAR is not passed by reference, and the variable
5218 hasn't been initialized yet, then we'll get a warning for
5219 the store into the omp_data_s structure. Ideally, we'd be
5220 able to notice this and not store anything at all, but
5221 we're generating code too early. Suppress the warning. */
5222 if (!by_ref)
5223 TREE_NO_WARNING (var) = 1;
5224 break;
5226 case OMP_CLAUSE__CONDTEMP_:
5227 if (is_parallel_ctx (ctx))
5229 x = build_receiver_ref (var, false, ctx);
5230 SET_DECL_VALUE_EXPR (new_var, x);
5231 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5233 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5235 x = build_zero_cst (TREE_TYPE (var));
5236 goto do_private;
5238 break;
5240 case OMP_CLAUSE_LASTPRIVATE:
5241 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5242 break;
5243 /* FALLTHRU */
5245 case OMP_CLAUSE_PRIVATE:
5246 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5247 x = build_outer_var_ref (var, ctx);
5248 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5250 if (is_task_ctx (ctx))
5251 x = build_receiver_ref (var, false, ctx);
5252 else
5253 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5255 else
5256 x = NULL;
5257 do_private:
5258 tree nx;
5259 bool copy_ctor;
5260 copy_ctor = false;
5261 nx = unshare_expr (new_var);
5262 if (is_simd
5263 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5264 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5265 copy_ctor = true;
5266 if (copy_ctor)
5267 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5268 else
5269 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5270 if (is_simd)
5272 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5273 if ((TREE_ADDRESSABLE (new_var) || nx || y
5274 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5275 && (gimple_omp_for_collapse (ctx->stmt) != 1
5276 || (gimple_omp_for_index (ctx->stmt, 0)
5277 != new_var)))
5278 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5279 || omp_is_reference (var))
5280 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5281 ivar, lvar))
5283 if (omp_is_reference (var))
5285 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5286 tree new_vard = TREE_OPERAND (new_var, 0);
5287 gcc_assert (DECL_P (new_vard));
5288 SET_DECL_VALUE_EXPR (new_vard,
5289 build_fold_addr_expr (lvar));
5290 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5293 if (nx)
5295 tree iv = unshare_expr (ivar);
5296 if (copy_ctor)
5297 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5299 else
5300 x = lang_hooks.decls.omp_clause_default_ctor (c,
5304 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5306 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5307 unshare_expr (ivar), x);
5308 nx = x;
5310 if (nx && x)
5311 gimplify_and_add (x, &llist[0]);
5312 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5313 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5315 tree v = new_var;
5316 if (!DECL_P (v))
5318 gcc_assert (TREE_CODE (v) == MEM_REF);
5319 v = TREE_OPERAND (v, 0);
5320 gcc_assert (DECL_P (v));
5322 v = *ctx->lastprivate_conditional_map->get (v);
5323 tree t = create_tmp_var (TREE_TYPE (v));
5324 tree z = build_zero_cst (TREE_TYPE (v));
5325 tree orig_v
5326 = build_outer_var_ref (var, ctx,
5327 OMP_CLAUSE_LASTPRIVATE);
5328 gimple_seq_add_stmt (dlist,
5329 gimple_build_assign (t, z));
5330 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5331 tree civar = DECL_VALUE_EXPR (v);
5332 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5333 civar = unshare_expr (civar);
5334 TREE_OPERAND (civar, 1) = sctx.idx;
5335 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5336 unshare_expr (civar));
5337 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5338 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5339 orig_v, unshare_expr (ivar)));
5340 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5341 civar);
5342 x = build3 (COND_EXPR, void_type_node, cond, x,
5343 void_node);
5344 gimple_seq tseq = NULL;
5345 gimplify_and_add (x, &tseq);
5346 if (ctx->outer)
5347 lower_omp (&tseq, ctx->outer);
5348 gimple_seq_add_seq (&llist[1], tseq);
5350 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5351 && ctx->for_simd_scan_phase)
5353 x = unshare_expr (ivar);
5354 tree orig_v
5355 = build_outer_var_ref (var, ctx,
5356 OMP_CLAUSE_LASTPRIVATE);
5357 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5358 orig_v);
5359 gimplify_and_add (x, &llist[0]);
5361 if (y)
5363 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5364 if (y)
5365 gimplify_and_add (y, &llist[1]);
5367 break;
5369 if (omp_is_reference (var))
5371 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5372 tree new_vard = TREE_OPERAND (new_var, 0);
5373 gcc_assert (DECL_P (new_vard));
5374 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5375 x = TYPE_SIZE_UNIT (type);
5376 if (TREE_CONSTANT (x))
5378 x = create_tmp_var_raw (type, get_name (var));
5379 gimple_add_tmp_var (x);
5380 TREE_ADDRESSABLE (x) = 1;
5381 x = build_fold_addr_expr_loc (clause_loc, x);
5382 x = fold_convert_loc (clause_loc,
5383 TREE_TYPE (new_vard), x);
5384 gimplify_assign (new_vard, x, ilist);
5388 if (nx)
5389 gimplify_and_add (nx, ilist);
5390 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5391 && is_simd
5392 && ctx->for_simd_scan_phase)
5394 tree orig_v = build_outer_var_ref (var, ctx,
5395 OMP_CLAUSE_LASTPRIVATE);
5396 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5397 orig_v);
5398 gimplify_and_add (x, ilist);
5400 /* FALLTHRU */
5402 do_dtor:
5403 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5404 if (x)
5405 gimplify_and_add (x, dlist);
5406 break;
5408 case OMP_CLAUSE_LINEAR:
5409 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5410 goto do_firstprivate;
5411 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5412 x = NULL;
5413 else
5414 x = build_outer_var_ref (var, ctx);
5415 goto do_private;
5417 case OMP_CLAUSE_FIRSTPRIVATE:
5418 if (is_task_ctx (ctx))
5420 if ((omp_is_reference (var)
5421 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5422 || is_variable_sized (var))
5423 goto do_dtor;
5424 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5425 ctx))
5426 || use_pointer_for_field (var, NULL))
5428 x = build_receiver_ref (var, false, ctx);
5429 SET_DECL_VALUE_EXPR (new_var, x);
5430 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5431 goto do_dtor;
5434 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5435 && omp_is_reference (var))
5437 x = build_outer_var_ref (var, ctx);
5438 gcc_assert (TREE_CODE (x) == MEM_REF
5439 && integer_zerop (TREE_OPERAND (x, 1)));
5440 x = TREE_OPERAND (x, 0);
5441 x = lang_hooks.decls.omp_clause_copy_ctor
5442 (c, unshare_expr (new_var), x);
5443 gimplify_and_add (x, ilist);
5444 goto do_dtor;
5446 do_firstprivate:
5447 x = build_outer_var_ref (var, ctx);
5448 if (is_simd)
5450 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5451 && gimple_omp_for_combined_into_p (ctx->stmt))
5453 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5454 tree stept = TREE_TYPE (t);
5455 tree ct = omp_find_clause (clauses,
5456 OMP_CLAUSE__LOOPTEMP_);
5457 gcc_assert (ct);
5458 tree l = OMP_CLAUSE_DECL (ct);
5459 tree n1 = fd->loop.n1;
5460 tree step = fd->loop.step;
5461 tree itype = TREE_TYPE (l);
5462 if (POINTER_TYPE_P (itype))
5463 itype = signed_type_for (itype);
5464 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5465 if (TYPE_UNSIGNED (itype)
5466 && fd->loop.cond_code == GT_EXPR)
5467 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5468 fold_build1 (NEGATE_EXPR, itype, l),
5469 fold_build1 (NEGATE_EXPR,
5470 itype, step));
5471 else
5472 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5473 t = fold_build2 (MULT_EXPR, stept,
5474 fold_convert (stept, l), t);
5476 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5478 if (omp_is_reference (var))
5480 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5481 tree new_vard = TREE_OPERAND (new_var, 0);
5482 gcc_assert (DECL_P (new_vard));
5483 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5484 nx = TYPE_SIZE_UNIT (type);
5485 if (TREE_CONSTANT (nx))
5487 nx = create_tmp_var_raw (type,
5488 get_name (var));
5489 gimple_add_tmp_var (nx);
5490 TREE_ADDRESSABLE (nx) = 1;
5491 nx = build_fold_addr_expr_loc (clause_loc,
5492 nx);
5493 nx = fold_convert_loc (clause_loc,
5494 TREE_TYPE (new_vard),
5495 nx);
5496 gimplify_assign (new_vard, nx, ilist);
5500 x = lang_hooks.decls.omp_clause_linear_ctor
5501 (c, new_var, x, t);
5502 gimplify_and_add (x, ilist);
5503 goto do_dtor;
5506 if (POINTER_TYPE_P (TREE_TYPE (x)))
5507 x = fold_build2 (POINTER_PLUS_EXPR,
5508 TREE_TYPE (x), x, t);
5509 else
5510 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5513 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5514 || TREE_ADDRESSABLE (new_var)
5515 || omp_is_reference (var))
5516 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5517 ivar, lvar))
5519 if (omp_is_reference (var))
5521 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5522 tree new_vard = TREE_OPERAND (new_var, 0);
5523 gcc_assert (DECL_P (new_vard));
5524 SET_DECL_VALUE_EXPR (new_vard,
5525 build_fold_addr_expr (lvar));
5526 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5528 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5530 tree iv = create_tmp_var (TREE_TYPE (new_var));
5531 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5532 gimplify_and_add (x, ilist);
5533 gimple_stmt_iterator gsi
5534 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5535 gassign *g
5536 = gimple_build_assign (unshare_expr (lvar), iv);
5537 gsi_insert_before_without_update (&gsi, g,
5538 GSI_SAME_STMT);
5539 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5540 enum tree_code code = PLUS_EXPR;
5541 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5542 code = POINTER_PLUS_EXPR;
5543 g = gimple_build_assign (iv, code, iv, t);
5544 gsi_insert_before_without_update (&gsi, g,
5545 GSI_SAME_STMT);
5546 break;
5548 x = lang_hooks.decls.omp_clause_copy_ctor
5549 (c, unshare_expr (ivar), x);
5550 gimplify_and_add (x, &llist[0]);
5551 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5552 if (x)
5553 gimplify_and_add (x, &llist[1]);
5554 break;
5556 if (omp_is_reference (var))
5558 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5559 tree new_vard = TREE_OPERAND (new_var, 0);
5560 gcc_assert (DECL_P (new_vard));
5561 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5562 nx = TYPE_SIZE_UNIT (type);
5563 if (TREE_CONSTANT (nx))
5565 nx = create_tmp_var_raw (type, get_name (var));
5566 gimple_add_tmp_var (nx);
5567 TREE_ADDRESSABLE (nx) = 1;
5568 nx = build_fold_addr_expr_loc (clause_loc, nx);
5569 nx = fold_convert_loc (clause_loc,
5570 TREE_TYPE (new_vard), nx);
5571 gimplify_assign (new_vard, nx, ilist);
5575 x = lang_hooks.decls.omp_clause_copy_ctor
5576 (c, unshare_expr (new_var), x);
5577 gimplify_and_add (x, ilist);
5578 goto do_dtor;
5580 case OMP_CLAUSE__LOOPTEMP_:
5581 case OMP_CLAUSE__REDUCTEMP_:
5582 gcc_assert (is_taskreg_ctx (ctx));
5583 x = build_outer_var_ref (var, ctx);
5584 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5585 gimplify_and_add (x, ilist);
5586 break;
5588 case OMP_CLAUSE_COPYIN:
5589 by_ref = use_pointer_for_field (var, NULL);
5590 x = build_receiver_ref (var, by_ref, ctx);
5591 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5592 append_to_statement_list (x, &copyin_seq);
5593 copyin_by_ref |= by_ref;
5594 break;
5596 case OMP_CLAUSE_REDUCTION:
5597 case OMP_CLAUSE_IN_REDUCTION:
5598 /* OpenACC reductions are initialized using the
5599 GOACC_REDUCTION internal function. */
5600 if (is_gimple_omp_oacc (ctx->stmt))
5601 break;
5602 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5604 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5605 gimple *tseq;
5606 tree ptype = TREE_TYPE (placeholder);
5607 if (cond)
5609 x = error_mark_node;
5610 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5611 && !task_reduction_needs_orig_p)
5612 x = var;
5613 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5615 tree pptype = build_pointer_type (ptype);
5616 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5617 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5618 size_int (task_reduction_cnt_full
5619 + task_reduction_cntorig - 1),
5620 NULL_TREE, NULL_TREE);
5621 else
5623 unsigned int idx
5624 = *ctx->task_reduction_map->get (c);
5625 x = task_reduction_read (ilist, tskred_temp,
5626 pptype, 7 + 3 * idx);
5628 x = fold_convert (pptype, x);
5629 x = build_simple_mem_ref (x);
5632 else
5634 x = build_outer_var_ref (var, ctx);
5636 if (omp_is_reference (var)
5637 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5638 x = build_fold_addr_expr_loc (clause_loc, x);
5640 SET_DECL_VALUE_EXPR (placeholder, x);
5641 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5642 tree new_vard = new_var;
5643 if (omp_is_reference (var))
5645 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5646 new_vard = TREE_OPERAND (new_var, 0);
5647 gcc_assert (DECL_P (new_vard));
5649 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5650 if (is_simd
5651 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5652 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5653 rvarp = &rvar;
5654 if (is_simd
5655 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5656 ivar, lvar, rvarp,
5657 &rvar2))
5659 if (new_vard == new_var)
5661 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5662 SET_DECL_VALUE_EXPR (new_var, ivar);
5664 else
5666 SET_DECL_VALUE_EXPR (new_vard,
5667 build_fold_addr_expr (ivar));
5668 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5670 x = lang_hooks.decls.omp_clause_default_ctor
5671 (c, unshare_expr (ivar),
5672 build_outer_var_ref (var, ctx));
5673 if (rvarp && ctx->for_simd_scan_phase)
5675 if (x)
5676 gimplify_and_add (x, &llist[0]);
5677 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5678 if (x)
5679 gimplify_and_add (x, &llist[1]);
5680 break;
5682 else if (rvarp)
5684 if (x)
5686 gimplify_and_add (x, &llist[0]);
5688 tree ivar2 = unshare_expr (lvar);
5689 TREE_OPERAND (ivar2, 1) = sctx.idx;
5690 x = lang_hooks.decls.omp_clause_default_ctor
5691 (c, ivar2, build_outer_var_ref (var, ctx));
5692 gimplify_and_add (x, &llist[0]);
5694 if (rvar2)
5696 x = lang_hooks.decls.omp_clause_default_ctor
5697 (c, unshare_expr (rvar2),
5698 build_outer_var_ref (var, ctx));
5699 gimplify_and_add (x, &llist[0]);
5702 /* For types that need construction, add another
5703 private var which will be default constructed
5704 and optionally initialized with
5705 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5706 loop we want to assign this value instead of
5707 constructing and destructing it in each
5708 iteration. */
5709 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5710 gimple_add_tmp_var (nv);
5711 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5712 ? rvar2
5713 : ivar, 0),
5714 nv);
5715 x = lang_hooks.decls.omp_clause_default_ctor
5716 (c, nv, build_outer_var_ref (var, ctx));
5717 gimplify_and_add (x, ilist);
5719 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5721 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5722 x = DECL_VALUE_EXPR (new_vard);
5723 tree vexpr = nv;
5724 if (new_vard != new_var)
5725 vexpr = build_fold_addr_expr (nv);
5726 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5727 lower_omp (&tseq, ctx);
5728 SET_DECL_VALUE_EXPR (new_vard, x);
5729 gimple_seq_add_seq (ilist, tseq);
5730 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5733 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5734 if (x)
5735 gimplify_and_add (x, dlist);
5738 tree ref = build_outer_var_ref (var, ctx);
5739 x = unshare_expr (ivar);
5740 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5741 ref);
5742 gimplify_and_add (x, &llist[0]);
5744 ref = build_outer_var_ref (var, ctx);
5745 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5746 rvar);
5747 gimplify_and_add (x, &llist[3]);
5749 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5750 if (new_vard == new_var)
5751 SET_DECL_VALUE_EXPR (new_var, lvar);
5752 else
5753 SET_DECL_VALUE_EXPR (new_vard,
5754 build_fold_addr_expr (lvar));
5756 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5757 if (x)
5758 gimplify_and_add (x, &llist[1]);
5760 tree ivar2 = unshare_expr (lvar);
5761 TREE_OPERAND (ivar2, 1) = sctx.idx;
5762 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5763 if (x)
5764 gimplify_and_add (x, &llist[1]);
5766 if (rvar2)
5768 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5769 if (x)
5770 gimplify_and_add (x, &llist[1]);
5772 break;
5774 if (x)
5775 gimplify_and_add (x, &llist[0]);
5776 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5778 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5779 lower_omp (&tseq, ctx);
5780 gimple_seq_add_seq (&llist[0], tseq);
5782 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5783 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5784 lower_omp (&tseq, ctx);
5785 gimple_seq_add_seq (&llist[1], tseq);
5786 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5787 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5788 if (new_vard == new_var)
5789 SET_DECL_VALUE_EXPR (new_var, lvar);
5790 else
5791 SET_DECL_VALUE_EXPR (new_vard,
5792 build_fold_addr_expr (lvar));
5793 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5794 if (x)
5795 gimplify_and_add (x, &llist[1]);
5796 break;
5798 /* If this is a reference to constant size reduction var
5799 with placeholder, we haven't emitted the initializer
5800 for it because it is undesirable if SIMD arrays are used.
5801 But if they aren't used, we need to emit the deferred
5802 initialization now. */
5803 else if (omp_is_reference (var) && is_simd)
5804 handle_simd_reference (clause_loc, new_vard, ilist);
5806 tree lab2 = NULL_TREE;
5807 if (cond)
5809 gimple *g;
5810 if (!is_parallel_ctx (ctx))
5812 tree condv = create_tmp_var (boolean_type_node);
5813 tree m = build_simple_mem_ref (cond);
5814 g = gimple_build_assign (condv, m);
5815 gimple_seq_add_stmt (ilist, g);
5816 tree lab1
5817 = create_artificial_label (UNKNOWN_LOCATION);
5818 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5819 g = gimple_build_cond (NE_EXPR, condv,
5820 boolean_false_node,
5821 lab2, lab1);
5822 gimple_seq_add_stmt (ilist, g);
5823 gimple_seq_add_stmt (ilist,
5824 gimple_build_label (lab1));
5826 g = gimple_build_assign (build_simple_mem_ref (cond),
5827 boolean_true_node);
5828 gimple_seq_add_stmt (ilist, g);
5830 x = lang_hooks.decls.omp_clause_default_ctor
5831 (c, unshare_expr (new_var),
5832 cond ? NULL_TREE
5833 : build_outer_var_ref (var, ctx));
5834 if (x)
5835 gimplify_and_add (x, ilist);
5837 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5838 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5840 if (ctx->for_simd_scan_phase)
5841 goto do_dtor;
5842 if (x || (!is_simd
5843 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5845 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5846 gimple_add_tmp_var (nv);
5847 ctx->cb.decl_map->put (new_vard, nv);
5848 x = lang_hooks.decls.omp_clause_default_ctor
5849 (c, nv, build_outer_var_ref (var, ctx));
5850 if (x)
5851 gimplify_and_add (x, ilist);
5852 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5854 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5855 tree vexpr = nv;
5856 if (new_vard != new_var)
5857 vexpr = build_fold_addr_expr (nv);
5858 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5859 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5860 lower_omp (&tseq, ctx);
5861 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5862 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5863 gimple_seq_add_seq (ilist, tseq);
5865 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5866 if (is_simd && ctx->scan_exclusive)
5868 tree nv2
5869 = create_tmp_var_raw (TREE_TYPE (new_var));
5870 gimple_add_tmp_var (nv2);
5871 ctx->cb.decl_map->put (nv, nv2);
5872 x = lang_hooks.decls.omp_clause_default_ctor
5873 (c, nv2, build_outer_var_ref (var, ctx));
5874 gimplify_and_add (x, ilist);
5875 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5876 if (x)
5877 gimplify_and_add (x, dlist);
5879 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5880 if (x)
5881 gimplify_and_add (x, dlist);
5883 else if (is_simd
5884 && ctx->scan_exclusive
5885 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5887 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5888 gimple_add_tmp_var (nv2);
5889 ctx->cb.decl_map->put (new_vard, nv2);
5890 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5891 if (x)
5892 gimplify_and_add (x, dlist);
5894 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5895 goto do_dtor;
5898 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5900 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5901 lower_omp (&tseq, ctx);
5902 gimple_seq_add_seq (ilist, tseq);
5904 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5905 if (is_simd)
5907 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5908 lower_omp (&tseq, ctx);
5909 gimple_seq_add_seq (dlist, tseq);
5910 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5912 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5913 if (cond)
5915 if (lab2)
5916 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5917 break;
5919 goto do_dtor;
5921 else
5923 x = omp_reduction_init (c, TREE_TYPE (new_var));
5924 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5925 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5927 if (cond)
5929 gimple *g;
5930 tree lab2 = NULL_TREE;
5931 /* GOMP_taskgroup_reduction_register memsets the whole
5932 array to zero. If the initializer is zero, we don't
5933 need to initialize it again, just mark it as ever
5934 used unconditionally, i.e. cond = true. */
5935 if (initializer_zerop (x))
5937 g = gimple_build_assign (build_simple_mem_ref (cond),
5938 boolean_true_node);
5939 gimple_seq_add_stmt (ilist, g);
5940 break;
5943 /* Otherwise, emit
5944 if (!cond) { cond = true; new_var = x; } */
5945 if (!is_parallel_ctx (ctx))
5947 tree condv = create_tmp_var (boolean_type_node);
5948 tree m = build_simple_mem_ref (cond);
5949 g = gimple_build_assign (condv, m);
5950 gimple_seq_add_stmt (ilist, g);
5951 tree lab1
5952 = create_artificial_label (UNKNOWN_LOCATION);
5953 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5954 g = gimple_build_cond (NE_EXPR, condv,
5955 boolean_false_node,
5956 lab2, lab1);
5957 gimple_seq_add_stmt (ilist, g);
5958 gimple_seq_add_stmt (ilist,
5959 gimple_build_label (lab1));
5961 g = gimple_build_assign (build_simple_mem_ref (cond),
5962 boolean_true_node);
5963 gimple_seq_add_stmt (ilist, g);
5964 gimplify_assign (new_var, x, ilist);
5965 if (lab2)
5966 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5967 break;
5970 /* reduction(-:var) sums up the partial results, so it
5971 acts identically to reduction(+:var). */
5972 if (code == MINUS_EXPR)
5973 code = PLUS_EXPR;
5975 tree new_vard = new_var;
5976 if (is_simd && omp_is_reference (var))
5978 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5979 new_vard = TREE_OPERAND (new_var, 0);
5980 gcc_assert (DECL_P (new_vard));
5982 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5983 if (is_simd
5984 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5985 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5986 rvarp = &rvar;
5987 if (is_simd
5988 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5989 ivar, lvar, rvarp,
5990 &rvar2))
5992 if (new_vard != new_var)
5994 SET_DECL_VALUE_EXPR (new_vard,
5995 build_fold_addr_expr (lvar));
5996 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5999 tree ref = build_outer_var_ref (var, ctx);
6001 if (rvarp)
6003 if (ctx->for_simd_scan_phase)
6004 break;
6005 gimplify_assign (ivar, ref, &llist[0]);
6006 ref = build_outer_var_ref (var, ctx);
6007 gimplify_assign (ref, rvar, &llist[3]);
6008 break;
6011 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6013 if (sctx.is_simt)
6015 if (!simt_lane)
6016 simt_lane = create_tmp_var (unsigned_type_node);
6017 x = build_call_expr_internal_loc
6018 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6019 TREE_TYPE (ivar), 2, ivar, simt_lane);
6020 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6021 gimplify_assign (ivar, x, &llist[2]);
6023 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6024 ref = build_outer_var_ref (var, ctx);
6025 gimplify_assign (ref, x, &llist[1]);
6028 else
6030 if (omp_is_reference (var) && is_simd)
6031 handle_simd_reference (clause_loc, new_vard, ilist);
6032 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6033 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6034 break;
6035 gimplify_assign (new_var, x, ilist);
6036 if (is_simd)
6038 tree ref = build_outer_var_ref (var, ctx);
6040 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6041 ref = build_outer_var_ref (var, ctx);
6042 gimplify_assign (ref, x, dlist);
6046 break;
6048 default:
6049 gcc_unreachable ();
6053 if (tskred_avar)
6055 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6056 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6059 if (known_eq (sctx.max_vf, 1U))
6061 sctx.is_simt = false;
6062 if (ctx->lastprivate_conditional_map)
6064 if (gimple_omp_for_combined_into_p (ctx->stmt))
6066 /* Signal to lower_omp_1 that it should use parent context. */
6067 ctx->combined_into_simd_safelen1 = true;
6068 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6069 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6070 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6072 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6073 omp_context *outer = ctx->outer;
6074 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6075 outer = outer->outer;
6076 tree *v = ctx->lastprivate_conditional_map->get (o);
6077 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6078 tree *pv = outer->lastprivate_conditional_map->get (po);
6079 *v = *pv;
6082 else
6084 /* When not vectorized, treat lastprivate(conditional:) like
6085 normal lastprivate, as there will be just one simd lane
6086 writing the privatized variable. */
6087 delete ctx->lastprivate_conditional_map;
6088 ctx->lastprivate_conditional_map = NULL;
6093 if (nonconst_simd_if)
6095 if (sctx.lane == NULL_TREE)
6097 sctx.idx = create_tmp_var (unsigned_type_node);
6098 sctx.lane = create_tmp_var (unsigned_type_node);
6100 /* FIXME: For now. */
6101 sctx.is_simt = false;
6104 if (sctx.lane || sctx.is_simt)
6106 uid = create_tmp_var (ptr_type_node, "simduid");
6107 /* Don't want uninit warnings on simduid, it is always uninitialized,
6108 but we use it not for the value, but for the DECL_UID only. */
6109 TREE_NO_WARNING (uid) = 1;
6110 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6111 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6112 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6113 gimple_omp_for_set_clauses (ctx->stmt, c);
6115 /* Emit calls denoting privatized variables and initializing a pointer to
6116 structure that holds private variables as fields after ompdevlow pass. */
6117 if (sctx.is_simt)
6119 sctx.simt_eargs[0] = uid;
6120 gimple *g
6121 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6122 gimple_call_set_lhs (g, uid);
6123 gimple_seq_add_stmt (ilist, g);
6124 sctx.simt_eargs.release ();
6126 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6127 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6128 gimple_call_set_lhs (g, simtrec);
6129 gimple_seq_add_stmt (ilist, g);
6131 if (sctx.lane)
6133 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6134 2 + (nonconst_simd_if != NULL),
6135 uid, integer_zero_node,
6136 nonconst_simd_if);
6137 gimple_call_set_lhs (g, sctx.lane);
6138 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6139 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6140 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6141 build_int_cst (unsigned_type_node, 0));
6142 gimple_seq_add_stmt (ilist, g);
6143 if (sctx.lastlane)
6145 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6146 2, uid, sctx.lane);
6147 gimple_call_set_lhs (g, sctx.lastlane);
6148 gimple_seq_add_stmt (dlist, g);
6149 gimple_seq_add_seq (dlist, llist[3]);
6151 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6152 if (llist[2])
6154 tree simt_vf = create_tmp_var (unsigned_type_node);
6155 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6156 gimple_call_set_lhs (g, simt_vf);
6157 gimple_seq_add_stmt (dlist, g);
6159 tree t = build_int_cst (unsigned_type_node, 1);
6160 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6161 gimple_seq_add_stmt (dlist, g);
6163 t = build_int_cst (unsigned_type_node, 0);
6164 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6165 gimple_seq_add_stmt (dlist, g);
6167 tree body = create_artificial_label (UNKNOWN_LOCATION);
6168 tree header = create_artificial_label (UNKNOWN_LOCATION);
6169 tree end = create_artificial_label (UNKNOWN_LOCATION);
6170 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6171 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6173 gimple_seq_add_seq (dlist, llist[2]);
6175 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6176 gimple_seq_add_stmt (dlist, g);
6178 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6179 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6180 gimple_seq_add_stmt (dlist, g);
6182 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6184 for (int i = 0; i < 2; i++)
6185 if (llist[i])
6187 tree vf = create_tmp_var (unsigned_type_node);
6188 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6189 gimple_call_set_lhs (g, vf);
6190 gimple_seq *seq = i == 0 ? ilist : dlist;
6191 gimple_seq_add_stmt (seq, g);
6192 tree t = build_int_cst (unsigned_type_node, 0);
6193 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6194 gimple_seq_add_stmt (seq, g);
6195 tree body = create_artificial_label (UNKNOWN_LOCATION);
6196 tree header = create_artificial_label (UNKNOWN_LOCATION);
6197 tree end = create_artificial_label (UNKNOWN_LOCATION);
6198 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6199 gimple_seq_add_stmt (seq, gimple_build_label (body));
6200 gimple_seq_add_seq (seq, llist[i]);
6201 t = build_int_cst (unsigned_type_node, 1);
6202 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6203 gimple_seq_add_stmt (seq, g);
6204 gimple_seq_add_stmt (seq, gimple_build_label (header));
6205 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6206 gimple_seq_add_stmt (seq, g);
6207 gimple_seq_add_stmt (seq, gimple_build_label (end));
6210 if (sctx.is_simt)
6212 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6213 gimple *g
6214 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6215 gimple_seq_add_stmt (dlist, g);
6218 /* The copyin sequence is not to be executed by the main thread, since
6219 that would result in self-copies. Perhaps not visible to scalars,
6220 but it certainly is to C++ operator=. */
6221 if (copyin_seq)
6223 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6225 x = build2 (NE_EXPR, boolean_type_node, x,
6226 build_int_cst (TREE_TYPE (x), 0));
6227 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6228 gimplify_and_add (x, ilist);
6231 /* If any copyin variable is passed by reference, we must ensure the
6232 master thread doesn't modify it before it is copied over in all
6233 threads. Similarly for variables in both firstprivate and
6234 lastprivate clauses we need to ensure the lastprivate copying
6235 happens after firstprivate copying in all threads. And similarly
6236 for UDRs if initializer expression refers to omp_orig. */
6237 if (copyin_by_ref || lastprivate_firstprivate
6238 || (reduction_omp_orig_ref
6239 && !ctx->scan_inclusive
6240 && !ctx->scan_exclusive))
6242 /* Don't add any barrier for #pragma omp simd or
6243 #pragma omp distribute. */
6244 if (!is_task_ctx (ctx)
6245 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6246 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6247 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6250 /* If max_vf is non-zero, then we can use only a vectorization factor
6251 up to the max_vf we chose. So stick it into the safelen clause. */
6252 if (maybe_ne (sctx.max_vf, 0U))
6254 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6255 OMP_CLAUSE_SAFELEN);
6256 poly_uint64 safe_len;
6257 if (c == NULL_TREE
6258 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6259 && maybe_gt (safe_len, sctx.max_vf)))
6261 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6262 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6263 sctx.max_vf);
6264 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6265 gimple_omp_for_set_clauses (ctx->stmt, c);
6270 /* Create temporary variables for lastprivate(conditional:) implementation
6271 in context CTX with CLAUSES. */
6273 static void
6274 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6276 tree iter_type = NULL_TREE;
6277 tree cond_ptr = NULL_TREE;
6278 tree iter_var = NULL_TREE;
6279 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6280 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6281 tree next = *clauses;
6282 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6283 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6284 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6286 if (is_simd)
6288 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6289 gcc_assert (cc);
6290 if (iter_type == NULL_TREE)
6292 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6293 iter_var = create_tmp_var_raw (iter_type);
6294 DECL_CONTEXT (iter_var) = current_function_decl;
6295 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6296 DECL_CHAIN (iter_var) = ctx->block_vars;
6297 ctx->block_vars = iter_var;
6298 tree c3
6299 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6300 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6301 OMP_CLAUSE_DECL (c3) = iter_var;
6302 OMP_CLAUSE_CHAIN (c3) = *clauses;
6303 *clauses = c3;
6304 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6306 next = OMP_CLAUSE_CHAIN (cc);
6307 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6308 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6309 ctx->lastprivate_conditional_map->put (o, v);
6310 continue;
6312 if (iter_type == NULL)
6314 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6316 struct omp_for_data fd;
6317 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6318 NULL);
6319 iter_type = unsigned_type_for (fd.iter_type);
6321 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6322 iter_type = unsigned_type_node;
6323 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6324 if (c2)
6326 cond_ptr
6327 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6328 OMP_CLAUSE_DECL (c2) = cond_ptr;
6330 else
6332 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6333 DECL_CONTEXT (cond_ptr) = current_function_decl;
6334 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6335 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6336 ctx->block_vars = cond_ptr;
6337 c2 = build_omp_clause (UNKNOWN_LOCATION,
6338 OMP_CLAUSE__CONDTEMP_);
6339 OMP_CLAUSE_DECL (c2) = cond_ptr;
6340 OMP_CLAUSE_CHAIN (c2) = *clauses;
6341 *clauses = c2;
6343 iter_var = create_tmp_var_raw (iter_type);
6344 DECL_CONTEXT (iter_var) = current_function_decl;
6345 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6346 DECL_CHAIN (iter_var) = ctx->block_vars;
6347 ctx->block_vars = iter_var;
6348 tree c3
6349 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6350 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6351 OMP_CLAUSE_DECL (c3) = iter_var;
6352 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6353 OMP_CLAUSE_CHAIN (c2) = c3;
6354 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6356 tree v = create_tmp_var_raw (iter_type);
6357 DECL_CONTEXT (v) = current_function_decl;
6358 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6359 DECL_CHAIN (v) = ctx->block_vars;
6360 ctx->block_vars = v;
6361 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6362 ctx->lastprivate_conditional_map->put (o, v);
6367 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6368 both parallel and workshare constructs. PREDICATE may be NULL if it's
6369 always true. BODY_P is the sequence to insert early initialization
6370 if needed, STMT_LIST is where the non-conditional lastprivate handling
6371 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6372 section. */
6374 static void
6375 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6376 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6377 omp_context *ctx)
6379 tree x, c, label = NULL, orig_clauses = clauses;
6380 bool par_clauses = false;
6381 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6382 unsigned HOST_WIDE_INT conditional_off = 0;
6383 gimple_seq post_stmt_list = NULL;
6385 /* Early exit if there are no lastprivate or linear clauses. */
6386 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6387 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6388 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6389 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6390 break;
6391 if (clauses == NULL)
6393 /* If this was a workshare clause, see if it had been combined
6394 with its parallel. In that case, look for the clauses on the
6395 parallel statement itself. */
6396 if (is_parallel_ctx (ctx))
6397 return;
6399 ctx = ctx->outer;
6400 if (ctx == NULL || !is_parallel_ctx (ctx))
6401 return;
6403 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6404 OMP_CLAUSE_LASTPRIVATE);
6405 if (clauses == NULL)
6406 return;
6407 par_clauses = true;
6410 bool maybe_simt = false;
6411 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6412 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6414 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6415 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6416 if (simduid)
6417 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6420 if (predicate)
6422 gcond *stmt;
6423 tree label_true, arm1, arm2;
6424 enum tree_code pred_code = TREE_CODE (predicate);
6426 label = create_artificial_label (UNKNOWN_LOCATION);
6427 label_true = create_artificial_label (UNKNOWN_LOCATION);
6428 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6430 arm1 = TREE_OPERAND (predicate, 0);
6431 arm2 = TREE_OPERAND (predicate, 1);
6432 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6433 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6435 else
6437 arm1 = predicate;
6438 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6439 arm2 = boolean_false_node;
6440 pred_code = NE_EXPR;
6442 if (maybe_simt)
6444 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6445 c = fold_convert (integer_type_node, c);
6446 simtcond = create_tmp_var (integer_type_node);
6447 gimplify_assign (simtcond, c, stmt_list);
6448 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6449 1, simtcond);
6450 c = create_tmp_var (integer_type_node);
6451 gimple_call_set_lhs (g, c);
6452 gimple_seq_add_stmt (stmt_list, g);
6453 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6454 label_true, label);
6456 else
6457 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6458 gimple_seq_add_stmt (stmt_list, stmt);
6459 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6462 tree cond_ptr = NULL_TREE;
6463 for (c = clauses; c ;)
6465 tree var, new_var;
6466 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6467 gimple_seq *this_stmt_list = stmt_list;
6468 tree lab2 = NULL_TREE;
6470 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6471 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6472 && ctx->lastprivate_conditional_map
6473 && !ctx->combined_into_simd_safelen1)
6475 gcc_assert (body_p);
6476 if (simduid)
6477 goto next;
6478 if (cond_ptr == NULL_TREE)
6480 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6481 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6483 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6484 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6485 tree v = *ctx->lastprivate_conditional_map->get (o);
6486 gimplify_assign (v, build_zero_cst (type), body_p);
6487 this_stmt_list = cstmt_list;
6488 tree mem;
6489 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6491 mem = build2 (MEM_REF, type, cond_ptr,
6492 build_int_cst (TREE_TYPE (cond_ptr),
6493 conditional_off));
6494 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6496 else
6497 mem = build4 (ARRAY_REF, type, cond_ptr,
6498 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6499 tree mem2 = copy_node (mem);
6500 gimple_seq seq = NULL;
6501 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6502 gimple_seq_add_seq (this_stmt_list, seq);
6503 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6504 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6505 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6506 gimple_seq_add_stmt (this_stmt_list, g);
6507 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6508 gimplify_assign (mem2, v, this_stmt_list);
6510 else if (predicate
6511 && ctx->combined_into_simd_safelen1
6512 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6513 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6514 && ctx->lastprivate_conditional_map)
6515 this_stmt_list = &post_stmt_list;
6517 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6518 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6519 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6521 var = OMP_CLAUSE_DECL (c);
6522 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6523 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6524 && is_taskloop_ctx (ctx))
6526 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6527 new_var = lookup_decl (var, ctx->outer);
6529 else
6531 new_var = lookup_decl (var, ctx);
6532 /* Avoid uninitialized warnings for lastprivate and
6533 for linear iterators. */
6534 if (predicate
6535 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6536 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6537 TREE_NO_WARNING (new_var) = 1;
6540 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6542 tree val = DECL_VALUE_EXPR (new_var);
6543 if (TREE_CODE (val) == ARRAY_REF
6544 && VAR_P (TREE_OPERAND (val, 0))
6545 && lookup_attribute ("omp simd array",
6546 DECL_ATTRIBUTES (TREE_OPERAND (val,
6547 0))))
6549 if (lastlane == NULL)
6551 lastlane = create_tmp_var (unsigned_type_node);
6552 gcall *g
6553 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6554 2, simduid,
6555 TREE_OPERAND (val, 1));
6556 gimple_call_set_lhs (g, lastlane);
6557 gimple_seq_add_stmt (this_stmt_list, g);
6559 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6560 TREE_OPERAND (val, 0), lastlane,
6561 NULL_TREE, NULL_TREE);
6562 TREE_THIS_NOTRAP (new_var) = 1;
6565 else if (maybe_simt)
6567 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6568 ? DECL_VALUE_EXPR (new_var)
6569 : new_var);
6570 if (simtlast == NULL)
6572 simtlast = create_tmp_var (unsigned_type_node);
6573 gcall *g = gimple_build_call_internal
6574 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6575 gimple_call_set_lhs (g, simtlast);
6576 gimple_seq_add_stmt (this_stmt_list, g);
6578 x = build_call_expr_internal_loc
6579 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6580 TREE_TYPE (val), 2, val, simtlast);
6581 new_var = unshare_expr (new_var);
6582 gimplify_assign (new_var, x, this_stmt_list);
6583 new_var = unshare_expr (new_var);
6586 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6587 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6589 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6590 gimple_seq_add_seq (this_stmt_list,
6591 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6592 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6594 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6595 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6597 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6598 gimple_seq_add_seq (this_stmt_list,
6599 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6600 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6603 x = NULL_TREE;
6604 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6605 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6606 && is_taskloop_ctx (ctx))
6608 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6609 ctx->outer->outer);
6610 if (is_global_var (ovar))
6611 x = ovar;
6613 if (!x)
6614 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6615 if (omp_is_reference (var))
6616 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6617 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6618 gimplify_and_add (x, this_stmt_list);
6620 if (lab2)
6621 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6624 next:
6625 c = OMP_CLAUSE_CHAIN (c);
6626 if (c == NULL && !par_clauses)
6628 /* If this was a workshare clause, see if it had been combined
6629 with its parallel. In that case, continue looking for the
6630 clauses also on the parallel statement itself. */
6631 if (is_parallel_ctx (ctx))
6632 break;
6634 ctx = ctx->outer;
6635 if (ctx == NULL || !is_parallel_ctx (ctx))
6636 break;
6638 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6639 OMP_CLAUSE_LASTPRIVATE);
6640 par_clauses = true;
6644 if (label)
6645 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6646 gimple_seq_add_seq (stmt_list, post_stmt_list);
6649 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6650 (which might be a placeholder). INNER is true if this is an inner
6651 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6652 join markers. Generate the before-loop forking sequence in
6653 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6654 general form of these sequences is
6656 GOACC_REDUCTION_SETUP
6657 GOACC_FORK
6658 GOACC_REDUCTION_INIT
6660 GOACC_REDUCTION_FINI
6661 GOACC_JOIN
6662 GOACC_REDUCTION_TEARDOWN. */
6664 static void
6665 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6666 gcall *fork, gcall *join, gimple_seq *fork_seq,
6667 gimple_seq *join_seq, omp_context *ctx)
6669 gimple_seq before_fork = NULL;
6670 gimple_seq after_fork = NULL;
6671 gimple_seq before_join = NULL;
6672 gimple_seq after_join = NULL;
6673 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6674 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6675 unsigned offset = 0;
6677 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6678 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6680 tree orig = OMP_CLAUSE_DECL (c);
6681 tree var = maybe_lookup_decl (orig, ctx);
6682 tree ref_to_res = NULL_TREE;
6683 tree incoming, outgoing, v1, v2, v3;
6684 bool is_private = false;
6686 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6687 if (rcode == MINUS_EXPR)
6688 rcode = PLUS_EXPR;
6689 else if (rcode == TRUTH_ANDIF_EXPR)
6690 rcode = BIT_AND_EXPR;
6691 else if (rcode == TRUTH_ORIF_EXPR)
6692 rcode = BIT_IOR_EXPR;
6693 tree op = build_int_cst (unsigned_type_node, rcode);
6695 if (!var)
6696 var = orig;
6698 incoming = outgoing = var;
6700 if (!inner)
6702 /* See if an outer construct also reduces this variable. */
6703 omp_context *outer = ctx;
6705 while (omp_context *probe = outer->outer)
6707 enum gimple_code type = gimple_code (probe->stmt);
6708 tree cls;
6710 switch (type)
6712 case GIMPLE_OMP_FOR:
6713 cls = gimple_omp_for_clauses (probe->stmt);
6714 break;
6716 case GIMPLE_OMP_TARGET:
6717 if ((gimple_omp_target_kind (probe->stmt)
6718 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6719 && (gimple_omp_target_kind (probe->stmt)
6720 != GF_OMP_TARGET_KIND_OACC_SERIAL))
6721 goto do_lookup;
6723 cls = gimple_omp_target_clauses (probe->stmt);
6724 break;
6726 default:
6727 goto do_lookup;
6730 outer = probe;
6731 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6732 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6733 && orig == OMP_CLAUSE_DECL (cls))
6735 incoming = outgoing = lookup_decl (orig, probe);
6736 goto has_outer_reduction;
6738 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6739 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6740 && orig == OMP_CLAUSE_DECL (cls))
6742 is_private = true;
6743 goto do_lookup;
6747 do_lookup:
6748 /* This is the outermost construct with this reduction,
6749 see if there's a mapping for it. */
6750 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6751 && maybe_lookup_field (orig, outer) && !is_private)
6753 ref_to_res = build_receiver_ref (orig, false, outer);
6754 if (omp_is_reference (orig))
6755 ref_to_res = build_simple_mem_ref (ref_to_res);
6757 tree type = TREE_TYPE (var);
6758 if (POINTER_TYPE_P (type))
6759 type = TREE_TYPE (type);
6761 outgoing = var;
6762 incoming = omp_reduction_init_op (loc, rcode, type);
6764 else
6766 /* Try to look at enclosing contexts for reduction var,
6767 use original if no mapping found. */
6768 tree t = NULL_TREE;
6769 omp_context *c = ctx->outer;
6770 while (c && !t)
6772 t = maybe_lookup_decl (orig, c);
6773 c = c->outer;
6775 incoming = outgoing = (t ? t : orig);
6778 has_outer_reduction:;
6781 if (!ref_to_res)
6782 ref_to_res = integer_zero_node;
6784 if (omp_is_reference (orig))
6786 tree type = TREE_TYPE (var);
6787 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6789 if (!inner)
6791 tree x = create_tmp_var (TREE_TYPE (type), id);
6792 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6795 v1 = create_tmp_var (type, id);
6796 v2 = create_tmp_var (type, id);
6797 v3 = create_tmp_var (type, id);
6799 gimplify_assign (v1, var, fork_seq);
6800 gimplify_assign (v2, var, fork_seq);
6801 gimplify_assign (v3, var, fork_seq);
6803 var = build_simple_mem_ref (var);
6804 v1 = build_simple_mem_ref (v1);
6805 v2 = build_simple_mem_ref (v2);
6806 v3 = build_simple_mem_ref (v3);
6807 outgoing = build_simple_mem_ref (outgoing);
6809 if (!TREE_CONSTANT (incoming))
6810 incoming = build_simple_mem_ref (incoming);
6812 else
6813 v1 = v2 = v3 = var;
6815 /* Determine position in reduction buffer, which may be used
6816 by target. The parser has ensured that this is not a
6817 variable-sized type. */
6818 fixed_size_mode mode
6819 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6820 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6821 offset = (offset + align - 1) & ~(align - 1);
6822 tree off = build_int_cst (sizetype, offset);
6823 offset += GET_MODE_SIZE (mode);
6825 if (!init_code)
6827 init_code = build_int_cst (integer_type_node,
6828 IFN_GOACC_REDUCTION_INIT);
6829 fini_code = build_int_cst (integer_type_node,
6830 IFN_GOACC_REDUCTION_FINI);
6831 setup_code = build_int_cst (integer_type_node,
6832 IFN_GOACC_REDUCTION_SETUP);
6833 teardown_code = build_int_cst (integer_type_node,
6834 IFN_GOACC_REDUCTION_TEARDOWN);
6837 tree setup_call
6838 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6839 TREE_TYPE (var), 6, setup_code,
6840 unshare_expr (ref_to_res),
6841 incoming, level, op, off);
6842 tree init_call
6843 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6844 TREE_TYPE (var), 6, init_code,
6845 unshare_expr (ref_to_res),
6846 v1, level, op, off);
6847 tree fini_call
6848 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6849 TREE_TYPE (var), 6, fini_code,
6850 unshare_expr (ref_to_res),
6851 v2, level, op, off);
6852 tree teardown_call
6853 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6854 TREE_TYPE (var), 6, teardown_code,
6855 ref_to_res, v3, level, op, off);
6857 gimplify_assign (v1, setup_call, &before_fork);
6858 gimplify_assign (v2, init_call, &after_fork);
6859 gimplify_assign (v3, fini_call, &before_join);
6860 gimplify_assign (outgoing, teardown_call, &after_join);
6863 /* Now stitch things together. */
6864 gimple_seq_add_seq (fork_seq, before_fork);
6865 if (fork)
6866 gimple_seq_add_stmt (fork_seq, fork);
6867 gimple_seq_add_seq (fork_seq, after_fork);
6869 gimple_seq_add_seq (join_seq, before_join);
6870 if (join)
6871 gimple_seq_add_stmt (join_seq, join);
6872 gimple_seq_add_seq (join_seq, after_join);
6875 /* Generate code to implement the REDUCTION clauses, append it
6876 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6877 that should be emitted also inside of the critical section,
6878 in that case clear *CLIST afterwards, otherwise leave it as is
6879 and let the caller emit it itself. */
6881 static void
6882 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6883 gimple_seq *clist, omp_context *ctx)
6885 gimple_seq sub_seq = NULL;
6886 gimple *stmt;
6887 tree x, c;
6888 int count = 0;
6890 /* OpenACC loop reductions are handled elsewhere. */
6891 if (is_gimple_omp_oacc (ctx->stmt))
6892 return;
6894 /* SIMD reductions are handled in lower_rec_input_clauses. */
6895 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6896 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6897 return;
6899 /* inscan reductions are handled elsewhere. */
6900 if (ctx->scan_inclusive || ctx->scan_exclusive)
6901 return;
6903 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6904 update in that case, otherwise use a lock. */
6905 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6906 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6907 && !OMP_CLAUSE_REDUCTION_TASK (c))
6909 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6910 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6912 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6913 count = -1;
6914 break;
6916 count++;
6919 if (count == 0)
6920 return;
6922 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6924 tree var, ref, new_var, orig_var;
6925 enum tree_code code;
6926 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6928 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6929 || OMP_CLAUSE_REDUCTION_TASK (c))
6930 continue;
6932 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6933 orig_var = var = OMP_CLAUSE_DECL (c);
6934 if (TREE_CODE (var) == MEM_REF)
6936 var = TREE_OPERAND (var, 0);
6937 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6938 var = TREE_OPERAND (var, 0);
6939 if (TREE_CODE (var) == ADDR_EXPR)
6940 var = TREE_OPERAND (var, 0);
6941 else
6943 /* If this is a pointer or referenced based array
6944 section, the var could be private in the outer
6945 context e.g. on orphaned loop construct. Pretend this
6946 is private variable's outer reference. */
6947 ccode = OMP_CLAUSE_PRIVATE;
6948 if (TREE_CODE (var) == INDIRECT_REF)
6949 var = TREE_OPERAND (var, 0);
6951 orig_var = var;
6952 if (is_variable_sized (var))
6954 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6955 var = DECL_VALUE_EXPR (var);
6956 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6957 var = TREE_OPERAND (var, 0);
6958 gcc_assert (DECL_P (var));
6961 new_var = lookup_decl (var, ctx);
6962 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6963 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6964 ref = build_outer_var_ref (var, ctx, ccode);
6965 code = OMP_CLAUSE_REDUCTION_CODE (c);
6967 /* reduction(-:var) sums up the partial results, so it acts
6968 identically to reduction(+:var). */
6969 if (code == MINUS_EXPR)
6970 code = PLUS_EXPR;
6972 if (count == 1)
6974 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6976 addr = save_expr (addr);
6977 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6978 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6979 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6980 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6981 gimplify_and_add (x, stmt_seqp);
6982 return;
6984 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6986 tree d = OMP_CLAUSE_DECL (c);
6987 tree type = TREE_TYPE (d);
6988 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6989 tree i = create_tmp_var (TREE_TYPE (v));
6990 tree ptype = build_pointer_type (TREE_TYPE (type));
6991 tree bias = TREE_OPERAND (d, 1);
6992 d = TREE_OPERAND (d, 0);
6993 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6995 tree b = TREE_OPERAND (d, 1);
6996 b = maybe_lookup_decl (b, ctx);
6997 if (b == NULL)
6999 b = TREE_OPERAND (d, 1);
7000 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7002 if (integer_zerop (bias))
7003 bias = b;
7004 else
7006 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7007 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7008 TREE_TYPE (b), b, bias);
7010 d = TREE_OPERAND (d, 0);
7012 /* For ref build_outer_var_ref already performs this, so
7013 only new_var needs a dereference. */
7014 if (TREE_CODE (d) == INDIRECT_REF)
7016 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7017 gcc_assert (omp_is_reference (var) && var == orig_var);
7019 else if (TREE_CODE (d) == ADDR_EXPR)
7021 if (orig_var == var)
7023 new_var = build_fold_addr_expr (new_var);
7024 ref = build_fold_addr_expr (ref);
7027 else
7029 gcc_assert (orig_var == var);
7030 if (omp_is_reference (var))
7031 ref = build_fold_addr_expr (ref);
7033 if (DECL_P (v))
7035 tree t = maybe_lookup_decl (v, ctx);
7036 if (t)
7037 v = t;
7038 else
7039 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7040 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7042 if (!integer_zerop (bias))
7044 bias = fold_convert_loc (clause_loc, sizetype, bias);
7045 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7046 TREE_TYPE (new_var), new_var,
7047 unshare_expr (bias));
7048 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7049 TREE_TYPE (ref), ref, bias);
7051 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7052 ref = fold_convert_loc (clause_loc, ptype, ref);
7053 tree m = create_tmp_var (ptype);
7054 gimplify_assign (m, new_var, stmt_seqp);
7055 new_var = m;
7056 m = create_tmp_var (ptype);
7057 gimplify_assign (m, ref, stmt_seqp);
7058 ref = m;
7059 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7060 tree body = create_artificial_label (UNKNOWN_LOCATION);
7061 tree end = create_artificial_label (UNKNOWN_LOCATION);
7062 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7063 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7064 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7065 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7067 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7068 tree decl_placeholder
7069 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7070 SET_DECL_VALUE_EXPR (placeholder, out);
7071 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7072 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7073 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7074 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7075 gimple_seq_add_seq (&sub_seq,
7076 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7077 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7078 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7079 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7081 else
7083 x = build2 (code, TREE_TYPE (out), out, priv);
7084 out = unshare_expr (out);
7085 gimplify_assign (out, x, &sub_seq);
7087 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7088 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7089 gimple_seq_add_stmt (&sub_seq, g);
7090 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7091 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7092 gimple_seq_add_stmt (&sub_seq, g);
7093 g = gimple_build_assign (i, PLUS_EXPR, i,
7094 build_int_cst (TREE_TYPE (i), 1));
7095 gimple_seq_add_stmt (&sub_seq, g);
7096 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7097 gimple_seq_add_stmt (&sub_seq, g);
7098 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7100 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7102 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7104 if (omp_is_reference (var)
7105 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7106 TREE_TYPE (ref)))
7107 ref = build_fold_addr_expr_loc (clause_loc, ref);
7108 SET_DECL_VALUE_EXPR (placeholder, ref);
7109 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7110 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7111 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7112 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7113 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7115 else
7117 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7118 ref = build_outer_var_ref (var, ctx);
7119 gimplify_assign (ref, x, &sub_seq);
7123 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7125 gimple_seq_add_stmt (stmt_seqp, stmt);
7127 gimple_seq_add_seq (stmt_seqp, sub_seq);
7129 if (clist)
7131 gimple_seq_add_seq (stmt_seqp, *clist);
7132 *clist = NULL;
7135 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7137 gimple_seq_add_stmt (stmt_seqp, stmt);
7141 /* Generate code to implement the COPYPRIVATE clauses. */
7143 static void
7144 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7145 omp_context *ctx)
7147 tree c;
7149 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7151 tree var, new_var, ref, x;
7152 bool by_ref;
7153 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7155 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7156 continue;
7158 var = OMP_CLAUSE_DECL (c);
7159 by_ref = use_pointer_for_field (var, NULL);
7161 ref = build_sender_ref (var, ctx);
7162 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7163 if (by_ref)
7165 x = build_fold_addr_expr_loc (clause_loc, new_var);
7166 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7168 gimplify_assign (ref, x, slist);
7170 ref = build_receiver_ref (var, false, ctx);
7171 if (by_ref)
7173 ref = fold_convert_loc (clause_loc,
7174 build_pointer_type (TREE_TYPE (new_var)),
7175 ref);
7176 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7178 if (omp_is_reference (var))
7180 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7181 ref = build_simple_mem_ref_loc (clause_loc, ref);
7182 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7184 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7185 gimplify_and_add (x, rlist);
7190 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7191 and REDUCTION from the sender (aka parent) side. */
7193 static void
7194 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7195 omp_context *ctx)
7197 tree c, t;
7198 int ignored_looptemp = 0;
7199 bool is_taskloop = false;
7201 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7202 by GOMP_taskloop. */
7203 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7205 ignored_looptemp = 2;
7206 is_taskloop = true;
7209 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7211 tree val, ref, x, var;
7212 bool by_ref, do_in = false, do_out = false;
7213 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7215 switch (OMP_CLAUSE_CODE (c))
7217 case OMP_CLAUSE_PRIVATE:
7218 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7219 break;
7220 continue;
7221 case OMP_CLAUSE_FIRSTPRIVATE:
7222 case OMP_CLAUSE_COPYIN:
7223 case OMP_CLAUSE_LASTPRIVATE:
7224 case OMP_CLAUSE_IN_REDUCTION:
7225 case OMP_CLAUSE__REDUCTEMP_:
7226 break;
7227 case OMP_CLAUSE_REDUCTION:
7228 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7229 continue;
7230 break;
7231 case OMP_CLAUSE_SHARED:
7232 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7233 break;
7234 continue;
7235 case OMP_CLAUSE__LOOPTEMP_:
7236 if (ignored_looptemp)
7238 ignored_looptemp--;
7239 continue;
7241 break;
7242 default:
7243 continue;
7246 val = OMP_CLAUSE_DECL (c);
7247 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7248 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7249 && TREE_CODE (val) == MEM_REF)
7251 val = TREE_OPERAND (val, 0);
7252 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7253 val = TREE_OPERAND (val, 0);
7254 if (TREE_CODE (val) == INDIRECT_REF
7255 || TREE_CODE (val) == ADDR_EXPR)
7256 val = TREE_OPERAND (val, 0);
7257 if (is_variable_sized (val))
7258 continue;
7261 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7262 outer taskloop region. */
7263 omp_context *ctx_for_o = ctx;
7264 if (is_taskloop
7265 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7266 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7267 ctx_for_o = ctx->outer;
7269 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7271 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7272 && is_global_var (var)
7273 && (val == OMP_CLAUSE_DECL (c)
7274 || !is_task_ctx (ctx)
7275 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7276 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7277 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7278 != POINTER_TYPE)))))
7279 continue;
7281 t = omp_member_access_dummy_var (var);
7282 if (t)
7284 var = DECL_VALUE_EXPR (var);
7285 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7286 if (o != t)
7287 var = unshare_and_remap (var, t, o);
7288 else
7289 var = unshare_expr (var);
7292 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7294 /* Handle taskloop firstprivate/lastprivate, where the
7295 lastprivate on GIMPLE_OMP_TASK is represented as
7296 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7297 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7298 x = omp_build_component_ref (ctx->sender_decl, f);
7299 if (use_pointer_for_field (val, ctx))
7300 var = build_fold_addr_expr (var);
7301 gimplify_assign (x, var, ilist);
7302 DECL_ABSTRACT_ORIGIN (f) = NULL;
7303 continue;
7306 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7307 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7308 || val == OMP_CLAUSE_DECL (c))
7309 && is_variable_sized (val))
7310 continue;
7311 by_ref = use_pointer_for_field (val, NULL);
7313 switch (OMP_CLAUSE_CODE (c))
7315 case OMP_CLAUSE_FIRSTPRIVATE:
7316 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7317 && !by_ref
7318 && is_task_ctx (ctx))
7319 TREE_NO_WARNING (var) = 1;
7320 do_in = true;
7321 break;
7323 case OMP_CLAUSE_PRIVATE:
7324 case OMP_CLAUSE_COPYIN:
7325 case OMP_CLAUSE__LOOPTEMP_:
7326 case OMP_CLAUSE__REDUCTEMP_:
7327 do_in = true;
7328 break;
7330 case OMP_CLAUSE_LASTPRIVATE:
7331 if (by_ref || omp_is_reference (val))
7333 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7334 continue;
7335 do_in = true;
7337 else
7339 do_out = true;
7340 if (lang_hooks.decls.omp_private_outer_ref (val))
7341 do_in = true;
7343 break;
7345 case OMP_CLAUSE_REDUCTION:
7346 case OMP_CLAUSE_IN_REDUCTION:
7347 do_in = true;
7348 if (val == OMP_CLAUSE_DECL (c))
7350 if (is_task_ctx (ctx))
7351 by_ref = use_pointer_for_field (val, ctx);
7352 else
7353 do_out = !(by_ref || omp_is_reference (val));
7355 else
7356 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7357 break;
7359 default:
7360 gcc_unreachable ();
7363 if (do_in)
7365 ref = build_sender_ref (val, ctx);
7366 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7367 gimplify_assign (ref, x, ilist);
7368 if (is_task_ctx (ctx))
7369 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7372 if (do_out)
7374 ref = build_sender_ref (val, ctx);
7375 gimplify_assign (var, ref, olist);
7380 /* Generate code to implement SHARED from the sender (aka parent)
7381 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7382 list things that got automatically shared. */
7384 static void
7385 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7387 tree var, ovar, nvar, t, f, x, record_type;
7389 if (ctx->record_type == NULL)
7390 return;
7392 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7393 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7395 ovar = DECL_ABSTRACT_ORIGIN (f);
7396 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7397 continue;
7399 nvar = maybe_lookup_decl (ovar, ctx);
7400 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7401 continue;
7403 /* If CTX is a nested parallel directive. Find the immediately
7404 enclosing parallel or workshare construct that contains a
7405 mapping for OVAR. */
7406 var = lookup_decl_in_outer_ctx (ovar, ctx);
7408 t = omp_member_access_dummy_var (var);
7409 if (t)
7411 var = DECL_VALUE_EXPR (var);
7412 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7413 if (o != t)
7414 var = unshare_and_remap (var, t, o);
7415 else
7416 var = unshare_expr (var);
7419 if (use_pointer_for_field (ovar, ctx))
7421 x = build_sender_ref (ovar, ctx);
7422 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7423 && TREE_TYPE (f) == TREE_TYPE (ovar))
7425 gcc_assert (is_parallel_ctx (ctx)
7426 && DECL_ARTIFICIAL (ovar));
7427 /* _condtemp_ clause. */
7428 var = build_constructor (TREE_TYPE (x), NULL);
7430 else
7431 var = build_fold_addr_expr (var);
7432 gimplify_assign (x, var, ilist);
7434 else
7436 x = build_sender_ref (ovar, ctx);
7437 gimplify_assign (x, var, ilist);
7439 if (!TREE_READONLY (var)
7440 /* We don't need to receive a new reference to a result
7441 or parm decl. In fact we may not store to it as we will
7442 invalidate any pending RSO and generate wrong gimple
7443 during inlining. */
7444 && !((TREE_CODE (var) == RESULT_DECL
7445 || TREE_CODE (var) == PARM_DECL)
7446 && DECL_BY_REFERENCE (var)))
7448 x = build_sender_ref (ovar, ctx);
7449 gimplify_assign (var, x, olist);
7455 /* Emit an OpenACC head marker call, encapulating the partitioning and
7456 other information that must be processed by the target compiler.
7457 Return the maximum number of dimensions the associated loop might
7458 be partitioned over. */
7460 static unsigned
7461 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7462 gimple_seq *seq, omp_context *ctx)
7464 unsigned levels = 0;
7465 unsigned tag = 0;
7466 tree gang_static = NULL_TREE;
7467 auto_vec<tree, 5> args;
7469 args.quick_push (build_int_cst
7470 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7471 args.quick_push (ddvar);
7472 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7474 switch (OMP_CLAUSE_CODE (c))
7476 case OMP_CLAUSE_GANG:
7477 tag |= OLF_DIM_GANG;
7478 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7479 /* static:* is represented by -1, and we can ignore it, as
7480 scheduling is always static. */
7481 if (gang_static && integer_minus_onep (gang_static))
7482 gang_static = NULL_TREE;
7483 levels++;
7484 break;
7486 case OMP_CLAUSE_WORKER:
7487 tag |= OLF_DIM_WORKER;
7488 levels++;
7489 break;
7491 case OMP_CLAUSE_VECTOR:
7492 tag |= OLF_DIM_VECTOR;
7493 levels++;
7494 break;
7496 case OMP_CLAUSE_SEQ:
7497 tag |= OLF_SEQ;
7498 break;
7500 case OMP_CLAUSE_AUTO:
7501 tag |= OLF_AUTO;
7502 break;
7504 case OMP_CLAUSE_INDEPENDENT:
7505 tag |= OLF_INDEPENDENT;
7506 break;
7508 case OMP_CLAUSE_TILE:
7509 tag |= OLF_TILE;
7510 break;
7512 default:
7513 continue;
7517 if (gang_static)
7519 if (DECL_P (gang_static))
7520 gang_static = build_outer_var_ref (gang_static, ctx);
7521 tag |= OLF_GANG_STATIC;
7524 /* In a parallel region, loops are implicitly INDEPENDENT. */
7525 omp_context *tgt = enclosing_target_ctx (ctx);
7526 if (!tgt || is_oacc_parallel_or_serial (tgt))
7527 tag |= OLF_INDEPENDENT;
7529 if (tag & OLF_TILE)
7530 /* Tiling could use all 3 levels. */
7531 levels = 3;
7532 else
7534 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7535 Ensure at least one level, or 2 for possible auto
7536 partitioning */
7537 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7538 << OLF_DIM_BASE) | OLF_SEQ));
7540 if (levels < 1u + maybe_auto)
7541 levels = 1u + maybe_auto;
7544 args.quick_push (build_int_cst (integer_type_node, levels));
7545 args.quick_push (build_int_cst (integer_type_node, tag));
7546 if (gang_static)
7547 args.quick_push (gang_static);
7549 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7550 gimple_set_location (call, loc);
7551 gimple_set_lhs (call, ddvar);
7552 gimple_seq_add_stmt (seq, call);
7554 return levels;
7557 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7558 partitioning level of the enclosed region. */
7560 static void
7561 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7562 tree tofollow, gimple_seq *seq)
7564 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7565 : IFN_UNIQUE_OACC_TAIL_MARK);
7566 tree marker = build_int_cst (integer_type_node, marker_kind);
7567 int nargs = 2 + (tofollow != NULL_TREE);
7568 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7569 marker, ddvar, tofollow);
7570 gimple_set_location (call, loc);
7571 gimple_set_lhs (call, ddvar);
7572 gimple_seq_add_stmt (seq, call);
7575 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7576 the loop clauses, from which we extract reductions. Initialize
7577 HEAD and TAIL. */
7579 static void
7580 lower_oacc_head_tail (location_t loc, tree clauses,
7581 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7583 bool inner = false;
7584 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7585 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7587 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7588 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7589 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7591 gcc_assert (count);
7592 for (unsigned done = 1; count; count--, done++)
7594 gimple_seq fork_seq = NULL;
7595 gimple_seq join_seq = NULL;
7597 tree place = build_int_cst (integer_type_node, -1);
7598 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7599 fork_kind, ddvar, place);
7600 gimple_set_location (fork, loc);
7601 gimple_set_lhs (fork, ddvar);
7603 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7604 join_kind, ddvar, place);
7605 gimple_set_location (join, loc);
7606 gimple_set_lhs (join, ddvar);
7608 /* Mark the beginning of this level sequence. */
7609 if (inner)
7610 lower_oacc_loop_marker (loc, ddvar, true,
7611 build_int_cst (integer_type_node, count),
7612 &fork_seq);
7613 lower_oacc_loop_marker (loc, ddvar, false,
7614 build_int_cst (integer_type_node, done),
7615 &join_seq);
7617 lower_oacc_reductions (loc, clauses, place, inner,
7618 fork, join, &fork_seq, &join_seq, ctx);
7620 /* Append this level to head. */
7621 gimple_seq_add_seq (head, fork_seq);
7622 /* Prepend it to tail. */
7623 gimple_seq_add_seq (&join_seq, *tail);
7624 *tail = join_seq;
7626 inner = true;
7629 /* Mark the end of the sequence. */
7630 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7631 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7634 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7635 catch handler and return it. This prevents programs from violating the
7636 structured block semantics with throws. */
7638 static gimple_seq
7639 maybe_catch_exception (gimple_seq body)
7641 gimple *g;
7642 tree decl;
7644 if (!flag_exceptions)
7645 return body;
7647 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7648 decl = lang_hooks.eh_protect_cleanup_actions ();
7649 else
7650 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7652 g = gimple_build_eh_must_not_throw (decl);
7653 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7654 GIMPLE_TRY_CATCH);
7656 return gimple_seq_alloc_with_stmt (g);
7660 /* Routines to lower OMP directives into OMP-GIMPLE. */
7662 /* If ctx is a worksharing context inside of a cancellable parallel
7663 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7664 and conditional branch to parallel's cancel_label to handle
7665 cancellation in the implicit barrier. */
7667 static void
7668 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7669 gimple_seq *body)
7671 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7672 if (gimple_omp_return_nowait_p (omp_return))
7673 return;
7674 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7675 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7676 && outer->cancellable)
7678 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7679 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7680 tree lhs = create_tmp_var (c_bool_type);
7681 gimple_omp_return_set_lhs (omp_return, lhs);
7682 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7683 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7684 fold_convert (c_bool_type,
7685 boolean_false_node),
7686 outer->cancel_label, fallthru_label);
7687 gimple_seq_add_stmt (body, g);
7688 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7690 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7691 return;
7694 /* Find the first task_reduction or reduction clause or return NULL
7695 if there are none. */
7697 static inline tree
7698 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7699 enum omp_clause_code ccode)
7701 while (1)
7703 clauses = omp_find_clause (clauses, ccode);
7704 if (clauses == NULL_TREE)
7705 return NULL_TREE;
7706 if (ccode != OMP_CLAUSE_REDUCTION
7707 || code == OMP_TASKLOOP
7708 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7709 return clauses;
7710 clauses = OMP_CLAUSE_CHAIN (clauses);
7714 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7715 gimple_seq *, gimple_seq *);
7717 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7718 CTX is the enclosing OMP context for the current statement. */
7720 static void
7721 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7723 tree block, control;
7724 gimple_stmt_iterator tgsi;
7725 gomp_sections *stmt;
7726 gimple *t;
7727 gbind *new_stmt, *bind;
7728 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7730 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7732 push_gimplify_context ();
7734 dlist = NULL;
7735 ilist = NULL;
7737 tree rclauses
7738 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7739 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7740 tree rtmp = NULL_TREE;
7741 if (rclauses)
7743 tree type = build_pointer_type (pointer_sized_int_node);
7744 tree temp = create_tmp_var (type);
7745 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7746 OMP_CLAUSE_DECL (c) = temp;
7747 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7748 gimple_omp_sections_set_clauses (stmt, c);
7749 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7750 gimple_omp_sections_clauses (stmt),
7751 &ilist, &tred_dlist);
7752 rclauses = c;
7753 rtmp = make_ssa_name (type);
7754 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7757 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7758 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7760 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7761 &ilist, &dlist, ctx, NULL);
7763 control = create_tmp_var (unsigned_type_node, ".section");
7764 gimple_omp_sections_set_control (stmt, control);
7766 new_body = gimple_omp_body (stmt);
7767 gimple_omp_set_body (stmt, NULL);
7768 tgsi = gsi_start (new_body);
7769 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7771 omp_context *sctx;
7772 gimple *sec_start;
7774 sec_start = gsi_stmt (tgsi);
7775 sctx = maybe_lookup_ctx (sec_start);
7776 gcc_assert (sctx);
7778 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7779 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7780 GSI_CONTINUE_LINKING);
7781 gimple_omp_set_body (sec_start, NULL);
7783 if (gsi_one_before_end_p (tgsi))
7785 gimple_seq l = NULL;
7786 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7787 &ilist, &l, &clist, ctx);
7788 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7789 gimple_omp_section_set_last (sec_start);
7792 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7793 GSI_CONTINUE_LINKING);
7796 block = make_node (BLOCK);
7797 bind = gimple_build_bind (NULL, new_body, block);
7799 olist = NULL;
7800 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7801 &clist, ctx);
7802 if (clist)
7804 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7805 gcall *g = gimple_build_call (fndecl, 0);
7806 gimple_seq_add_stmt (&olist, g);
7807 gimple_seq_add_seq (&olist, clist);
7808 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7809 g = gimple_build_call (fndecl, 0);
7810 gimple_seq_add_stmt (&olist, g);
7813 block = make_node (BLOCK);
7814 new_stmt = gimple_build_bind (NULL, NULL, block);
7815 gsi_replace (gsi_p, new_stmt, true);
7817 pop_gimplify_context (new_stmt);
7818 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7819 BLOCK_VARS (block) = gimple_bind_vars (bind);
7820 if (BLOCK_VARS (block))
7821 TREE_USED (block) = 1;
7823 new_body = NULL;
7824 gimple_seq_add_seq (&new_body, ilist);
7825 gimple_seq_add_stmt (&new_body, stmt);
7826 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7827 gimple_seq_add_stmt (&new_body, bind);
7829 t = gimple_build_omp_continue (control, control);
7830 gimple_seq_add_stmt (&new_body, t);
7832 gimple_seq_add_seq (&new_body, olist);
7833 if (ctx->cancellable)
7834 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7835 gimple_seq_add_seq (&new_body, dlist);
7837 new_body = maybe_catch_exception (new_body);
7839 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7840 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7841 t = gimple_build_omp_return (nowait);
7842 gimple_seq_add_stmt (&new_body, t);
7843 gimple_seq_add_seq (&new_body, tred_dlist);
7844 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7846 if (rclauses)
7847 OMP_CLAUSE_DECL (rclauses) = rtmp;
7849 gimple_bind_set_body (new_stmt, new_body);
7853 /* A subroutine of lower_omp_single. Expand the simple form of
7854 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7856 if (GOMP_single_start ())
7857 BODY;
7858 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7860 FIXME. It may be better to delay expanding the logic of this until
7861 pass_expand_omp. The expanded logic may make the job more difficult
7862 to a synchronization analysis pass. */
7864 static void
7865 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7867 location_t loc = gimple_location (single_stmt);
7868 tree tlabel = create_artificial_label (loc);
7869 tree flabel = create_artificial_label (loc);
7870 gimple *call, *cond;
7871 tree lhs, decl;
7873 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7874 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7875 call = gimple_build_call (decl, 0);
7876 gimple_call_set_lhs (call, lhs);
7877 gimple_seq_add_stmt (pre_p, call);
7879 cond = gimple_build_cond (EQ_EXPR, lhs,
7880 fold_convert_loc (loc, TREE_TYPE (lhs),
7881 boolean_true_node),
7882 tlabel, flabel);
7883 gimple_seq_add_stmt (pre_p, cond);
7884 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7885 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7886 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7890 /* A subroutine of lower_omp_single. Expand the simple form of
7891 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7893 #pragma omp single copyprivate (a, b, c)
7895 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7898 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7900 BODY;
7901 copyout.a = a;
7902 copyout.b = b;
7903 copyout.c = c;
7904 GOMP_single_copy_end (&copyout);
7906 else
7908 a = copyout_p->a;
7909 b = copyout_p->b;
7910 c = copyout_p->c;
7912 GOMP_barrier ();
7915 FIXME. It may be better to delay expanding the logic of this until
7916 pass_expand_omp. The expanded logic may make the job more difficult
7917 to a synchronization analysis pass. */
7919 static void
7920 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7921 omp_context *ctx)
7923 tree ptr_type, t, l0, l1, l2, bfn_decl;
7924 gimple_seq copyin_seq;
7925 location_t loc = gimple_location (single_stmt);
7927 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7929 ptr_type = build_pointer_type (ctx->record_type);
7930 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7932 l0 = create_artificial_label (loc);
7933 l1 = create_artificial_label (loc);
7934 l2 = create_artificial_label (loc);
7936 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7937 t = build_call_expr_loc (loc, bfn_decl, 0);
7938 t = fold_convert_loc (loc, ptr_type, t);
7939 gimplify_assign (ctx->receiver_decl, t, pre_p);
7941 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7942 build_int_cst (ptr_type, 0));
7943 t = build3 (COND_EXPR, void_type_node, t,
7944 build_and_jump (&l0), build_and_jump (&l1));
7945 gimplify_and_add (t, pre_p);
7947 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7949 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7951 copyin_seq = NULL;
7952 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7953 &copyin_seq, ctx);
7955 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7956 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7957 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7958 gimplify_and_add (t, pre_p);
7960 t = build_and_jump (&l2);
7961 gimplify_and_add (t, pre_p);
7963 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7965 gimple_seq_add_seq (pre_p, copyin_seq);
7967 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7971 /* Expand code for an OpenMP single directive. */
7973 static void
7974 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7976 tree block;
7977 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7978 gbind *bind;
7979 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7981 push_gimplify_context ();
7983 block = make_node (BLOCK);
7984 bind = gimple_build_bind (NULL, NULL, block);
7985 gsi_replace (gsi_p, bind, true);
7986 bind_body = NULL;
7987 dlist = NULL;
7988 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7989 &bind_body, &dlist, ctx, NULL);
7990 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7992 gimple_seq_add_stmt (&bind_body, single_stmt);
7994 if (ctx->record_type)
7995 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7996 else
7997 lower_omp_single_simple (single_stmt, &bind_body);
7999 gimple_omp_set_body (single_stmt, NULL);
8001 gimple_seq_add_seq (&bind_body, dlist);
8003 bind_body = maybe_catch_exception (bind_body);
8005 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8006 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8007 gimple *g = gimple_build_omp_return (nowait);
8008 gimple_seq_add_stmt (&bind_body_tail, g);
8009 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8010 if (ctx->record_type)
8012 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8013 tree clobber = build_clobber (ctx->record_type);
8014 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8015 clobber), GSI_SAME_STMT);
8017 gimple_seq_add_seq (&bind_body, bind_body_tail);
8018 gimple_bind_set_body (bind, bind_body);
8020 pop_gimplify_context (bind);
8022 gimple_bind_append_vars (bind, ctx->block_vars);
8023 BLOCK_VARS (block) = ctx->block_vars;
8024 if (BLOCK_VARS (block))
8025 TREE_USED (block) = 1;
8029 /* Expand code for an OpenMP master directive. */
8031 static void
8032 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8034 tree block, lab = NULL, x, bfn_decl;
8035 gimple *stmt = gsi_stmt (*gsi_p);
8036 gbind *bind;
8037 location_t loc = gimple_location (stmt);
8038 gimple_seq tseq;
8040 push_gimplify_context ();
8042 block = make_node (BLOCK);
8043 bind = gimple_build_bind (NULL, NULL, block);
8044 gsi_replace (gsi_p, bind, true);
8045 gimple_bind_add_stmt (bind, stmt);
8047 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8048 x = build_call_expr_loc (loc, bfn_decl, 0);
8049 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8050 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8051 tseq = NULL;
8052 gimplify_and_add (x, &tseq);
8053 gimple_bind_add_seq (bind, tseq);
8055 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8056 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8057 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8058 gimple_omp_set_body (stmt, NULL);
8060 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8062 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8064 pop_gimplify_context (bind);
8066 gimple_bind_append_vars (bind, ctx->block_vars);
8067 BLOCK_VARS (block) = ctx->block_vars;
8070 /* Helper function for lower_omp_task_reductions. For a specific PASS
8071 find out the current clause it should be processed, or return false
8072 if all have been processed already. */
8074 static inline bool
8075 omp_task_reduction_iterate (int pass, enum tree_code code,
8076 enum omp_clause_code ccode, tree *c, tree *decl,
8077 tree *type, tree *next)
8079 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8081 if (ccode == OMP_CLAUSE_REDUCTION
8082 && code != OMP_TASKLOOP
8083 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8084 continue;
8085 *decl = OMP_CLAUSE_DECL (*c);
8086 *type = TREE_TYPE (*decl);
8087 if (TREE_CODE (*decl) == MEM_REF)
8089 if (pass != 1)
8090 continue;
8092 else
8094 if (omp_is_reference (*decl))
8095 *type = TREE_TYPE (*type);
8096 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8097 continue;
8099 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8100 return true;
8102 *decl = NULL_TREE;
8103 *type = NULL_TREE;
8104 *next = NULL_TREE;
8105 return false;
8108 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8109 OMP_TASKGROUP only with task modifier). Register mapping of those in
8110 START sequence and reducing them and unregister them in the END sequence. */
8112 static void
8113 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8114 gimple_seq *start, gimple_seq *end)
8116 enum omp_clause_code ccode
8117 = (code == OMP_TASKGROUP
8118 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8119 tree cancellable = NULL_TREE;
8120 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8121 if (clauses == NULL_TREE)
8122 return;
8123 if (code == OMP_FOR || code == OMP_SECTIONS)
8125 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8126 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8127 && outer->cancellable)
8129 cancellable = error_mark_node;
8130 break;
8132 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8133 break;
8135 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8136 tree *last = &TYPE_FIELDS (record_type);
8137 unsigned cnt = 0;
8138 if (cancellable)
8140 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8141 ptr_type_node);
8142 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8143 integer_type_node);
8144 *last = field;
8145 DECL_CHAIN (field) = ifield;
8146 last = &DECL_CHAIN (ifield);
8147 DECL_CONTEXT (field) = record_type;
8148 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8149 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8150 DECL_CONTEXT (ifield) = record_type;
8151 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8152 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8154 for (int pass = 0; pass < 2; pass++)
8156 tree decl, type, next;
8157 for (tree c = clauses;
8158 omp_task_reduction_iterate (pass, code, ccode,
8159 &c, &decl, &type, &next); c = next)
8161 ++cnt;
8162 tree new_type = type;
8163 if (ctx->outer)
8164 new_type = remap_type (type, &ctx->outer->cb);
8165 tree field
8166 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8167 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8168 new_type);
8169 if (DECL_P (decl) && type == TREE_TYPE (decl))
8171 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8172 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8173 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8175 else
8176 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8177 DECL_CONTEXT (field) = record_type;
8178 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8179 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8180 *last = field;
8181 last = &DECL_CHAIN (field);
8182 tree bfield
8183 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8184 boolean_type_node);
8185 DECL_CONTEXT (bfield) = record_type;
8186 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8187 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8188 *last = bfield;
8189 last = &DECL_CHAIN (bfield);
8192 *last = NULL_TREE;
8193 layout_type (record_type);
8195 /* Build up an array which registers with the runtime all the reductions
8196 and deregisters them at the end. Format documented in libgomp/task.c. */
8197 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8198 tree avar = create_tmp_var_raw (atype);
8199 gimple_add_tmp_var (avar);
8200 TREE_ADDRESSABLE (avar) = 1;
8201 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8202 NULL_TREE, NULL_TREE);
8203 tree t = build_int_cst (pointer_sized_int_node, cnt);
8204 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8205 gimple_seq seq = NULL;
8206 tree sz = fold_convert (pointer_sized_int_node,
8207 TYPE_SIZE_UNIT (record_type));
8208 int cachesz = 64;
8209 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8210 build_int_cst (pointer_sized_int_node, cachesz - 1));
8211 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8212 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8213 ctx->task_reductions.create (1 + cnt);
8214 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8215 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8216 ? sz : NULL_TREE);
8217 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8218 gimple_seq_add_seq (start, seq);
8219 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8220 NULL_TREE, NULL_TREE);
8221 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8222 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8223 NULL_TREE, NULL_TREE);
8224 t = build_int_cst (pointer_sized_int_node,
8225 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8226 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8227 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8228 NULL_TREE, NULL_TREE);
8229 t = build_int_cst (pointer_sized_int_node, -1);
8230 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8231 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8232 NULL_TREE, NULL_TREE);
8233 t = build_int_cst (pointer_sized_int_node, 0);
8234 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8236 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8237 and for each task reduction checks a bool right after the private variable
8238 within that thread's chunk; if the bool is clear, it hasn't been
8239 initialized and thus isn't going to be reduced nor destructed, otherwise
8240 reduce and destruct it. */
8241 tree idx = create_tmp_var (size_type_node);
8242 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8243 tree num_thr_sz = create_tmp_var (size_type_node);
8244 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8245 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8246 tree lab3 = NULL_TREE;
8247 gimple *g;
8248 if (code == OMP_FOR || code == OMP_SECTIONS)
8250 /* For worksharing constructs, only perform it in the master thread,
8251 with the exception of cancelled implicit barriers - then only handle
8252 the current thread. */
8253 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8254 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8255 tree thr_num = create_tmp_var (integer_type_node);
8256 g = gimple_build_call (t, 0);
8257 gimple_call_set_lhs (g, thr_num);
8258 gimple_seq_add_stmt (end, g);
8259 if (cancellable)
8261 tree c;
8262 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8263 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8264 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8265 if (code == OMP_FOR)
8266 c = gimple_omp_for_clauses (ctx->stmt);
8267 else /* if (code == OMP_SECTIONS) */
8268 c = gimple_omp_sections_clauses (ctx->stmt);
8269 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8270 cancellable = c;
8271 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8272 lab5, lab6);
8273 gimple_seq_add_stmt (end, g);
8274 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8275 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8276 gimple_seq_add_stmt (end, g);
8277 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8278 build_one_cst (TREE_TYPE (idx)));
8279 gimple_seq_add_stmt (end, g);
8280 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8281 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8283 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8284 gimple_seq_add_stmt (end, g);
8285 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8287 if (code != OMP_PARALLEL)
8289 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8290 tree num_thr = create_tmp_var (integer_type_node);
8291 g = gimple_build_call (t, 0);
8292 gimple_call_set_lhs (g, num_thr);
8293 gimple_seq_add_stmt (end, g);
8294 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8295 gimple_seq_add_stmt (end, g);
8296 if (cancellable)
8297 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8299 else
8301 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8302 OMP_CLAUSE__REDUCTEMP_);
8303 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8304 t = fold_convert (size_type_node, t);
8305 gimplify_assign (num_thr_sz, t, end);
8307 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8308 NULL_TREE, NULL_TREE);
8309 tree data = create_tmp_var (pointer_sized_int_node);
8310 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8311 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8312 tree ptr;
8313 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8314 ptr = create_tmp_var (build_pointer_type (record_type));
8315 else
8316 ptr = create_tmp_var (ptr_type_node);
8317 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8319 tree field = TYPE_FIELDS (record_type);
8320 cnt = 0;
8321 if (cancellable)
8322 field = DECL_CHAIN (DECL_CHAIN (field));
8323 for (int pass = 0; pass < 2; pass++)
8325 tree decl, type, next;
8326 for (tree c = clauses;
8327 omp_task_reduction_iterate (pass, code, ccode,
8328 &c, &decl, &type, &next); c = next)
8330 tree var = decl, ref;
8331 if (TREE_CODE (decl) == MEM_REF)
8333 var = TREE_OPERAND (var, 0);
8334 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8335 var = TREE_OPERAND (var, 0);
8336 tree v = var;
8337 if (TREE_CODE (var) == ADDR_EXPR)
8338 var = TREE_OPERAND (var, 0);
8339 else if (TREE_CODE (var) == INDIRECT_REF)
8340 var = TREE_OPERAND (var, 0);
8341 tree orig_var = var;
8342 if (is_variable_sized (var))
8344 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8345 var = DECL_VALUE_EXPR (var);
8346 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8347 var = TREE_OPERAND (var, 0);
8348 gcc_assert (DECL_P (var));
8350 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8351 if (orig_var != var)
8352 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8353 else if (TREE_CODE (v) == ADDR_EXPR)
8354 t = build_fold_addr_expr (t);
8355 else if (TREE_CODE (v) == INDIRECT_REF)
8356 t = build_fold_indirect_ref (t);
8357 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8359 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8360 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8361 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8363 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8364 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8365 fold_convert (size_type_node,
8366 TREE_OPERAND (decl, 1)));
8368 else
8370 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8371 if (!omp_is_reference (decl))
8372 t = build_fold_addr_expr (t);
8374 t = fold_convert (pointer_sized_int_node, t);
8375 seq = NULL;
8376 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8377 gimple_seq_add_seq (start, seq);
8378 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8379 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8380 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8381 t = unshare_expr (byte_position (field));
8382 t = fold_convert (pointer_sized_int_node, t);
8383 ctx->task_reduction_map->put (c, cnt);
8384 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8385 ? t : NULL_TREE);
8386 seq = NULL;
8387 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8388 gimple_seq_add_seq (start, seq);
8389 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8390 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8391 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8393 tree bfield = DECL_CHAIN (field);
8394 tree cond;
8395 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8396 /* In parallel or worksharing all threads unconditionally
8397 initialize all their task reduction private variables. */
8398 cond = boolean_true_node;
8399 else if (TREE_TYPE (ptr) == ptr_type_node)
8401 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8402 unshare_expr (byte_position (bfield)));
8403 seq = NULL;
8404 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8405 gimple_seq_add_seq (end, seq);
8406 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8407 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8408 build_int_cst (pbool, 0));
8410 else
8411 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8412 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8413 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8414 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8415 tree condv = create_tmp_var (boolean_type_node);
8416 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8417 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8418 lab3, lab4);
8419 gimple_seq_add_stmt (end, g);
8420 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8421 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8423 /* If this reduction doesn't need destruction and parallel
8424 has been cancelled, there is nothing to do for this
8425 reduction, so jump around the merge operation. */
8426 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8427 g = gimple_build_cond (NE_EXPR, cancellable,
8428 build_zero_cst (TREE_TYPE (cancellable)),
8429 lab4, lab5);
8430 gimple_seq_add_stmt (end, g);
8431 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8434 tree new_var;
8435 if (TREE_TYPE (ptr) == ptr_type_node)
8437 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8438 unshare_expr (byte_position (field)));
8439 seq = NULL;
8440 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8441 gimple_seq_add_seq (end, seq);
8442 tree pbool = build_pointer_type (TREE_TYPE (field));
8443 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8444 build_int_cst (pbool, 0));
8446 else
8447 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8448 build_simple_mem_ref (ptr), field, NULL_TREE);
8450 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8451 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8452 ref = build_simple_mem_ref (ref);
8453 /* reduction(-:var) sums up the partial results, so it acts
8454 identically to reduction(+:var). */
8455 if (rcode == MINUS_EXPR)
8456 rcode = PLUS_EXPR;
8457 if (TREE_CODE (decl) == MEM_REF)
8459 tree type = TREE_TYPE (new_var);
8460 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8461 tree i = create_tmp_var (TREE_TYPE (v));
8462 tree ptype = build_pointer_type (TREE_TYPE (type));
8463 if (DECL_P (v))
8465 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8466 tree vv = create_tmp_var (TREE_TYPE (v));
8467 gimplify_assign (vv, v, start);
8468 v = vv;
8470 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8471 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8472 new_var = build_fold_addr_expr (new_var);
8473 new_var = fold_convert (ptype, new_var);
8474 ref = fold_convert (ptype, ref);
8475 tree m = create_tmp_var (ptype);
8476 gimplify_assign (m, new_var, end);
8477 new_var = m;
8478 m = create_tmp_var (ptype);
8479 gimplify_assign (m, ref, end);
8480 ref = m;
8481 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8482 tree body = create_artificial_label (UNKNOWN_LOCATION);
8483 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8484 gimple_seq_add_stmt (end, gimple_build_label (body));
8485 tree priv = build_simple_mem_ref (new_var);
8486 tree out = build_simple_mem_ref (ref);
8487 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8489 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8490 tree decl_placeholder
8491 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8492 tree lab6 = NULL_TREE;
8493 if (cancellable)
8495 /* If this reduction needs destruction and parallel
8496 has been cancelled, jump around the merge operation
8497 to the destruction. */
8498 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8499 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8500 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8501 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8502 lab6, lab5);
8503 gimple_seq_add_stmt (end, g);
8504 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8506 SET_DECL_VALUE_EXPR (placeholder, out);
8507 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8508 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8509 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8510 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8511 gimple_seq_add_seq (end,
8512 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8513 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8514 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8516 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8517 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8519 if (cancellable)
8520 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8521 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8522 if (x)
8524 gimple_seq tseq = NULL;
8525 gimplify_stmt (&x, &tseq);
8526 gimple_seq_add_seq (end, tseq);
8529 else
8531 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8532 out = unshare_expr (out);
8533 gimplify_assign (out, x, end);
8535 gimple *g
8536 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8537 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8538 gimple_seq_add_stmt (end, g);
8539 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8540 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8541 gimple_seq_add_stmt (end, g);
8542 g = gimple_build_assign (i, PLUS_EXPR, i,
8543 build_int_cst (TREE_TYPE (i), 1));
8544 gimple_seq_add_stmt (end, g);
8545 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8546 gimple_seq_add_stmt (end, g);
8547 gimple_seq_add_stmt (end, gimple_build_label (endl));
8549 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8551 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8552 tree oldv = NULL_TREE;
8553 tree lab6 = NULL_TREE;
8554 if (cancellable)
8556 /* If this reduction needs destruction and parallel
8557 has been cancelled, jump around the merge operation
8558 to the destruction. */
8559 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8560 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8561 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8562 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8563 lab6, lab5);
8564 gimple_seq_add_stmt (end, g);
8565 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8567 if (omp_is_reference (decl)
8568 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8569 TREE_TYPE (ref)))
8570 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8571 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8572 tree refv = create_tmp_var (TREE_TYPE (ref));
8573 gimplify_assign (refv, ref, end);
8574 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8575 SET_DECL_VALUE_EXPR (placeholder, ref);
8576 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8577 tree d = maybe_lookup_decl (decl, ctx);
8578 gcc_assert (d);
8579 if (DECL_HAS_VALUE_EXPR_P (d))
8580 oldv = DECL_VALUE_EXPR (d);
8581 if (omp_is_reference (var))
8583 tree v = fold_convert (TREE_TYPE (d),
8584 build_fold_addr_expr (new_var));
8585 SET_DECL_VALUE_EXPR (d, v);
8587 else
8588 SET_DECL_VALUE_EXPR (d, new_var);
8589 DECL_HAS_VALUE_EXPR_P (d) = 1;
8590 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8591 if (oldv)
8592 SET_DECL_VALUE_EXPR (d, oldv);
8593 else
8595 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8596 DECL_HAS_VALUE_EXPR_P (d) = 0;
8598 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8599 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8600 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8601 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8602 if (cancellable)
8603 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8604 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8605 if (x)
8607 gimple_seq tseq = NULL;
8608 gimplify_stmt (&x, &tseq);
8609 gimple_seq_add_seq (end, tseq);
8612 else
8614 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8615 ref = unshare_expr (ref);
8616 gimplify_assign (ref, x, end);
8618 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8619 ++cnt;
8620 field = DECL_CHAIN (bfield);
8624 if (code == OMP_TASKGROUP)
8626 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8627 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8628 gimple_seq_add_stmt (start, g);
8630 else
8632 tree c;
8633 if (code == OMP_FOR)
8634 c = gimple_omp_for_clauses (ctx->stmt);
8635 else if (code == OMP_SECTIONS)
8636 c = gimple_omp_sections_clauses (ctx->stmt);
8637 else
8638 c = gimple_omp_taskreg_clauses (ctx->stmt);
8639 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8640 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8641 build_fold_addr_expr (avar));
8642 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8645 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8646 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8647 size_one_node));
8648 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8649 gimple_seq_add_stmt (end, g);
8650 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8651 if (code == OMP_FOR || code == OMP_SECTIONS)
8653 enum built_in_function bfn
8654 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8655 t = builtin_decl_explicit (bfn);
8656 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8657 tree arg;
8658 if (cancellable)
8660 arg = create_tmp_var (c_bool_type);
8661 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8662 cancellable));
8664 else
8665 arg = build_int_cst (c_bool_type, 0);
8666 g = gimple_build_call (t, 1, arg);
8668 else
8670 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8671 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8673 gimple_seq_add_stmt (end, g);
8674 t = build_constructor (atype, NULL);
8675 TREE_THIS_VOLATILE (t) = 1;
8676 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8679 /* Expand code for an OpenMP taskgroup directive. */
8681 static void
8682 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8684 gimple *stmt = gsi_stmt (*gsi_p);
8685 gcall *x;
8686 gbind *bind;
8687 gimple_seq dseq = NULL;
8688 tree block = make_node (BLOCK);
8690 bind = gimple_build_bind (NULL, NULL, block);
8691 gsi_replace (gsi_p, bind, true);
8692 gimple_bind_add_stmt (bind, stmt);
8694 push_gimplify_context ();
8696 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8698 gimple_bind_add_stmt (bind, x);
8700 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8701 gimple_omp_taskgroup_clauses (stmt),
8702 gimple_bind_body_ptr (bind), &dseq);
8704 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8705 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8706 gimple_omp_set_body (stmt, NULL);
8708 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8709 gimple_bind_add_seq (bind, dseq);
8711 pop_gimplify_context (bind);
8713 gimple_bind_append_vars (bind, ctx->block_vars);
8714 BLOCK_VARS (block) = ctx->block_vars;
8718 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8720 static void
8721 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8722 omp_context *ctx)
8724 struct omp_for_data fd;
8725 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8726 return;
8728 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8729 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8730 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8731 if (!fd.ordered)
8732 return;
8734 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8735 tree c = gimple_omp_ordered_clauses (ord_stmt);
8736 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8737 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8739 /* Merge depend clauses from multiple adjacent
8740 #pragma omp ordered depend(sink:...) constructs
8741 into one #pragma omp ordered depend(sink:...), so that
8742 we can optimize them together. */
8743 gimple_stmt_iterator gsi = *gsi_p;
8744 gsi_next (&gsi);
8745 while (!gsi_end_p (gsi))
8747 gimple *stmt = gsi_stmt (gsi);
8748 if (is_gimple_debug (stmt)
8749 || gimple_code (stmt) == GIMPLE_NOP)
8751 gsi_next (&gsi);
8752 continue;
8754 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8755 break;
8756 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8757 c = gimple_omp_ordered_clauses (ord_stmt2);
8758 if (c == NULL_TREE
8759 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8760 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8761 break;
8762 while (*list_p)
8763 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8764 *list_p = c;
8765 gsi_remove (&gsi, true);
8769 /* Canonicalize sink dependence clauses into one folded clause if
8770 possible.
8772 The basic algorithm is to create a sink vector whose first
8773 element is the GCD of all the first elements, and whose remaining
8774 elements are the minimum of the subsequent columns.
8776 We ignore dependence vectors whose first element is zero because
8777 such dependencies are known to be executed by the same thread.
8779 We take into account the direction of the loop, so a minimum
8780 becomes a maximum if the loop is iterating forwards. We also
8781 ignore sink clauses where the loop direction is unknown, or where
8782 the offsets are clearly invalid because they are not a multiple
8783 of the loop increment.
8785 For example:
8787 #pragma omp for ordered(2)
8788 for (i=0; i < N; ++i)
8789 for (j=0; j < M; ++j)
8791 #pragma omp ordered \
8792 depend(sink:i-8,j-2) \
8793 depend(sink:i,j-1) \ // Completely ignored because i+0.
8794 depend(sink:i-4,j-3) \
8795 depend(sink:i-6,j-4)
8796 #pragma omp ordered depend(source)
8799 Folded clause is:
8801 depend(sink:-gcd(8,4,6),-min(2,3,4))
8802 -or-
8803 depend(sink:-2,-2)
8806 /* FIXME: Computing GCD's where the first element is zero is
8807 non-trivial in the presence of collapsed loops. Do this later. */
8808 if (fd.collapse > 1)
8809 return;
8811 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8813 /* wide_int is not a POD so it must be default-constructed. */
8814 for (unsigned i = 0; i != 2 * len - 1; ++i)
8815 new (static_cast<void*>(folded_deps + i)) wide_int ();
8817 tree folded_dep = NULL_TREE;
8818 /* TRUE if the first dimension's offset is negative. */
8819 bool neg_offset_p = false;
8821 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8822 unsigned int i;
8823 while ((c = *list_p) != NULL)
8825 bool remove = false;
8827 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8828 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8829 goto next_ordered_clause;
8831 tree vec;
8832 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8833 vec && TREE_CODE (vec) == TREE_LIST;
8834 vec = TREE_CHAIN (vec), ++i)
8836 gcc_assert (i < len);
8838 /* omp_extract_for_data has canonicalized the condition. */
8839 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8840 || fd.loops[i].cond_code == GT_EXPR);
8841 bool forward = fd.loops[i].cond_code == LT_EXPR;
8842 bool maybe_lexically_later = true;
8844 /* While the committee makes up its mind, bail if we have any
8845 non-constant steps. */
8846 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8847 goto lower_omp_ordered_ret;
8849 tree itype = TREE_TYPE (TREE_VALUE (vec));
8850 if (POINTER_TYPE_P (itype))
8851 itype = sizetype;
8852 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8853 TYPE_PRECISION (itype),
8854 TYPE_SIGN (itype));
8856 /* Ignore invalid offsets that are not multiples of the step. */
8857 if (!wi::multiple_of_p (wi::abs (offset),
8858 wi::abs (wi::to_wide (fd.loops[i].step)),
8859 UNSIGNED))
8861 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8862 "ignoring sink clause with offset that is not "
8863 "a multiple of the loop step");
8864 remove = true;
8865 goto next_ordered_clause;
8868 /* Calculate the first dimension. The first dimension of
8869 the folded dependency vector is the GCD of the first
8870 elements, while ignoring any first elements whose offset
8871 is 0. */
8872 if (i == 0)
8874 /* Ignore dependence vectors whose first dimension is 0. */
8875 if (offset == 0)
8877 remove = true;
8878 goto next_ordered_clause;
8880 else
8882 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8884 error_at (OMP_CLAUSE_LOCATION (c),
8885 "first offset must be in opposite direction "
8886 "of loop iterations");
8887 goto lower_omp_ordered_ret;
8889 if (forward)
8890 offset = -offset;
8891 neg_offset_p = forward;
8892 /* Initialize the first time around. */
8893 if (folded_dep == NULL_TREE)
8895 folded_dep = c;
8896 folded_deps[0] = offset;
8898 else
8899 folded_deps[0] = wi::gcd (folded_deps[0],
8900 offset, UNSIGNED);
8903 /* Calculate minimum for the remaining dimensions. */
8904 else
8906 folded_deps[len + i - 1] = offset;
8907 if (folded_dep == c)
8908 folded_deps[i] = offset;
8909 else if (maybe_lexically_later
8910 && !wi::eq_p (folded_deps[i], offset))
8912 if (forward ^ wi::gts_p (folded_deps[i], offset))
8914 unsigned int j;
8915 folded_dep = c;
8916 for (j = 1; j <= i; j++)
8917 folded_deps[j] = folded_deps[len + j - 1];
8919 else
8920 maybe_lexically_later = false;
8924 gcc_assert (i == len);
8926 remove = true;
8928 next_ordered_clause:
8929 if (remove)
8930 *list_p = OMP_CLAUSE_CHAIN (c);
8931 else
8932 list_p = &OMP_CLAUSE_CHAIN (c);
8935 if (folded_dep)
8937 if (neg_offset_p)
8938 folded_deps[0] = -folded_deps[0];
8940 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8941 if (POINTER_TYPE_P (itype))
8942 itype = sizetype;
8944 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8945 = wide_int_to_tree (itype, folded_deps[0]);
8946 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8947 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8950 lower_omp_ordered_ret:
8952 /* Ordered without clauses is #pragma omp threads, while we want
8953 a nop instead if we remove all clauses. */
8954 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8955 gsi_replace (gsi_p, gimple_build_nop (), true);
8959 /* Expand code for an OpenMP ordered directive. */
8961 static void
8962 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8964 tree block;
8965 gimple *stmt = gsi_stmt (*gsi_p), *g;
8966 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8967 gcall *x;
8968 gbind *bind;
8969 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8970 OMP_CLAUSE_SIMD);
8971 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8972 loop. */
8973 bool maybe_simt
8974 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8975 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8976 OMP_CLAUSE_THREADS);
8978 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8979 OMP_CLAUSE_DEPEND))
8981 /* FIXME: This is needs to be moved to the expansion to verify various
8982 conditions only testable on cfg with dominators computed, and also
8983 all the depend clauses to be merged still might need to be available
8984 for the runtime checks. */
8985 if (0)
8986 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8987 return;
8990 push_gimplify_context ();
8992 block = make_node (BLOCK);
8993 bind = gimple_build_bind (NULL, NULL, block);
8994 gsi_replace (gsi_p, bind, true);
8995 gimple_bind_add_stmt (bind, stmt);
8997 if (simd)
8999 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9000 build_int_cst (NULL_TREE, threads));
9001 cfun->has_simduid_loops = true;
9003 else
9004 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9006 gimple_bind_add_stmt (bind, x);
9008 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9009 if (maybe_simt)
9011 counter = create_tmp_var (integer_type_node);
9012 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9013 gimple_call_set_lhs (g, counter);
9014 gimple_bind_add_stmt (bind, g);
9016 body = create_artificial_label (UNKNOWN_LOCATION);
9017 test = create_artificial_label (UNKNOWN_LOCATION);
9018 gimple_bind_add_stmt (bind, gimple_build_label (body));
9020 tree simt_pred = create_tmp_var (integer_type_node);
9021 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9022 gimple_call_set_lhs (g, simt_pred);
9023 gimple_bind_add_stmt (bind, g);
9025 tree t = create_artificial_label (UNKNOWN_LOCATION);
9026 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9027 gimple_bind_add_stmt (bind, g);
9029 gimple_bind_add_stmt (bind, gimple_build_label (t));
9031 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9032 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9033 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9034 gimple_omp_set_body (stmt, NULL);
9036 if (maybe_simt)
9038 gimple_bind_add_stmt (bind, gimple_build_label (test));
9039 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9040 gimple_bind_add_stmt (bind, g);
9042 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9043 tree nonneg = create_tmp_var (integer_type_node);
9044 gimple_seq tseq = NULL;
9045 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9046 gimple_bind_add_seq (bind, tseq);
9048 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9049 gimple_call_set_lhs (g, nonneg);
9050 gimple_bind_add_stmt (bind, g);
9052 tree end = create_artificial_label (UNKNOWN_LOCATION);
9053 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9054 gimple_bind_add_stmt (bind, g);
9056 gimple_bind_add_stmt (bind, gimple_build_label (end));
9058 if (simd)
9059 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9060 build_int_cst (NULL_TREE, threads));
9061 else
9062 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9064 gimple_bind_add_stmt (bind, x);
9066 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9068 pop_gimplify_context (bind);
9070 gimple_bind_append_vars (bind, ctx->block_vars);
9071 BLOCK_VARS (block) = gimple_bind_vars (bind);
9075 /* Expand code for an OpenMP scan directive and the structured block
9076 before the scan directive. */
9078 static void
9079 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9081 gimple *stmt = gsi_stmt (*gsi_p);
9082 bool has_clauses
9083 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9084 tree lane = NULL_TREE;
9085 gimple_seq before = NULL;
9086 omp_context *octx = ctx->outer;
9087 gcc_assert (octx);
9088 if (octx->scan_exclusive && !has_clauses)
9090 gimple_stmt_iterator gsi2 = *gsi_p;
9091 gsi_next (&gsi2);
9092 gimple *stmt2 = gsi_stmt (gsi2);
9093 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9094 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9095 the one with exclusive clause(s), comes first. */
9096 if (stmt2
9097 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9098 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9100 gsi_remove (gsi_p, false);
9101 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9102 ctx = maybe_lookup_ctx (stmt2);
9103 gcc_assert (ctx);
9104 lower_omp_scan (gsi_p, ctx);
9105 return;
9109 bool input_phase = has_clauses ^ octx->scan_inclusive;
9110 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9111 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9112 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9113 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9114 && !gimple_omp_for_combined_p (octx->stmt));
9115 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9116 if (is_for_simd && octx->for_simd_scan_phase)
9117 is_simd = false;
9118 if (is_simd)
9119 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9120 OMP_CLAUSE__SIMDUID_))
9122 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9123 lane = create_tmp_var (unsigned_type_node);
9124 tree t = build_int_cst (integer_type_node,
9125 input_phase ? 1
9126 : octx->scan_inclusive ? 2 : 3);
9127 gimple *g
9128 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9129 gimple_call_set_lhs (g, lane);
9130 gimple_seq_add_stmt (&before, g);
9133 if (is_simd || is_for)
9135 for (tree c = gimple_omp_for_clauses (octx->stmt);
9136 c; c = OMP_CLAUSE_CHAIN (c))
9137 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9138 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9140 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9141 tree var = OMP_CLAUSE_DECL (c);
9142 tree new_var = lookup_decl (var, octx);
9143 tree val = new_var;
9144 tree var2 = NULL_TREE;
9145 tree var3 = NULL_TREE;
9146 tree var4 = NULL_TREE;
9147 tree lane0 = NULL_TREE;
9148 tree new_vard = new_var;
9149 if (omp_is_reference (var))
9151 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9152 val = new_var;
9154 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9156 val = DECL_VALUE_EXPR (new_vard);
9157 if (new_vard != new_var)
9159 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9160 val = TREE_OPERAND (val, 0);
9162 if (TREE_CODE (val) == ARRAY_REF
9163 && VAR_P (TREE_OPERAND (val, 0)))
9165 tree v = TREE_OPERAND (val, 0);
9166 if (lookup_attribute ("omp simd array",
9167 DECL_ATTRIBUTES (v)))
9169 val = unshare_expr (val);
9170 lane0 = TREE_OPERAND (val, 1);
9171 TREE_OPERAND (val, 1) = lane;
9172 var2 = lookup_decl (v, octx);
9173 if (octx->scan_exclusive)
9174 var4 = lookup_decl (var2, octx);
9175 if (input_phase
9176 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9177 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9178 if (!input_phase)
9180 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9181 var2, lane, NULL_TREE, NULL_TREE);
9182 TREE_THIS_NOTRAP (var2) = 1;
9183 if (octx->scan_exclusive)
9185 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9186 var4, lane, NULL_TREE,
9187 NULL_TREE);
9188 TREE_THIS_NOTRAP (var4) = 1;
9191 else
9192 var2 = val;
9195 gcc_assert (var2);
9197 else
9199 var2 = build_outer_var_ref (var, octx);
9200 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9202 var3 = maybe_lookup_decl (new_vard, octx);
9203 if (var3 == new_vard || var3 == NULL_TREE)
9204 var3 = NULL_TREE;
9205 else if (is_simd && octx->scan_exclusive && !input_phase)
9207 var4 = maybe_lookup_decl (var3, octx);
9208 if (var4 == var3 || var4 == NULL_TREE)
9210 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9212 var4 = var3;
9213 var3 = NULL_TREE;
9215 else
9216 var4 = NULL_TREE;
9220 if (is_simd
9221 && octx->scan_exclusive
9222 && !input_phase
9223 && var4 == NULL_TREE)
9224 var4 = create_tmp_var (TREE_TYPE (val));
9226 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9228 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9229 if (input_phase)
9231 if (var3)
9233 /* If we've added a separate identity element
9234 variable, copy it over into val. */
9235 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9236 var3);
9237 gimplify_and_add (x, &before);
9239 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9241 /* Otherwise, assign to it the identity element. */
9242 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9243 if (is_for)
9244 tseq = copy_gimple_seq_and_replace_locals (tseq);
9245 tree ref = build_outer_var_ref (var, octx);
9246 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9247 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9248 if (x)
9250 if (new_vard != new_var)
9251 val = build_fold_addr_expr_loc (clause_loc, val);
9252 SET_DECL_VALUE_EXPR (new_vard, val);
9254 SET_DECL_VALUE_EXPR (placeholder, ref);
9255 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9256 lower_omp (&tseq, octx);
9257 if (x)
9258 SET_DECL_VALUE_EXPR (new_vard, x);
9259 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9260 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9261 gimple_seq_add_seq (&before, tseq);
9262 if (is_simd)
9263 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9266 else if (is_simd)
9268 tree x;
9269 if (octx->scan_exclusive)
9271 tree v4 = unshare_expr (var4);
9272 tree v2 = unshare_expr (var2);
9273 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9274 gimplify_and_add (x, &before);
9276 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9277 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9278 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9279 tree vexpr = val;
9280 if (x && new_vard != new_var)
9281 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9282 if (x)
9283 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9284 SET_DECL_VALUE_EXPR (placeholder, var2);
9285 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9286 lower_omp (&tseq, octx);
9287 gimple_seq_add_seq (&before, tseq);
9288 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9289 if (x)
9290 SET_DECL_VALUE_EXPR (new_vard, x);
9291 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9292 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9293 if (octx->scan_inclusive)
9295 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9296 var2);
9297 gimplify_and_add (x, &before);
9299 else if (lane0 == NULL_TREE)
9301 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9302 var4);
9303 gimplify_and_add (x, &before);
9307 else
9309 if (input_phase)
9311 /* input phase. Set val to initializer before
9312 the body. */
9313 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9314 gimplify_assign (val, x, &before);
9316 else if (is_simd)
9318 /* scan phase. */
9319 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9320 if (code == MINUS_EXPR)
9321 code = PLUS_EXPR;
9323 tree x = build2 (code, TREE_TYPE (var2),
9324 unshare_expr (var2), unshare_expr (val));
9325 if (octx->scan_inclusive)
9327 gimplify_assign (unshare_expr (var2), x, &before);
9328 gimplify_assign (val, var2, &before);
9330 else
9332 gimplify_assign (unshare_expr (var4),
9333 unshare_expr (var2), &before);
9334 gimplify_assign (var2, x, &before);
9335 if (lane0 == NULL_TREE)
9336 gimplify_assign (val, var4, &before);
9340 if (octx->scan_exclusive && !input_phase && lane0)
9342 tree vexpr = unshare_expr (var4);
9343 TREE_OPERAND (vexpr, 1) = lane0;
9344 if (new_vard != new_var)
9345 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9346 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9350 if (is_simd && !is_for_simd)
9352 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9353 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9354 gsi_replace (gsi_p, gimple_build_nop (), true);
9355 return;
9357 lower_omp (gimple_omp_body_ptr (stmt), octx);
9358 if (before)
9360 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9361 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9366 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9367 substitution of a couple of function calls. But in the NAMED case,
9368 requires that languages coordinate a symbol name. It is therefore
9369 best put here in common code. */
9371 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9373 static void
9374 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9376 tree block;
9377 tree name, lock, unlock;
9378 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9379 gbind *bind;
9380 location_t loc = gimple_location (stmt);
9381 gimple_seq tbody;
9383 name = gimple_omp_critical_name (stmt);
9384 if (name)
9386 tree decl;
9388 if (!critical_name_mutexes)
9389 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9391 tree *n = critical_name_mutexes->get (name);
9392 if (n == NULL)
9394 char *new_str;
9396 decl = create_tmp_var_raw (ptr_type_node);
9398 new_str = ACONCAT ((".gomp_critical_user_",
9399 IDENTIFIER_POINTER (name), NULL));
9400 DECL_NAME (decl) = get_identifier (new_str);
9401 TREE_PUBLIC (decl) = 1;
9402 TREE_STATIC (decl) = 1;
9403 DECL_COMMON (decl) = 1;
9404 DECL_ARTIFICIAL (decl) = 1;
9405 DECL_IGNORED_P (decl) = 1;
9407 varpool_node::finalize_decl (decl);
9409 critical_name_mutexes->put (name, decl);
9411 else
9412 decl = *n;
9414 /* If '#pragma omp critical' is inside offloaded region or
9415 inside function marked as offloadable, the symbol must be
9416 marked as offloadable too. */
9417 omp_context *octx;
9418 if (cgraph_node::get (current_function_decl)->offloadable)
9419 varpool_node::get_create (decl)->offloadable = 1;
9420 else
9421 for (octx = ctx->outer; octx; octx = octx->outer)
9422 if (is_gimple_omp_offloaded (octx->stmt))
9424 varpool_node::get_create (decl)->offloadable = 1;
9425 break;
9428 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9429 lock = build_call_expr_loc (loc, lock, 1,
9430 build_fold_addr_expr_loc (loc, decl));
9432 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9433 unlock = build_call_expr_loc (loc, unlock, 1,
9434 build_fold_addr_expr_loc (loc, decl));
9436 else
9438 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9439 lock = build_call_expr_loc (loc, lock, 0);
9441 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9442 unlock = build_call_expr_loc (loc, unlock, 0);
9445 push_gimplify_context ();
9447 block = make_node (BLOCK);
9448 bind = gimple_build_bind (NULL, NULL, block);
9449 gsi_replace (gsi_p, bind, true);
9450 gimple_bind_add_stmt (bind, stmt);
9452 tbody = gimple_bind_body (bind);
9453 gimplify_and_add (lock, &tbody);
9454 gimple_bind_set_body (bind, tbody);
9456 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9457 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9458 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9459 gimple_omp_set_body (stmt, NULL);
9461 tbody = gimple_bind_body (bind);
9462 gimplify_and_add (unlock, &tbody);
9463 gimple_bind_set_body (bind, tbody);
9465 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9467 pop_gimplify_context (bind);
9468 gimple_bind_append_vars (bind, ctx->block_vars);
9469 BLOCK_VARS (block) = gimple_bind_vars (bind);
9472 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9473 for a lastprivate clause. Given a loop control predicate of (V
9474 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9475 is appended to *DLIST, iterator initialization is appended to
9476 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9477 to be emitted in a critical section. */
9479 static void
9480 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9481 gimple_seq *dlist, gimple_seq *clist,
9482 struct omp_context *ctx)
9484 tree clauses, cond, vinit;
9485 enum tree_code cond_code;
9486 gimple_seq stmts;
9488 cond_code = fd->loop.cond_code;
9489 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9491 /* When possible, use a strict equality expression. This can let VRP
9492 type optimizations deduce the value and remove a copy. */
9493 if (tree_fits_shwi_p (fd->loop.step))
9495 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9496 if (step == 1 || step == -1)
9497 cond_code = EQ_EXPR;
9500 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9501 || gimple_omp_for_grid_phony (fd->for_stmt))
9502 cond = omp_grid_lastprivate_predicate (fd);
9503 else
9505 tree n2 = fd->loop.n2;
9506 if (fd->collapse > 1
9507 && TREE_CODE (n2) != INTEGER_CST
9508 && gimple_omp_for_combined_into_p (fd->for_stmt))
9510 struct omp_context *taskreg_ctx = NULL;
9511 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9513 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9514 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9515 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9517 if (gimple_omp_for_combined_into_p (gfor))
9519 gcc_assert (ctx->outer->outer
9520 && is_parallel_ctx (ctx->outer->outer));
9521 taskreg_ctx = ctx->outer->outer;
9523 else
9525 struct omp_for_data outer_fd;
9526 omp_extract_for_data (gfor, &outer_fd, NULL);
9527 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9530 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9531 taskreg_ctx = ctx->outer->outer;
9533 else if (is_taskreg_ctx (ctx->outer))
9534 taskreg_ctx = ctx->outer;
9535 if (taskreg_ctx)
9537 int i;
9538 tree taskreg_clauses
9539 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9540 tree innerc = omp_find_clause (taskreg_clauses,
9541 OMP_CLAUSE__LOOPTEMP_);
9542 gcc_assert (innerc);
9543 for (i = 0; i < fd->collapse; i++)
9545 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9546 OMP_CLAUSE__LOOPTEMP_);
9547 gcc_assert (innerc);
9549 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9550 OMP_CLAUSE__LOOPTEMP_);
9551 if (innerc)
9552 n2 = fold_convert (TREE_TYPE (n2),
9553 lookup_decl (OMP_CLAUSE_DECL (innerc),
9554 taskreg_ctx));
9557 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9560 clauses = gimple_omp_for_clauses (fd->for_stmt);
9561 stmts = NULL;
9562 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9563 if (!gimple_seq_empty_p (stmts))
9565 gimple_seq_add_seq (&stmts, *dlist);
9566 *dlist = stmts;
9568 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9569 vinit = fd->loop.n1;
9570 if (cond_code == EQ_EXPR
9571 && tree_fits_shwi_p (fd->loop.n2)
9572 && ! integer_zerop (fd->loop.n2))
9573 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9574 else
9575 vinit = unshare_expr (vinit);
9577 /* Initialize the iterator variable, so that threads that don't execute
9578 any iterations don't execute the lastprivate clauses by accident. */
9579 gimplify_assign (fd->loop.v, vinit, body_p);
9583 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9585 static tree
9586 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9587 struct walk_stmt_info *wi)
9589 gimple *stmt = gsi_stmt (*gsi_p);
9591 *handled_ops_p = true;
9592 switch (gimple_code (stmt))
9594 WALK_SUBSTMTS;
9596 case GIMPLE_OMP_FOR:
9597 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9598 && gimple_omp_for_combined_into_p (stmt))
9599 *handled_ops_p = false;
9600 break;
9602 case GIMPLE_OMP_SCAN:
9603 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9604 return integer_zero_node;
9605 default:
9606 break;
9608 return NULL;
9611 /* Helper function for lower_omp_for, add transformations for a worksharing
9612 loop with scan directives inside of it.
9613 For worksharing loop not combined with simd, transform:
9614 #pragma omp for reduction(inscan,+:r) private(i)
9615 for (i = 0; i < n; i = i + 1)
9618 update (r);
9620 #pragma omp scan inclusive(r)
9622 use (r);
9626 into two worksharing loops + code to merge results:
9628 num_threads = omp_get_num_threads ();
9629 thread_num = omp_get_thread_num ();
9630 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9631 <D.2099>:
9632 var2 = r;
9633 goto <D.2101>;
9634 <D.2100>:
9635 // For UDRs this is UDR init, or if ctors are needed, copy from
9636 // var3 that has been constructed to contain the neutral element.
9637 var2 = 0;
9638 <D.2101>:
9639 ivar = 0;
9640 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9641 // a shared array with num_threads elements and rprivb to a local array
9642 // number of elements equal to the number of (contiguous) iterations the
9643 // current thread will perform. controlb and controlp variables are
9644 // temporaries to handle deallocation of rprivb at the end of second
9645 // GOMP_FOR.
9646 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9647 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9648 for (i = 0; i < n; i = i + 1)
9651 // For UDRs this is UDR init or copy from var3.
9652 r = 0;
9653 // This is the input phase from user code.
9654 update (r);
9657 // For UDRs this is UDR merge.
9658 var2 = var2 + r;
9659 // Rather than handing it over to the user, save to local thread's
9660 // array.
9661 rprivb[ivar] = var2;
9662 // For exclusive scan, the above two statements are swapped.
9663 ivar = ivar + 1;
9666 // And remember the final value from this thread's into the shared
9667 // rpriva array.
9668 rpriva[(sizetype) thread_num] = var2;
9669 // If more than one thread, compute using Work-Efficient prefix sum
9670 // the inclusive parallel scan of the rpriva array.
9671 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9672 <D.2102>:
9673 GOMP_barrier ();
9674 down = 0;
9675 k = 1;
9676 num_threadsu = (unsigned int) num_threads;
9677 thread_numup1 = (unsigned int) thread_num + 1;
9678 <D.2108>:
9679 twok = k << 1;
9680 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9681 <D.2110>:
9682 down = 4294967295;
9683 k = k >> 1;
9684 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9685 <D.2112>:
9686 k = k >> 1;
9687 <D.2111>:
9688 twok = k << 1;
9689 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9690 mul = REALPART_EXPR <cplx>;
9691 ovf = IMAGPART_EXPR <cplx>;
9692 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9693 <D.2116>:
9694 andv = k & down;
9695 andvm1 = andv + 4294967295;
9696 l = mul + andvm1;
9697 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9698 <D.2120>:
9699 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9700 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9701 rpriva[l] = rpriva[l - k] + rpriva[l];
9702 <D.2117>:
9703 if (down == 0) goto <D.2121>; else goto <D.2122>;
9704 <D.2121>:
9705 k = k << 1;
9706 goto <D.2123>;
9707 <D.2122>:
9708 k = k >> 1;
9709 <D.2123>:
9710 GOMP_barrier ();
9711 if (k != 0) goto <D.2108>; else goto <D.2103>;
9712 <D.2103>:
9713 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9714 <D.2124>:
9715 // For UDRs this is UDR init or copy from var3.
9716 var2 = 0;
9717 goto <D.2126>;
9718 <D.2125>:
9719 var2 = rpriva[thread_num - 1];
9720 <D.2126>:
9721 ivar = 0;
9722 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9723 reduction(inscan,+:r) private(i)
9724 for (i = 0; i < n; i = i + 1)
9727 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9728 r = var2 + rprivb[ivar];
9731 // This is the scan phase from user code.
9732 use (r);
9733 // Plus a bump of the iterator.
9734 ivar = ivar + 1;
9736 } */
9738 static void
9739 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9740 struct omp_for_data *fd, omp_context *ctx)
9742 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9743 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9745 gimple_seq body = gimple_omp_body (stmt);
9746 gimple_stmt_iterator input1_gsi = gsi_none ();
9747 struct walk_stmt_info wi;
9748 memset (&wi, 0, sizeof (wi));
9749 wi.val_only = true;
9750 wi.info = (void *) &input1_gsi;
9751 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9752 gcc_assert (!gsi_end_p (input1_gsi));
9754 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9755 gimple_stmt_iterator gsi = input1_gsi;
9756 gsi_next (&gsi);
9757 gimple_stmt_iterator scan1_gsi = gsi;
9758 gimple *scan_stmt1 = gsi_stmt (gsi);
9759 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9761 gimple_seq input_body = gimple_omp_body (input_stmt1);
9762 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9763 gimple_omp_set_body (input_stmt1, NULL);
9764 gimple_omp_set_body (scan_stmt1, NULL);
9765 gimple_omp_set_body (stmt, NULL);
9767 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9768 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9769 gimple_omp_set_body (stmt, body);
9770 gimple_omp_set_body (input_stmt1, input_body);
9772 gimple_stmt_iterator input2_gsi = gsi_none ();
9773 memset (&wi, 0, sizeof (wi));
9774 wi.val_only = true;
9775 wi.info = (void *) &input2_gsi;
9776 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9777 gcc_assert (!gsi_end_p (input2_gsi));
9779 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9780 gsi = input2_gsi;
9781 gsi_next (&gsi);
9782 gimple_stmt_iterator scan2_gsi = gsi;
9783 gimple *scan_stmt2 = gsi_stmt (gsi);
9784 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9785 gimple_omp_set_body (scan_stmt2, scan_body);
9787 gimple_stmt_iterator input3_gsi = gsi_none ();
9788 gimple_stmt_iterator scan3_gsi = gsi_none ();
9789 gimple_stmt_iterator input4_gsi = gsi_none ();
9790 gimple_stmt_iterator scan4_gsi = gsi_none ();
9791 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9792 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9793 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9794 if (is_for_simd)
9796 memset (&wi, 0, sizeof (wi));
9797 wi.val_only = true;
9798 wi.info = (void *) &input3_gsi;
9799 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9800 gcc_assert (!gsi_end_p (input3_gsi));
9802 input_stmt3 = gsi_stmt (input3_gsi);
9803 gsi = input3_gsi;
9804 gsi_next (&gsi);
9805 scan3_gsi = gsi;
9806 scan_stmt3 = gsi_stmt (gsi);
9807 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9809 memset (&wi, 0, sizeof (wi));
9810 wi.val_only = true;
9811 wi.info = (void *) &input4_gsi;
9812 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9813 gcc_assert (!gsi_end_p (input4_gsi));
9815 input_stmt4 = gsi_stmt (input4_gsi);
9816 gsi = input4_gsi;
9817 gsi_next (&gsi);
9818 scan4_gsi = gsi;
9819 scan_stmt4 = gsi_stmt (gsi);
9820 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9822 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9823 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9826 tree num_threads = create_tmp_var (integer_type_node);
9827 tree thread_num = create_tmp_var (integer_type_node);
9828 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9829 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9830 gimple *g = gimple_build_call (nthreads_decl, 0);
9831 gimple_call_set_lhs (g, num_threads);
9832 gimple_seq_add_stmt (body_p, g);
9833 g = gimple_build_call (threadnum_decl, 0);
9834 gimple_call_set_lhs (g, thread_num);
9835 gimple_seq_add_stmt (body_p, g);
9837 tree ivar = create_tmp_var (sizetype);
9838 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9839 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9840 tree k = create_tmp_var (unsigned_type_node);
9841 tree l = create_tmp_var (unsigned_type_node);
9843 gimple_seq clist = NULL, mdlist = NULL;
9844 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9845 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9846 gimple_seq scan1_list = NULL, input2_list = NULL;
9847 gimple_seq last_list = NULL, reduc_list = NULL;
9848 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9849 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9850 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9852 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9853 tree var = OMP_CLAUSE_DECL (c);
9854 tree new_var = lookup_decl (var, ctx);
9855 tree var3 = NULL_TREE;
9856 tree new_vard = new_var;
9857 if (omp_is_reference (var))
9858 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9859 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9861 var3 = maybe_lookup_decl (new_vard, ctx);
9862 if (var3 == new_vard)
9863 var3 = NULL_TREE;
9866 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9867 tree rpriva = create_tmp_var (ptype);
9868 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9869 OMP_CLAUSE_DECL (nc) = rpriva;
9870 *cp1 = nc;
9871 cp1 = &OMP_CLAUSE_CHAIN (nc);
9873 tree rprivb = create_tmp_var (ptype);
9874 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9875 OMP_CLAUSE_DECL (nc) = rprivb;
9876 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9877 *cp1 = nc;
9878 cp1 = &OMP_CLAUSE_CHAIN (nc);
9880 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9881 if (new_vard != new_var)
9882 TREE_ADDRESSABLE (var2) = 1;
9883 gimple_add_tmp_var (var2);
9885 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9886 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9887 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9888 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9889 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9891 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9892 thread_num, integer_minus_one_node);
9893 x = fold_convert_loc (clause_loc, sizetype, x);
9894 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9895 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9896 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9897 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9899 x = fold_convert_loc (clause_loc, sizetype, l);
9900 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9901 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9902 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9903 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9905 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9906 x = fold_convert_loc (clause_loc, sizetype, x);
9907 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9908 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9909 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9910 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9912 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9913 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9914 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9915 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9917 tree var4 = is_for_simd ? new_var : var2;
9918 tree var5 = NULL_TREE, var6 = NULL_TREE;
9919 if (is_for_simd)
9921 var5 = lookup_decl (var, input_simd_ctx);
9922 var6 = lookup_decl (var, scan_simd_ctx);
9923 if (new_vard != new_var)
9925 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9926 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9929 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9931 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9932 tree val = var2;
9934 x = lang_hooks.decls.omp_clause_default_ctor
9935 (c, var2, build_outer_var_ref (var, ctx));
9936 if (x)
9937 gimplify_and_add (x, &clist);
9939 x = build_outer_var_ref (var, ctx);
9940 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9942 gimplify_and_add (x, &thr01_list);
9944 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9945 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9946 if (var3)
9948 x = unshare_expr (var4);
9949 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9950 gimplify_and_add (x, &thrn1_list);
9951 x = unshare_expr (var4);
9952 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9953 gimplify_and_add (x, &thr02_list);
9955 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9957 /* Otherwise, assign to it the identity element. */
9958 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9959 tseq = copy_gimple_seq_and_replace_locals (tseq);
9960 if (!is_for_simd)
9962 if (new_vard != new_var)
9963 val = build_fold_addr_expr_loc (clause_loc, val);
9964 SET_DECL_VALUE_EXPR (new_vard, val);
9965 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9967 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9968 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9969 lower_omp (&tseq, ctx);
9970 gimple_seq_add_seq (&thrn1_list, tseq);
9971 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9972 lower_omp (&tseq, ctx);
9973 gimple_seq_add_seq (&thr02_list, tseq);
9974 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9975 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9976 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9977 if (y)
9978 SET_DECL_VALUE_EXPR (new_vard, y);
9979 else
9981 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9982 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9986 x = unshare_expr (var4);
9987 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
9988 gimplify_and_add (x, &thrn2_list);
9990 if (is_for_simd)
9992 x = unshare_expr (rprivb_ref);
9993 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
9994 gimplify_and_add (x, &scan1_list);
9996 else
9998 if (ctx->scan_exclusive)
10000 x = unshare_expr (rprivb_ref);
10001 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10002 gimplify_and_add (x, &scan1_list);
10005 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10006 tseq = copy_gimple_seq_and_replace_locals (tseq);
10007 SET_DECL_VALUE_EXPR (placeholder, var2);
10008 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10009 lower_omp (&tseq, ctx);
10010 gimple_seq_add_seq (&scan1_list, tseq);
10012 if (ctx->scan_inclusive)
10014 x = unshare_expr (rprivb_ref);
10015 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10016 gimplify_and_add (x, &scan1_list);
10020 x = unshare_expr (rpriva_ref);
10021 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10022 unshare_expr (var4));
10023 gimplify_and_add (x, &mdlist);
10025 x = unshare_expr (is_for_simd ? var6 : new_var);
10026 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10027 gimplify_and_add (x, &input2_list);
10029 val = rprivb_ref;
10030 if (new_vard != new_var)
10031 val = build_fold_addr_expr_loc (clause_loc, val);
10033 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10034 tseq = copy_gimple_seq_and_replace_locals (tseq);
10035 SET_DECL_VALUE_EXPR (new_vard, val);
10036 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10037 if (is_for_simd)
10039 SET_DECL_VALUE_EXPR (placeholder, var6);
10040 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10042 else
10043 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10044 lower_omp (&tseq, ctx);
10045 if (y)
10046 SET_DECL_VALUE_EXPR (new_vard, y);
10047 else
10049 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10050 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10052 if (!is_for_simd)
10054 SET_DECL_VALUE_EXPR (placeholder, new_var);
10055 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10056 lower_omp (&tseq, ctx);
10058 gimple_seq_add_seq (&input2_list, tseq);
10060 x = build_outer_var_ref (var, ctx);
10061 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10062 gimplify_and_add (x, &last_list);
10064 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10065 gimplify_and_add (x, &reduc_list);
10066 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10067 tseq = copy_gimple_seq_and_replace_locals (tseq);
10068 val = rprival_ref;
10069 if (new_vard != new_var)
10070 val = build_fold_addr_expr_loc (clause_loc, val);
10071 SET_DECL_VALUE_EXPR (new_vard, val);
10072 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10073 SET_DECL_VALUE_EXPR (placeholder, var2);
10074 lower_omp (&tseq, ctx);
10075 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10076 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10077 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10078 if (y)
10079 SET_DECL_VALUE_EXPR (new_vard, y);
10080 else
10082 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10083 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10085 gimple_seq_add_seq (&reduc_list, tseq);
10086 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10087 gimplify_and_add (x, &reduc_list);
10089 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10090 if (x)
10091 gimplify_and_add (x, dlist);
10093 else
10095 x = build_outer_var_ref (var, ctx);
10096 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10098 x = omp_reduction_init (c, TREE_TYPE (new_var));
10099 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10100 &thrn1_list);
10101 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10103 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10105 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10106 if (code == MINUS_EXPR)
10107 code = PLUS_EXPR;
10109 if (is_for_simd)
10110 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10111 else
10113 if (ctx->scan_exclusive)
10114 gimplify_assign (unshare_expr (rprivb_ref), var2,
10115 &scan1_list);
10116 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10117 gimplify_assign (var2, x, &scan1_list);
10118 if (ctx->scan_inclusive)
10119 gimplify_assign (unshare_expr (rprivb_ref), var2,
10120 &scan1_list);
10123 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10124 &mdlist);
10126 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10127 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10129 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10130 &last_list);
10132 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10133 unshare_expr (rprival_ref));
10134 gimplify_assign (rprival_ref, x, &reduc_list);
10138 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10139 gimple_seq_add_stmt (&scan1_list, g);
10140 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10141 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10142 ? scan_stmt4 : scan_stmt2), g);
10144 tree controlb = create_tmp_var (boolean_type_node);
10145 tree controlp = create_tmp_var (ptr_type_node);
10146 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10147 OMP_CLAUSE_DECL (nc) = controlb;
10148 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10149 *cp1 = nc;
10150 cp1 = &OMP_CLAUSE_CHAIN (nc);
10151 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10152 OMP_CLAUSE_DECL (nc) = controlp;
10153 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10154 *cp1 = nc;
10155 cp1 = &OMP_CLAUSE_CHAIN (nc);
10156 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10157 OMP_CLAUSE_DECL (nc) = controlb;
10158 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10159 *cp2 = nc;
10160 cp2 = &OMP_CLAUSE_CHAIN (nc);
10161 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10162 OMP_CLAUSE_DECL (nc) = controlp;
10163 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10164 *cp2 = nc;
10165 cp2 = &OMP_CLAUSE_CHAIN (nc);
10167 *cp1 = gimple_omp_for_clauses (stmt);
10168 gimple_omp_for_set_clauses (stmt, new_clauses1);
10169 *cp2 = gimple_omp_for_clauses (new_stmt);
10170 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10172 if (is_for_simd)
10174 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10175 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10177 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10178 GSI_SAME_STMT);
10179 gsi_remove (&input3_gsi, true);
10180 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10181 GSI_SAME_STMT);
10182 gsi_remove (&scan3_gsi, true);
10183 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10184 GSI_SAME_STMT);
10185 gsi_remove (&input4_gsi, true);
10186 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10187 GSI_SAME_STMT);
10188 gsi_remove (&scan4_gsi, true);
10190 else
10192 gimple_omp_set_body (scan_stmt1, scan1_list);
10193 gimple_omp_set_body (input_stmt2, input2_list);
10196 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10197 GSI_SAME_STMT);
10198 gsi_remove (&input1_gsi, true);
10199 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10200 GSI_SAME_STMT);
10201 gsi_remove (&scan1_gsi, true);
10202 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10203 GSI_SAME_STMT);
10204 gsi_remove (&input2_gsi, true);
10205 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10206 GSI_SAME_STMT);
10207 gsi_remove (&scan2_gsi, true);
10209 gimple_seq_add_seq (body_p, clist);
10211 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10212 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10213 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10214 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10215 gimple_seq_add_stmt (body_p, g);
10216 g = gimple_build_label (lab1);
10217 gimple_seq_add_stmt (body_p, g);
10218 gimple_seq_add_seq (body_p, thr01_list);
10219 g = gimple_build_goto (lab3);
10220 gimple_seq_add_stmt (body_p, g);
10221 g = gimple_build_label (lab2);
10222 gimple_seq_add_stmt (body_p, g);
10223 gimple_seq_add_seq (body_p, thrn1_list);
10224 g = gimple_build_label (lab3);
10225 gimple_seq_add_stmt (body_p, g);
10227 g = gimple_build_assign (ivar, size_zero_node);
10228 gimple_seq_add_stmt (body_p, g);
10230 gimple_seq_add_stmt (body_p, stmt);
10231 gimple_seq_add_seq (body_p, body);
10232 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10233 fd->loop.v));
10235 g = gimple_build_omp_return (true);
10236 gimple_seq_add_stmt (body_p, g);
10237 gimple_seq_add_seq (body_p, mdlist);
10239 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10240 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10241 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10242 gimple_seq_add_stmt (body_p, g);
10243 g = gimple_build_label (lab1);
10244 gimple_seq_add_stmt (body_p, g);
10246 g = omp_build_barrier (NULL);
10247 gimple_seq_add_stmt (body_p, g);
10249 tree down = create_tmp_var (unsigned_type_node);
10250 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10251 gimple_seq_add_stmt (body_p, g);
10253 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10254 gimple_seq_add_stmt (body_p, g);
10256 tree num_threadsu = create_tmp_var (unsigned_type_node);
10257 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10258 gimple_seq_add_stmt (body_p, g);
10260 tree thread_numu = create_tmp_var (unsigned_type_node);
10261 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10262 gimple_seq_add_stmt (body_p, g);
10264 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10265 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10266 build_int_cst (unsigned_type_node, 1));
10267 gimple_seq_add_stmt (body_p, g);
10269 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10270 g = gimple_build_label (lab3);
10271 gimple_seq_add_stmt (body_p, g);
10273 tree twok = create_tmp_var (unsigned_type_node);
10274 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10275 gimple_seq_add_stmt (body_p, g);
10277 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10278 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10279 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10280 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10281 gimple_seq_add_stmt (body_p, g);
10282 g = gimple_build_label (lab4);
10283 gimple_seq_add_stmt (body_p, g);
10284 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10285 gimple_seq_add_stmt (body_p, g);
10286 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10287 gimple_seq_add_stmt (body_p, g);
10289 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10290 gimple_seq_add_stmt (body_p, g);
10291 g = gimple_build_label (lab6);
10292 gimple_seq_add_stmt (body_p, g);
10294 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10295 gimple_seq_add_stmt (body_p, g);
10297 g = gimple_build_label (lab5);
10298 gimple_seq_add_stmt (body_p, g);
10300 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10301 gimple_seq_add_stmt (body_p, g);
10303 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10304 DECL_GIMPLE_REG_P (cplx) = 1;
10305 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10306 gimple_call_set_lhs (g, cplx);
10307 gimple_seq_add_stmt (body_p, g);
10308 tree mul = create_tmp_var (unsigned_type_node);
10309 g = gimple_build_assign (mul, REALPART_EXPR,
10310 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10311 gimple_seq_add_stmt (body_p, g);
10312 tree ovf = create_tmp_var (unsigned_type_node);
10313 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10314 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10315 gimple_seq_add_stmt (body_p, g);
10317 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10318 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10319 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10320 lab7, lab8);
10321 gimple_seq_add_stmt (body_p, g);
10322 g = gimple_build_label (lab7);
10323 gimple_seq_add_stmt (body_p, g);
10325 tree andv = create_tmp_var (unsigned_type_node);
10326 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10327 gimple_seq_add_stmt (body_p, g);
10328 tree andvm1 = create_tmp_var (unsigned_type_node);
10329 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10330 build_minus_one_cst (unsigned_type_node));
10331 gimple_seq_add_stmt (body_p, g);
10333 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10334 gimple_seq_add_stmt (body_p, g);
10336 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10337 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10338 gimple_seq_add_stmt (body_p, g);
10339 g = gimple_build_label (lab9);
10340 gimple_seq_add_stmt (body_p, g);
10341 gimple_seq_add_seq (body_p, reduc_list);
10342 g = gimple_build_label (lab8);
10343 gimple_seq_add_stmt (body_p, g);
10345 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10346 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10347 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10348 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10349 lab10, lab11);
10350 gimple_seq_add_stmt (body_p, g);
10351 g = gimple_build_label (lab10);
10352 gimple_seq_add_stmt (body_p, g);
10353 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10354 gimple_seq_add_stmt (body_p, g);
10355 g = gimple_build_goto (lab12);
10356 gimple_seq_add_stmt (body_p, g);
10357 g = gimple_build_label (lab11);
10358 gimple_seq_add_stmt (body_p, g);
10359 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10360 gimple_seq_add_stmt (body_p, g);
10361 g = gimple_build_label (lab12);
10362 gimple_seq_add_stmt (body_p, g);
10364 g = omp_build_barrier (NULL);
10365 gimple_seq_add_stmt (body_p, g);
10367 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10368 lab3, lab2);
10369 gimple_seq_add_stmt (body_p, g);
10371 g = gimple_build_label (lab2);
10372 gimple_seq_add_stmt (body_p, g);
10374 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10375 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10376 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10377 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10378 gimple_seq_add_stmt (body_p, g);
10379 g = gimple_build_label (lab1);
10380 gimple_seq_add_stmt (body_p, g);
10381 gimple_seq_add_seq (body_p, thr02_list);
10382 g = gimple_build_goto (lab3);
10383 gimple_seq_add_stmt (body_p, g);
10384 g = gimple_build_label (lab2);
10385 gimple_seq_add_stmt (body_p, g);
10386 gimple_seq_add_seq (body_p, thrn2_list);
10387 g = gimple_build_label (lab3);
10388 gimple_seq_add_stmt (body_p, g);
10390 g = gimple_build_assign (ivar, size_zero_node);
10391 gimple_seq_add_stmt (body_p, g);
10392 gimple_seq_add_stmt (body_p, new_stmt);
10393 gimple_seq_add_seq (body_p, new_body);
10395 gimple_seq new_dlist = NULL;
10396 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10397 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10398 tree num_threadsm1 = create_tmp_var (integer_type_node);
10399 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10400 integer_minus_one_node);
10401 gimple_seq_add_stmt (&new_dlist, g);
10402 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10403 gimple_seq_add_stmt (&new_dlist, g);
10404 g = gimple_build_label (lab1);
10405 gimple_seq_add_stmt (&new_dlist, g);
10406 gimple_seq_add_seq (&new_dlist, last_list);
10407 g = gimple_build_label (lab2);
10408 gimple_seq_add_stmt (&new_dlist, g);
10409 gimple_seq_add_seq (&new_dlist, *dlist);
10410 *dlist = new_dlist;
10413 /* Lower code for an OMP loop directive. */
10415 static void
10416 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10418 tree *rhs_p, block;
10419 struct omp_for_data fd, *fdp = NULL;
10420 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10421 gbind *new_stmt;
10422 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10423 gimple_seq cnt_list = NULL, clist = NULL;
10424 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10425 size_t i;
10427 push_gimplify_context ();
10429 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10431 block = make_node (BLOCK);
10432 new_stmt = gimple_build_bind (NULL, NULL, block);
10433 /* Replace at gsi right away, so that 'stmt' is no member
10434 of a sequence anymore as we're going to add to a different
10435 one below. */
10436 gsi_replace (gsi_p, new_stmt, true);
10438 /* Move declaration of temporaries in the loop body before we make
10439 it go away. */
10440 omp_for_body = gimple_omp_body (stmt);
10441 if (!gimple_seq_empty_p (omp_for_body)
10442 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10444 gbind *inner_bind
10445 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10446 tree vars = gimple_bind_vars (inner_bind);
10447 gimple_bind_append_vars (new_stmt, vars);
10448 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10449 keep them on the inner_bind and it's block. */
10450 gimple_bind_set_vars (inner_bind, NULL_TREE);
10451 if (gimple_bind_block (inner_bind))
10452 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10455 if (gimple_omp_for_combined_into_p (stmt))
10457 omp_extract_for_data (stmt, &fd, NULL);
10458 fdp = &fd;
10460 /* We need two temporaries with fd.loop.v type (istart/iend)
10461 and then (fd.collapse - 1) temporaries with the same
10462 type for count2 ... countN-1 vars if not constant. */
10463 size_t count = 2;
10464 tree type = fd.iter_type;
10465 if (fd.collapse > 1
10466 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10467 count += fd.collapse - 1;
10468 bool taskreg_for
10469 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10470 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10471 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10472 tree simtc = NULL;
10473 tree clauses = *pc;
10474 if (taskreg_for)
10475 outerc
10476 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10477 OMP_CLAUSE__LOOPTEMP_);
10478 if (ctx->simt_stmt)
10479 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10480 OMP_CLAUSE__LOOPTEMP_);
10481 for (i = 0; i < count; i++)
10483 tree temp;
10484 if (taskreg_for)
10486 gcc_assert (outerc);
10487 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10488 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10489 OMP_CLAUSE__LOOPTEMP_);
10491 else
10493 /* If there are 2 adjacent SIMD stmts, one with _simt_
10494 clause, another without, make sure they have the same
10495 decls in _looptemp_ clauses, because the outer stmt
10496 they are combined into will look up just one inner_stmt. */
10497 if (ctx->simt_stmt)
10498 temp = OMP_CLAUSE_DECL (simtc);
10499 else
10500 temp = create_tmp_var (type);
10501 insert_decl_map (&ctx->outer->cb, temp, temp);
10503 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10504 OMP_CLAUSE_DECL (*pc) = temp;
10505 pc = &OMP_CLAUSE_CHAIN (*pc);
10506 if (ctx->simt_stmt)
10507 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10508 OMP_CLAUSE__LOOPTEMP_);
10510 *pc = clauses;
10513 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10514 dlist = NULL;
10515 body = NULL;
10516 tree rclauses
10517 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10518 OMP_CLAUSE_REDUCTION);
10519 tree rtmp = NULL_TREE;
10520 if (rclauses)
10522 tree type = build_pointer_type (pointer_sized_int_node);
10523 tree temp = create_tmp_var (type);
10524 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10525 OMP_CLAUSE_DECL (c) = temp;
10526 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10527 gimple_omp_for_set_clauses (stmt, c);
10528 lower_omp_task_reductions (ctx, OMP_FOR,
10529 gimple_omp_for_clauses (stmt),
10530 &tred_ilist, &tred_dlist);
10531 rclauses = c;
10532 rtmp = make_ssa_name (type);
10533 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10536 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10537 ctx);
10539 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10540 fdp);
10541 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10542 gimple_omp_for_pre_body (stmt));
10544 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10546 /* Lower the header expressions. At this point, we can assume that
10547 the header is of the form:
10549 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10551 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10552 using the .omp_data_s mapping, if needed. */
10553 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10555 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10556 if (!is_gimple_min_invariant (*rhs_p))
10557 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10558 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10559 recompute_tree_invariant_for_addr_expr (*rhs_p);
10561 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10562 if (!is_gimple_min_invariant (*rhs_p))
10563 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10564 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10565 recompute_tree_invariant_for_addr_expr (*rhs_p);
10567 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10568 if (!is_gimple_min_invariant (*rhs_p))
10569 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10571 if (rclauses)
10572 gimple_seq_add_seq (&tred_ilist, cnt_list);
10573 else
10574 gimple_seq_add_seq (&body, cnt_list);
10576 /* Once lowered, extract the bounds and clauses. */
10577 omp_extract_for_data (stmt, &fd, NULL);
10579 if (is_gimple_omp_oacc (ctx->stmt)
10580 && !ctx_in_oacc_kernels_region (ctx))
10581 lower_oacc_head_tail (gimple_location (stmt),
10582 gimple_omp_for_clauses (stmt),
10583 &oacc_head, &oacc_tail, ctx);
10585 /* Add OpenACC partitioning and reduction markers just before the loop. */
10586 if (oacc_head)
10587 gimple_seq_add_seq (&body, oacc_head);
10589 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10591 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10592 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10593 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10594 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10596 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10597 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10598 OMP_CLAUSE_LINEAR_STEP (c)
10599 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10600 ctx);
10603 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10604 && gimple_omp_for_grid_phony (stmt));
10605 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10606 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10608 gcc_assert (!phony_loop);
10609 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10611 else
10613 if (!phony_loop)
10614 gimple_seq_add_stmt (&body, stmt);
10615 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10618 if (!phony_loop)
10619 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10620 fd.loop.v));
10622 /* After the loop, add exit clauses. */
10623 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10625 if (clist)
10627 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10628 gcall *g = gimple_build_call (fndecl, 0);
10629 gimple_seq_add_stmt (&body, g);
10630 gimple_seq_add_seq (&body, clist);
10631 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10632 g = gimple_build_call (fndecl, 0);
10633 gimple_seq_add_stmt (&body, g);
10636 if (ctx->cancellable)
10637 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10639 gimple_seq_add_seq (&body, dlist);
10641 if (rclauses)
10643 gimple_seq_add_seq (&tred_ilist, body);
10644 body = tred_ilist;
10647 body = maybe_catch_exception (body);
10649 if (!phony_loop)
10651 /* Region exit marker goes at the end of the loop body. */
10652 gimple *g = gimple_build_omp_return (fd.have_nowait);
10653 gimple_seq_add_stmt (&body, g);
10655 gimple_seq_add_seq (&body, tred_dlist);
10657 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10659 if (rclauses)
10660 OMP_CLAUSE_DECL (rclauses) = rtmp;
10663 /* Add OpenACC joining and reduction markers just after the loop. */
10664 if (oacc_tail)
10665 gimple_seq_add_seq (&body, oacc_tail);
10667 pop_gimplify_context (new_stmt);
10669 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10670 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10671 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10672 if (BLOCK_VARS (block))
10673 TREE_USED (block) = 1;
10675 gimple_bind_set_body (new_stmt, body);
10676 gimple_omp_set_body (stmt, NULL);
10677 gimple_omp_for_set_pre_body (stmt, NULL);
10680 /* Callback for walk_stmts. Check if the current statement only contains
10681 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10683 static tree
10684 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10685 bool *handled_ops_p,
10686 struct walk_stmt_info *wi)
10688 int *info = (int *) wi->info;
10689 gimple *stmt = gsi_stmt (*gsi_p);
10691 *handled_ops_p = true;
10692 switch (gimple_code (stmt))
10694 WALK_SUBSTMTS;
10696 case GIMPLE_DEBUG:
10697 break;
10698 case GIMPLE_OMP_FOR:
10699 case GIMPLE_OMP_SECTIONS:
10700 *info = *info == 0 ? 1 : -1;
10701 break;
10702 default:
10703 *info = -1;
10704 break;
10706 return NULL;
10709 struct omp_taskcopy_context
10711 /* This field must be at the beginning, as we do "inheritance": Some
10712 callback functions for tree-inline.c (e.g., omp_copy_decl)
10713 receive a copy_body_data pointer that is up-casted to an
10714 omp_context pointer. */
10715 copy_body_data cb;
10716 omp_context *ctx;
10719 static tree
10720 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10722 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10724 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10725 return create_tmp_var (TREE_TYPE (var));
10727 return var;
10730 static tree
10731 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10733 tree name, new_fields = NULL, type, f;
10735 type = lang_hooks.types.make_type (RECORD_TYPE);
10736 name = DECL_NAME (TYPE_NAME (orig_type));
10737 name = build_decl (gimple_location (tcctx->ctx->stmt),
10738 TYPE_DECL, name, type);
10739 TYPE_NAME (type) = name;
10741 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10743 tree new_f = copy_node (f);
10744 DECL_CONTEXT (new_f) = type;
10745 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10746 TREE_CHAIN (new_f) = new_fields;
10747 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10748 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10749 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10750 &tcctx->cb, NULL);
10751 new_fields = new_f;
10752 tcctx->cb.decl_map->put (f, new_f);
10754 TYPE_FIELDS (type) = nreverse (new_fields);
10755 layout_type (type);
10756 return type;
10759 /* Create task copyfn. */
10761 static void
10762 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10764 struct function *child_cfun;
10765 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10766 tree record_type, srecord_type, bind, list;
10767 bool record_needs_remap = false, srecord_needs_remap = false;
10768 splay_tree_node n;
10769 struct omp_taskcopy_context tcctx;
10770 location_t loc = gimple_location (task_stmt);
10771 size_t looptempno = 0;
10773 child_fn = gimple_omp_task_copy_fn (task_stmt);
10774 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10775 gcc_assert (child_cfun->cfg == NULL);
10776 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10778 /* Reset DECL_CONTEXT on function arguments. */
10779 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10780 DECL_CONTEXT (t) = child_fn;
10782 /* Populate the function. */
10783 push_gimplify_context ();
10784 push_cfun (child_cfun);
10786 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10787 TREE_SIDE_EFFECTS (bind) = 1;
10788 list = NULL;
10789 DECL_SAVED_TREE (child_fn) = bind;
10790 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10792 /* Remap src and dst argument types if needed. */
10793 record_type = ctx->record_type;
10794 srecord_type = ctx->srecord_type;
10795 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10796 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10798 record_needs_remap = true;
10799 break;
10801 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10802 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10804 srecord_needs_remap = true;
10805 break;
10808 if (record_needs_remap || srecord_needs_remap)
10810 memset (&tcctx, '\0', sizeof (tcctx));
10811 tcctx.cb.src_fn = ctx->cb.src_fn;
10812 tcctx.cb.dst_fn = child_fn;
10813 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10814 gcc_checking_assert (tcctx.cb.src_node);
10815 tcctx.cb.dst_node = tcctx.cb.src_node;
10816 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10817 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10818 tcctx.cb.eh_lp_nr = 0;
10819 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10820 tcctx.cb.decl_map = new hash_map<tree, tree>;
10821 tcctx.ctx = ctx;
10823 if (record_needs_remap)
10824 record_type = task_copyfn_remap_type (&tcctx, record_type);
10825 if (srecord_needs_remap)
10826 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10828 else
10829 tcctx.cb.decl_map = NULL;
10831 arg = DECL_ARGUMENTS (child_fn);
10832 TREE_TYPE (arg) = build_pointer_type (record_type);
10833 sarg = DECL_CHAIN (arg);
10834 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10836 /* First pass: initialize temporaries used in record_type and srecord_type
10837 sizes and field offsets. */
10838 if (tcctx.cb.decl_map)
10839 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10840 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10842 tree *p;
10844 decl = OMP_CLAUSE_DECL (c);
10845 p = tcctx.cb.decl_map->get (decl);
10846 if (p == NULL)
10847 continue;
10848 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10849 sf = (tree) n->value;
10850 sf = *tcctx.cb.decl_map->get (sf);
10851 src = build_simple_mem_ref_loc (loc, sarg);
10852 src = omp_build_component_ref (src, sf);
10853 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10854 append_to_statement_list (t, &list);
10857 /* Second pass: copy shared var pointers and copy construct non-VLA
10858 firstprivate vars. */
10859 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10860 switch (OMP_CLAUSE_CODE (c))
10862 splay_tree_key key;
10863 case OMP_CLAUSE_SHARED:
10864 decl = OMP_CLAUSE_DECL (c);
10865 key = (splay_tree_key) decl;
10866 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10867 key = (splay_tree_key) &DECL_UID (decl);
10868 n = splay_tree_lookup (ctx->field_map, key);
10869 if (n == NULL)
10870 break;
10871 f = (tree) n->value;
10872 if (tcctx.cb.decl_map)
10873 f = *tcctx.cb.decl_map->get (f);
10874 n = splay_tree_lookup (ctx->sfield_map, key);
10875 sf = (tree) n->value;
10876 if (tcctx.cb.decl_map)
10877 sf = *tcctx.cb.decl_map->get (sf);
10878 src = build_simple_mem_ref_loc (loc, sarg);
10879 src = omp_build_component_ref (src, sf);
10880 dst = build_simple_mem_ref_loc (loc, arg);
10881 dst = omp_build_component_ref (dst, f);
10882 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10883 append_to_statement_list (t, &list);
10884 break;
10885 case OMP_CLAUSE_REDUCTION:
10886 case OMP_CLAUSE_IN_REDUCTION:
10887 decl = OMP_CLAUSE_DECL (c);
10888 if (TREE_CODE (decl) == MEM_REF)
10890 decl = TREE_OPERAND (decl, 0);
10891 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10892 decl = TREE_OPERAND (decl, 0);
10893 if (TREE_CODE (decl) == INDIRECT_REF
10894 || TREE_CODE (decl) == ADDR_EXPR)
10895 decl = TREE_OPERAND (decl, 0);
10897 key = (splay_tree_key) decl;
10898 n = splay_tree_lookup (ctx->field_map, key);
10899 if (n == NULL)
10900 break;
10901 f = (tree) n->value;
10902 if (tcctx.cb.decl_map)
10903 f = *tcctx.cb.decl_map->get (f);
10904 n = splay_tree_lookup (ctx->sfield_map, key);
10905 sf = (tree) n->value;
10906 if (tcctx.cb.decl_map)
10907 sf = *tcctx.cb.decl_map->get (sf);
10908 src = build_simple_mem_ref_loc (loc, sarg);
10909 src = omp_build_component_ref (src, sf);
10910 if (decl != OMP_CLAUSE_DECL (c)
10911 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10912 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10913 src = build_simple_mem_ref_loc (loc, src);
10914 dst = build_simple_mem_ref_loc (loc, arg);
10915 dst = omp_build_component_ref (dst, f);
10916 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10917 append_to_statement_list (t, &list);
10918 break;
10919 case OMP_CLAUSE__LOOPTEMP_:
10920 /* Fields for first two _looptemp_ clauses are initialized by
10921 GOMP_taskloop*, the rest are handled like firstprivate. */
10922 if (looptempno < 2)
10924 looptempno++;
10925 break;
10927 /* FALLTHRU */
10928 case OMP_CLAUSE__REDUCTEMP_:
10929 case OMP_CLAUSE_FIRSTPRIVATE:
10930 decl = OMP_CLAUSE_DECL (c);
10931 if (is_variable_sized (decl))
10932 break;
10933 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10934 if (n == NULL)
10935 break;
10936 f = (tree) n->value;
10937 if (tcctx.cb.decl_map)
10938 f = *tcctx.cb.decl_map->get (f);
10939 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10940 if (n != NULL)
10942 sf = (tree) n->value;
10943 if (tcctx.cb.decl_map)
10944 sf = *tcctx.cb.decl_map->get (sf);
10945 src = build_simple_mem_ref_loc (loc, sarg);
10946 src = omp_build_component_ref (src, sf);
10947 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10948 src = build_simple_mem_ref_loc (loc, src);
10950 else
10951 src = decl;
10952 dst = build_simple_mem_ref_loc (loc, arg);
10953 dst = omp_build_component_ref (dst, f);
10954 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10955 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10956 else
10957 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10958 append_to_statement_list (t, &list);
10959 break;
10960 case OMP_CLAUSE_PRIVATE:
10961 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10962 break;
10963 decl = OMP_CLAUSE_DECL (c);
10964 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10965 f = (tree) n->value;
10966 if (tcctx.cb.decl_map)
10967 f = *tcctx.cb.decl_map->get (f);
10968 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10969 if (n != NULL)
10971 sf = (tree) n->value;
10972 if (tcctx.cb.decl_map)
10973 sf = *tcctx.cb.decl_map->get (sf);
10974 src = build_simple_mem_ref_loc (loc, sarg);
10975 src = omp_build_component_ref (src, sf);
10976 if (use_pointer_for_field (decl, NULL))
10977 src = build_simple_mem_ref_loc (loc, src);
10979 else
10980 src = decl;
10981 dst = build_simple_mem_ref_loc (loc, arg);
10982 dst = omp_build_component_ref (dst, f);
10983 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10984 append_to_statement_list (t, &list);
10985 break;
10986 default:
10987 break;
10990 /* Last pass: handle VLA firstprivates. */
10991 if (tcctx.cb.decl_map)
10992 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10993 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10995 tree ind, ptr, df;
10997 decl = OMP_CLAUSE_DECL (c);
10998 if (!is_variable_sized (decl))
10999 continue;
11000 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11001 if (n == NULL)
11002 continue;
11003 f = (tree) n->value;
11004 f = *tcctx.cb.decl_map->get (f);
11005 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11006 ind = DECL_VALUE_EXPR (decl);
11007 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11008 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11009 n = splay_tree_lookup (ctx->sfield_map,
11010 (splay_tree_key) TREE_OPERAND (ind, 0));
11011 sf = (tree) n->value;
11012 sf = *tcctx.cb.decl_map->get (sf);
11013 src = build_simple_mem_ref_loc (loc, sarg);
11014 src = omp_build_component_ref (src, sf);
11015 src = build_simple_mem_ref_loc (loc, src);
11016 dst = build_simple_mem_ref_loc (loc, arg);
11017 dst = omp_build_component_ref (dst, f);
11018 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11019 append_to_statement_list (t, &list);
11020 n = splay_tree_lookup (ctx->field_map,
11021 (splay_tree_key) TREE_OPERAND (ind, 0));
11022 df = (tree) n->value;
11023 df = *tcctx.cb.decl_map->get (df);
11024 ptr = build_simple_mem_ref_loc (loc, arg);
11025 ptr = omp_build_component_ref (ptr, df);
11026 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11027 build_fold_addr_expr_loc (loc, dst));
11028 append_to_statement_list (t, &list);
11031 t = build1 (RETURN_EXPR, void_type_node, NULL);
11032 append_to_statement_list (t, &list);
11034 if (tcctx.cb.decl_map)
11035 delete tcctx.cb.decl_map;
11036 pop_gimplify_context (NULL);
11037 BIND_EXPR_BODY (bind) = list;
11038 pop_cfun ();
11041 static void
11042 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11044 tree c, clauses;
11045 gimple *g;
11046 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11048 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11049 gcc_assert (clauses);
11050 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11051 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11052 switch (OMP_CLAUSE_DEPEND_KIND (c))
11054 case OMP_CLAUSE_DEPEND_LAST:
11055 /* Lowering already done at gimplification. */
11056 return;
11057 case OMP_CLAUSE_DEPEND_IN:
11058 cnt[2]++;
11059 break;
11060 case OMP_CLAUSE_DEPEND_OUT:
11061 case OMP_CLAUSE_DEPEND_INOUT:
11062 cnt[0]++;
11063 break;
11064 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11065 cnt[1]++;
11066 break;
11067 case OMP_CLAUSE_DEPEND_DEPOBJ:
11068 cnt[3]++;
11069 break;
11070 case OMP_CLAUSE_DEPEND_SOURCE:
11071 case OMP_CLAUSE_DEPEND_SINK:
11072 /* FALLTHRU */
11073 default:
11074 gcc_unreachable ();
11076 if (cnt[1] || cnt[3])
11077 idx = 5;
11078 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
11079 tree type = build_array_type_nelts (ptr_type_node, total + idx);
11080 tree array = create_tmp_var (type);
11081 TREE_ADDRESSABLE (array) = 1;
11082 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
11083 NULL_TREE);
11084 if (idx == 5)
11086 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
11087 gimple_seq_add_stmt (iseq, g);
11088 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
11089 NULL_TREE);
11091 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
11092 gimple_seq_add_stmt (iseq, g);
11093 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
11095 r = build4 (ARRAY_REF, ptr_type_node, array,
11096 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
11097 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
11098 gimple_seq_add_stmt (iseq, g);
11100 for (i = 0; i < 4; i++)
11102 if (cnt[i] == 0)
11103 continue;
11104 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11105 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11106 continue;
11107 else
11109 switch (OMP_CLAUSE_DEPEND_KIND (c))
11111 case OMP_CLAUSE_DEPEND_IN:
11112 if (i != 2)
11113 continue;
11114 break;
11115 case OMP_CLAUSE_DEPEND_OUT:
11116 case OMP_CLAUSE_DEPEND_INOUT:
11117 if (i != 0)
11118 continue;
11119 break;
11120 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11121 if (i != 1)
11122 continue;
11123 break;
11124 case OMP_CLAUSE_DEPEND_DEPOBJ:
11125 if (i != 3)
11126 continue;
11127 break;
11128 default:
11129 gcc_unreachable ();
11131 tree t = OMP_CLAUSE_DECL (c);
11132 t = fold_convert (ptr_type_node, t);
11133 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11134 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11135 NULL_TREE, NULL_TREE);
11136 g = gimple_build_assign (r, t);
11137 gimple_seq_add_stmt (iseq, g);
11140 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11141 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11142 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11143 OMP_CLAUSE_CHAIN (c) = *pclauses;
11144 *pclauses = c;
11145 tree clobber = build_clobber (type);
11146 g = gimple_build_assign (array, clobber);
11147 gimple_seq_add_stmt (oseq, g);
11150 /* Lower the OpenMP parallel or task directive in the current statement
11151 in GSI_P. CTX holds context information for the directive. */
11153 static void
11154 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11156 tree clauses;
11157 tree child_fn, t;
11158 gimple *stmt = gsi_stmt (*gsi_p);
11159 gbind *par_bind, *bind, *dep_bind = NULL;
11160 gimple_seq par_body;
11161 location_t loc = gimple_location (stmt);
11163 clauses = gimple_omp_taskreg_clauses (stmt);
11164 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11165 && gimple_omp_task_taskwait_p (stmt))
11167 par_bind = NULL;
11168 par_body = NULL;
11170 else
11172 par_bind
11173 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11174 par_body = gimple_bind_body (par_bind);
11176 child_fn = ctx->cb.dst_fn;
11177 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11178 && !gimple_omp_parallel_combined_p (stmt))
11180 struct walk_stmt_info wi;
11181 int ws_num = 0;
11183 memset (&wi, 0, sizeof (wi));
11184 wi.info = &ws_num;
11185 wi.val_only = true;
11186 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11187 if (ws_num == 1)
11188 gimple_omp_parallel_set_combined_p (stmt, true);
11190 gimple_seq dep_ilist = NULL;
11191 gimple_seq dep_olist = NULL;
11192 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11193 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11195 push_gimplify_context ();
11196 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11197 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11198 &dep_ilist, &dep_olist);
11201 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11202 && gimple_omp_task_taskwait_p (stmt))
11204 if (dep_bind)
11206 gsi_replace (gsi_p, dep_bind, true);
11207 gimple_bind_add_seq (dep_bind, dep_ilist);
11208 gimple_bind_add_stmt (dep_bind, stmt);
11209 gimple_bind_add_seq (dep_bind, dep_olist);
11210 pop_gimplify_context (dep_bind);
11212 return;
11215 if (ctx->srecord_type)
11216 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11218 gimple_seq tskred_ilist = NULL;
11219 gimple_seq tskred_olist = NULL;
11220 if ((is_task_ctx (ctx)
11221 && gimple_omp_task_taskloop_p (ctx->stmt)
11222 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11223 OMP_CLAUSE_REDUCTION))
11224 || (is_parallel_ctx (ctx)
11225 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11226 OMP_CLAUSE__REDUCTEMP_)))
11228 if (dep_bind == NULL)
11230 push_gimplify_context ();
11231 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11233 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11234 : OMP_PARALLEL,
11235 gimple_omp_taskreg_clauses (ctx->stmt),
11236 &tskred_ilist, &tskred_olist);
11239 push_gimplify_context ();
11241 gimple_seq par_olist = NULL;
11242 gimple_seq par_ilist = NULL;
11243 gimple_seq par_rlist = NULL;
11244 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11245 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
11246 if (phony_construct && ctx->record_type)
11248 gcc_checking_assert (!ctx->receiver_decl);
11249 ctx->receiver_decl = create_tmp_var
11250 (build_reference_type (ctx->record_type), ".omp_rec");
11252 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11253 lower_omp (&par_body, ctx);
11254 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
11255 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11257 /* Declare all the variables created by mapping and the variables
11258 declared in the scope of the parallel body. */
11259 record_vars_into (ctx->block_vars, child_fn);
11260 maybe_remove_omp_member_access_dummy_vars (par_bind);
11261 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11263 if (ctx->record_type)
11265 ctx->sender_decl
11266 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11267 : ctx->record_type, ".omp_data_o");
11268 DECL_NAMELESS (ctx->sender_decl) = 1;
11269 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11270 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11273 gimple_seq olist = NULL;
11274 gimple_seq ilist = NULL;
11275 lower_send_clauses (clauses, &ilist, &olist, ctx);
11276 lower_send_shared_vars (&ilist, &olist, ctx);
11278 if (ctx->record_type)
11280 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11281 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11282 clobber));
11285 /* Once all the expansions are done, sequence all the different
11286 fragments inside gimple_omp_body. */
11288 gimple_seq new_body = NULL;
11290 if (ctx->record_type)
11292 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11293 /* fixup_child_record_type might have changed receiver_decl's type. */
11294 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11295 gimple_seq_add_stmt (&new_body,
11296 gimple_build_assign (ctx->receiver_decl, t));
11299 gimple_seq_add_seq (&new_body, par_ilist);
11300 gimple_seq_add_seq (&new_body, par_body);
11301 gimple_seq_add_seq (&new_body, par_rlist);
11302 if (ctx->cancellable)
11303 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11304 gimple_seq_add_seq (&new_body, par_olist);
11305 new_body = maybe_catch_exception (new_body);
11306 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11307 gimple_seq_add_stmt (&new_body,
11308 gimple_build_omp_continue (integer_zero_node,
11309 integer_zero_node));
11310 if (!phony_construct)
11312 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11313 gimple_omp_set_body (stmt, new_body);
11316 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11317 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11318 else
11319 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11320 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11321 gimple_bind_add_seq (bind, ilist);
11322 if (!phony_construct)
11323 gimple_bind_add_stmt (bind, stmt);
11324 else
11325 gimple_bind_add_seq (bind, new_body);
11326 gimple_bind_add_seq (bind, olist);
11328 pop_gimplify_context (NULL);
11330 if (dep_bind)
11332 gimple_bind_add_seq (dep_bind, dep_ilist);
11333 gimple_bind_add_seq (dep_bind, tskred_ilist);
11334 gimple_bind_add_stmt (dep_bind, bind);
11335 gimple_bind_add_seq (dep_bind, tskred_olist);
11336 gimple_bind_add_seq (dep_bind, dep_olist);
11337 pop_gimplify_context (dep_bind);
11341 /* Lower the GIMPLE_OMP_TARGET in the current statement
11342 in GSI_P. CTX holds context information for the directive. */
11344 static void
11345 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11347 tree clauses;
11348 tree child_fn, t, c;
11349 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11350 gbind *tgt_bind, *bind, *dep_bind = NULL;
11351 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11352 location_t loc = gimple_location (stmt);
11353 bool offloaded, data_region;
11354 unsigned int map_cnt = 0;
11356 offloaded = is_gimple_omp_offloaded (stmt);
11357 switch (gimple_omp_target_kind (stmt))
11359 case GF_OMP_TARGET_KIND_REGION:
11360 case GF_OMP_TARGET_KIND_UPDATE:
11361 case GF_OMP_TARGET_KIND_ENTER_DATA:
11362 case GF_OMP_TARGET_KIND_EXIT_DATA:
11363 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11364 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11365 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11366 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11367 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11368 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11369 data_region = false;
11370 break;
11371 case GF_OMP_TARGET_KIND_DATA:
11372 case GF_OMP_TARGET_KIND_OACC_DATA:
11373 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11374 data_region = true;
11375 break;
11376 default:
11377 gcc_unreachable ();
11380 clauses = gimple_omp_target_clauses (stmt);
11382 gimple_seq dep_ilist = NULL;
11383 gimple_seq dep_olist = NULL;
11384 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11386 push_gimplify_context ();
11387 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11388 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11389 &dep_ilist, &dep_olist);
11392 tgt_bind = NULL;
11393 tgt_body = NULL;
11394 if (offloaded)
11396 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11397 tgt_body = gimple_bind_body (tgt_bind);
11399 else if (data_region)
11400 tgt_body = gimple_omp_body (stmt);
11401 child_fn = ctx->cb.dst_fn;
11403 push_gimplify_context ();
11404 fplist = NULL;
11406 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11407 switch (OMP_CLAUSE_CODE (c))
11409 tree var, x;
11411 default:
11412 break;
11413 case OMP_CLAUSE_MAP:
11414 #if CHECKING_P
11415 /* First check what we're prepared to handle in the following. */
11416 switch (OMP_CLAUSE_MAP_KIND (c))
11418 case GOMP_MAP_ALLOC:
11419 case GOMP_MAP_TO:
11420 case GOMP_MAP_FROM:
11421 case GOMP_MAP_TOFROM:
11422 case GOMP_MAP_POINTER:
11423 case GOMP_MAP_TO_PSET:
11424 case GOMP_MAP_DELETE:
11425 case GOMP_MAP_RELEASE:
11426 case GOMP_MAP_ALWAYS_TO:
11427 case GOMP_MAP_ALWAYS_FROM:
11428 case GOMP_MAP_ALWAYS_TOFROM:
11429 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11430 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11431 case GOMP_MAP_STRUCT:
11432 case GOMP_MAP_ALWAYS_POINTER:
11433 break;
11434 case GOMP_MAP_FORCE_ALLOC:
11435 case GOMP_MAP_FORCE_TO:
11436 case GOMP_MAP_FORCE_FROM:
11437 case GOMP_MAP_FORCE_TOFROM:
11438 case GOMP_MAP_FORCE_PRESENT:
11439 case GOMP_MAP_FORCE_DEVICEPTR:
11440 case GOMP_MAP_DEVICE_RESIDENT:
11441 case GOMP_MAP_LINK:
11442 gcc_assert (is_gimple_omp_oacc (stmt));
11443 break;
11444 default:
11445 gcc_unreachable ();
11447 #endif
11448 /* FALLTHRU */
11449 case OMP_CLAUSE_TO:
11450 case OMP_CLAUSE_FROM:
11451 oacc_firstprivate:
11452 var = OMP_CLAUSE_DECL (c);
11453 if (!DECL_P (var))
11455 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11456 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11457 && (OMP_CLAUSE_MAP_KIND (c)
11458 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11459 map_cnt++;
11460 continue;
11463 if (DECL_SIZE (var)
11464 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11466 tree var2 = DECL_VALUE_EXPR (var);
11467 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11468 var2 = TREE_OPERAND (var2, 0);
11469 gcc_assert (DECL_P (var2));
11470 var = var2;
11473 if (offloaded
11474 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11475 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11476 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11478 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11480 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11481 && varpool_node::get_create (var)->offloadable)
11482 continue;
11484 tree type = build_pointer_type (TREE_TYPE (var));
11485 tree new_var = lookup_decl (var, ctx);
11486 x = create_tmp_var_raw (type, get_name (new_var));
11487 gimple_add_tmp_var (x);
11488 x = build_simple_mem_ref (x);
11489 SET_DECL_VALUE_EXPR (new_var, x);
11490 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11492 continue;
11495 if (!maybe_lookup_field (var, ctx))
11496 continue;
11498 /* Don't remap compute constructs' reduction variables, because the
11499 intermediate result must be local to each gang. */
11500 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11501 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11503 x = build_receiver_ref (var, true, ctx);
11504 tree new_var = lookup_decl (var, ctx);
11506 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11507 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11508 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11509 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11510 x = build_simple_mem_ref (x);
11511 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11513 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11514 if (omp_is_reference (new_var)
11515 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11516 || DECL_BY_REFERENCE (var)))
11518 /* Create a local object to hold the instance
11519 value. */
11520 tree type = TREE_TYPE (TREE_TYPE (new_var));
11521 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11522 tree inst = create_tmp_var (type, id);
11523 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11524 x = build_fold_addr_expr (inst);
11526 gimplify_assign (new_var, x, &fplist);
11528 else if (DECL_P (new_var))
11530 SET_DECL_VALUE_EXPR (new_var, x);
11531 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11533 else
11534 gcc_unreachable ();
11536 map_cnt++;
11537 break;
11539 case OMP_CLAUSE_FIRSTPRIVATE:
11540 if (is_oacc_parallel_or_serial (ctx))
11541 goto oacc_firstprivate;
11542 map_cnt++;
11543 var = OMP_CLAUSE_DECL (c);
11544 if (!omp_is_reference (var)
11545 && !is_gimple_reg_type (TREE_TYPE (var)))
11547 tree new_var = lookup_decl (var, ctx);
11548 if (is_variable_sized (var))
11550 tree pvar = DECL_VALUE_EXPR (var);
11551 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11552 pvar = TREE_OPERAND (pvar, 0);
11553 gcc_assert (DECL_P (pvar));
11554 tree new_pvar = lookup_decl (pvar, ctx);
11555 x = build_fold_indirect_ref (new_pvar);
11556 TREE_THIS_NOTRAP (x) = 1;
11558 else
11559 x = build_receiver_ref (var, true, ctx);
11560 SET_DECL_VALUE_EXPR (new_var, x);
11561 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11563 break;
11565 case OMP_CLAUSE_PRIVATE:
11566 if (is_gimple_omp_oacc (ctx->stmt))
11567 break;
11568 var = OMP_CLAUSE_DECL (c);
11569 if (is_variable_sized (var))
11571 tree new_var = lookup_decl (var, ctx);
11572 tree pvar = DECL_VALUE_EXPR (var);
11573 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11574 pvar = TREE_OPERAND (pvar, 0);
11575 gcc_assert (DECL_P (pvar));
11576 tree new_pvar = lookup_decl (pvar, ctx);
11577 x = build_fold_indirect_ref (new_pvar);
11578 TREE_THIS_NOTRAP (x) = 1;
11579 SET_DECL_VALUE_EXPR (new_var, x);
11580 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11582 break;
11584 case OMP_CLAUSE_USE_DEVICE_PTR:
11585 case OMP_CLAUSE_USE_DEVICE_ADDR:
11586 case OMP_CLAUSE_IS_DEVICE_PTR:
11587 var = OMP_CLAUSE_DECL (c);
11588 map_cnt++;
11589 if (is_variable_sized (var))
11591 tree new_var = lookup_decl (var, ctx);
11592 tree pvar = DECL_VALUE_EXPR (var);
11593 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11594 pvar = TREE_OPERAND (pvar, 0);
11595 gcc_assert (DECL_P (pvar));
11596 tree new_pvar = lookup_decl (pvar, ctx);
11597 x = build_fold_indirect_ref (new_pvar);
11598 TREE_THIS_NOTRAP (x) = 1;
11599 SET_DECL_VALUE_EXPR (new_var, x);
11600 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11602 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11603 && !omp_is_reference (var)
11604 && !omp_is_allocatable_or_ptr (var)
11605 && !lang_hooks.decls.omp_array_data (var, true))
11606 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11608 tree new_var = lookup_decl (var, ctx);
11609 tree type = build_pointer_type (TREE_TYPE (var));
11610 x = create_tmp_var_raw (type, get_name (new_var));
11611 gimple_add_tmp_var (x);
11612 x = build_simple_mem_ref (x);
11613 SET_DECL_VALUE_EXPR (new_var, x);
11614 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11616 else
11618 tree new_var = lookup_decl (var, ctx);
11619 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11620 gimple_add_tmp_var (x);
11621 SET_DECL_VALUE_EXPR (new_var, x);
11622 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11624 break;
11627 if (offloaded)
11629 target_nesting_level++;
11630 lower_omp (&tgt_body, ctx);
11631 target_nesting_level--;
11633 else if (data_region)
11634 lower_omp (&tgt_body, ctx);
11636 if (offloaded)
11638 /* Declare all the variables created by mapping and the variables
11639 declared in the scope of the target body. */
11640 record_vars_into (ctx->block_vars, child_fn);
11641 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11642 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11645 olist = NULL;
11646 ilist = NULL;
11647 if (ctx->record_type)
11649 ctx->sender_decl
11650 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11651 DECL_NAMELESS (ctx->sender_decl) = 1;
11652 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11653 t = make_tree_vec (3);
11654 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11655 TREE_VEC_ELT (t, 1)
11656 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11657 ".omp_data_sizes");
11658 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11659 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11660 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11661 tree tkind_type = short_unsigned_type_node;
11662 int talign_shift = 8;
11663 TREE_VEC_ELT (t, 2)
11664 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11665 ".omp_data_kinds");
11666 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11667 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11668 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11669 gimple_omp_target_set_data_arg (stmt, t);
11671 vec<constructor_elt, va_gc> *vsize;
11672 vec<constructor_elt, va_gc> *vkind;
11673 vec_alloc (vsize, map_cnt);
11674 vec_alloc (vkind, map_cnt);
11675 unsigned int map_idx = 0;
11677 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11678 switch (OMP_CLAUSE_CODE (c))
11680 tree ovar, nc, s, purpose, var, x, type;
11681 unsigned int talign;
11683 default:
11684 break;
11686 case OMP_CLAUSE_MAP:
11687 case OMP_CLAUSE_TO:
11688 case OMP_CLAUSE_FROM:
11689 oacc_firstprivate_map:
11690 nc = c;
11691 ovar = OMP_CLAUSE_DECL (c);
11692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11693 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11694 || (OMP_CLAUSE_MAP_KIND (c)
11695 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11696 break;
11697 if (!DECL_P (ovar))
11699 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11700 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11702 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11703 == get_base_address (ovar));
11704 nc = OMP_CLAUSE_CHAIN (c);
11705 ovar = OMP_CLAUSE_DECL (nc);
11707 else
11709 tree x = build_sender_ref (ovar, ctx);
11710 tree v
11711 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11712 gimplify_assign (x, v, &ilist);
11713 nc = NULL_TREE;
11716 else
11718 if (DECL_SIZE (ovar)
11719 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11721 tree ovar2 = DECL_VALUE_EXPR (ovar);
11722 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11723 ovar2 = TREE_OPERAND (ovar2, 0);
11724 gcc_assert (DECL_P (ovar2));
11725 ovar = ovar2;
11727 if (!maybe_lookup_field (ovar, ctx))
11728 continue;
11731 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11732 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11733 talign = DECL_ALIGN_UNIT (ovar);
11734 if (nc)
11736 var = lookup_decl_in_outer_ctx (ovar, ctx);
11737 x = build_sender_ref (ovar, ctx);
11739 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11740 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11741 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11742 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11744 gcc_assert (offloaded);
11745 tree avar
11746 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11747 mark_addressable (avar);
11748 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11749 talign = DECL_ALIGN_UNIT (avar);
11750 avar = build_fold_addr_expr (avar);
11751 gimplify_assign (x, avar, &ilist);
11753 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11755 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11756 if (!omp_is_reference (var))
11758 if (is_gimple_reg (var)
11759 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11760 TREE_NO_WARNING (var) = 1;
11761 var = build_fold_addr_expr (var);
11763 else
11764 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11765 gimplify_assign (x, var, &ilist);
11767 else if (is_gimple_reg (var))
11769 gcc_assert (offloaded);
11770 tree avar = create_tmp_var (TREE_TYPE (var));
11771 mark_addressable (avar);
11772 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11773 if (GOMP_MAP_COPY_TO_P (map_kind)
11774 || map_kind == GOMP_MAP_POINTER
11775 || map_kind == GOMP_MAP_TO_PSET
11776 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11778 /* If we need to initialize a temporary
11779 with VAR because it is not addressable, and
11780 the variable hasn't been initialized yet, then
11781 we'll get a warning for the store to avar.
11782 Don't warn in that case, the mapping might
11783 be implicit. */
11784 TREE_NO_WARNING (var) = 1;
11785 gimplify_assign (avar, var, &ilist);
11787 avar = build_fold_addr_expr (avar);
11788 gimplify_assign (x, avar, &ilist);
11789 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11790 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11791 && !TYPE_READONLY (TREE_TYPE (var)))
11793 x = unshare_expr (x);
11794 x = build_simple_mem_ref (x);
11795 gimplify_assign (var, x, &olist);
11798 else
11800 /* While MAP is handled explicitly by the FE,
11801 for 'target update', only the identified is passed. */
11802 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11803 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11804 && (omp_is_allocatable_or_ptr (var)
11805 && omp_check_optional_argument (var, false)))
11806 var = build_fold_indirect_ref (var);
11807 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11808 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11809 || (!omp_is_allocatable_or_ptr (var)
11810 && !omp_check_optional_argument (var, false)))
11811 var = build_fold_addr_expr (var);
11812 gimplify_assign (x, var, &ilist);
11815 s = NULL_TREE;
11816 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11818 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11819 s = TREE_TYPE (ovar);
11820 if (TREE_CODE (s) == REFERENCE_TYPE)
11821 s = TREE_TYPE (s);
11822 s = TYPE_SIZE_UNIT (s);
11824 else
11825 s = OMP_CLAUSE_SIZE (c);
11826 if (s == NULL_TREE)
11827 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11828 s = fold_convert (size_type_node, s);
11829 purpose = size_int (map_idx++);
11830 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11831 if (TREE_CODE (s) != INTEGER_CST)
11832 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11834 unsigned HOST_WIDE_INT tkind, tkind_zero;
11835 switch (OMP_CLAUSE_CODE (c))
11837 case OMP_CLAUSE_MAP:
11838 tkind = OMP_CLAUSE_MAP_KIND (c);
11839 tkind_zero = tkind;
11840 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11841 switch (tkind)
11843 case GOMP_MAP_ALLOC:
11844 case GOMP_MAP_TO:
11845 case GOMP_MAP_FROM:
11846 case GOMP_MAP_TOFROM:
11847 case GOMP_MAP_ALWAYS_TO:
11848 case GOMP_MAP_ALWAYS_FROM:
11849 case GOMP_MAP_ALWAYS_TOFROM:
11850 case GOMP_MAP_RELEASE:
11851 case GOMP_MAP_FORCE_TO:
11852 case GOMP_MAP_FORCE_FROM:
11853 case GOMP_MAP_FORCE_TOFROM:
11854 case GOMP_MAP_FORCE_PRESENT:
11855 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11856 break;
11857 case GOMP_MAP_DELETE:
11858 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11859 default:
11860 break;
11862 if (tkind_zero != tkind)
11864 if (integer_zerop (s))
11865 tkind = tkind_zero;
11866 else if (integer_nonzerop (s))
11867 tkind_zero = tkind;
11869 break;
11870 case OMP_CLAUSE_FIRSTPRIVATE:
11871 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11872 tkind = GOMP_MAP_TO;
11873 tkind_zero = tkind;
11874 break;
11875 case OMP_CLAUSE_TO:
11876 tkind = GOMP_MAP_TO;
11877 tkind_zero = tkind;
11878 break;
11879 case OMP_CLAUSE_FROM:
11880 tkind = GOMP_MAP_FROM;
11881 tkind_zero = tkind;
11882 break;
11883 default:
11884 gcc_unreachable ();
11886 gcc_checking_assert (tkind
11887 < (HOST_WIDE_INT_C (1U) << talign_shift));
11888 gcc_checking_assert (tkind_zero
11889 < (HOST_WIDE_INT_C (1U) << talign_shift));
11890 talign = ceil_log2 (talign);
11891 tkind |= talign << talign_shift;
11892 tkind_zero |= talign << talign_shift;
11893 gcc_checking_assert (tkind
11894 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11895 gcc_checking_assert (tkind_zero
11896 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11897 if (tkind == tkind_zero)
11898 x = build_int_cstu (tkind_type, tkind);
11899 else
11901 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11902 x = build3 (COND_EXPR, tkind_type,
11903 fold_build2 (EQ_EXPR, boolean_type_node,
11904 unshare_expr (s), size_zero_node),
11905 build_int_cstu (tkind_type, tkind_zero),
11906 build_int_cstu (tkind_type, tkind));
11908 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11909 if (nc && nc != c)
11910 c = nc;
11911 break;
11913 case OMP_CLAUSE_FIRSTPRIVATE:
11914 if (is_oacc_parallel_or_serial (ctx))
11915 goto oacc_firstprivate_map;
11916 ovar = OMP_CLAUSE_DECL (c);
11917 if (omp_is_reference (ovar))
11918 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11919 else
11920 talign = DECL_ALIGN_UNIT (ovar);
11921 var = lookup_decl_in_outer_ctx (ovar, ctx);
11922 x = build_sender_ref (ovar, ctx);
11923 tkind = GOMP_MAP_FIRSTPRIVATE;
11924 type = TREE_TYPE (ovar);
11925 if (omp_is_reference (ovar))
11926 type = TREE_TYPE (type);
11927 if ((INTEGRAL_TYPE_P (type)
11928 && TYPE_PRECISION (type) <= POINTER_SIZE)
11929 || TREE_CODE (type) == POINTER_TYPE)
11931 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11932 tree t = var;
11933 if (omp_is_reference (var))
11934 t = build_simple_mem_ref (var);
11935 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11936 TREE_NO_WARNING (var) = 1;
11937 if (TREE_CODE (type) != POINTER_TYPE)
11938 t = fold_convert (pointer_sized_int_node, t);
11939 t = fold_convert (TREE_TYPE (x), t);
11940 gimplify_assign (x, t, &ilist);
11942 else if (omp_is_reference (var))
11943 gimplify_assign (x, var, &ilist);
11944 else if (is_gimple_reg (var))
11946 tree avar = create_tmp_var (TREE_TYPE (var));
11947 mark_addressable (avar);
11948 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11949 TREE_NO_WARNING (var) = 1;
11950 gimplify_assign (avar, var, &ilist);
11951 avar = build_fold_addr_expr (avar);
11952 gimplify_assign (x, avar, &ilist);
11954 else
11956 var = build_fold_addr_expr (var);
11957 gimplify_assign (x, var, &ilist);
11959 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11960 s = size_int (0);
11961 else if (omp_is_reference (ovar))
11962 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11963 else
11964 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11965 s = fold_convert (size_type_node, s);
11966 purpose = size_int (map_idx++);
11967 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11968 if (TREE_CODE (s) != INTEGER_CST)
11969 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11971 gcc_checking_assert (tkind
11972 < (HOST_WIDE_INT_C (1U) << talign_shift));
11973 talign = ceil_log2 (talign);
11974 tkind |= talign << talign_shift;
11975 gcc_checking_assert (tkind
11976 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11977 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11978 build_int_cstu (tkind_type, tkind));
11979 break;
11981 case OMP_CLAUSE_USE_DEVICE_PTR:
11982 case OMP_CLAUSE_USE_DEVICE_ADDR:
11983 case OMP_CLAUSE_IS_DEVICE_PTR:
11984 bool do_optional_check;
11985 do_optional_check = false;
11986 ovar = OMP_CLAUSE_DECL (c);
11987 var = lookup_decl_in_outer_ctx (ovar, ctx);
11989 if (lang_hooks.decls.omp_array_data (ovar, true))
11991 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
11992 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
11993 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
11995 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
11997 tkind = GOMP_MAP_USE_DEVICE_PTR;
11998 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12000 else
12002 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12003 x = build_sender_ref (ovar, ctx);
12005 type = TREE_TYPE (ovar);
12006 if (lang_hooks.decls.omp_array_data (ovar, true))
12008 var = lang_hooks.decls.omp_array_data (ovar, false);
12009 do_optional_check = true;
12011 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12012 && !omp_is_reference (ovar)
12013 && !omp_is_allocatable_or_ptr (ovar))
12014 || TREE_CODE (type) == ARRAY_TYPE)
12015 var = build_fold_addr_expr (var);
12016 else
12018 if (omp_is_reference (ovar)
12019 || omp_check_optional_argument (ovar, false)
12020 || omp_is_allocatable_or_ptr (ovar))
12022 type = TREE_TYPE (type);
12023 if (TREE_CODE (type) != ARRAY_TYPE
12024 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12025 && !omp_is_allocatable_or_ptr (ovar))
12026 || (omp_is_reference (ovar)
12027 && omp_is_allocatable_or_ptr (ovar))))
12029 var = build_simple_mem_ref (var);
12030 do_optional_check = true;
12032 var = fold_convert (TREE_TYPE (x), var);
12035 tree present;
12036 present = (do_optional_check
12037 ? omp_check_optional_argument (ovar, true) : NULL_TREE);
12038 if (present)
12040 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12041 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12042 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12043 tree new_x = unshare_expr (x);
12044 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12045 fb_rvalue);
12046 gcond *cond = gimple_build_cond_from_tree (present,
12047 notnull_label,
12048 null_label);
12049 gimple_seq_add_stmt (&ilist, cond);
12050 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12051 gimplify_assign (new_x, null_pointer_node, &ilist);
12052 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12053 gimple_seq_add_stmt (&ilist,
12054 gimple_build_label (notnull_label));
12055 gimplify_assign (x, var, &ilist);
12056 gimple_seq_add_stmt (&ilist,
12057 gimple_build_label (opt_arg_label));
12059 else
12060 gimplify_assign (x, var, &ilist);
12061 s = size_int (0);
12062 purpose = size_int (map_idx++);
12063 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12064 gcc_checking_assert (tkind
12065 < (HOST_WIDE_INT_C (1U) << talign_shift));
12066 gcc_checking_assert (tkind
12067 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12068 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12069 build_int_cstu (tkind_type, tkind));
12070 break;
12073 gcc_assert (map_idx == map_cnt);
12075 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12076 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12077 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12078 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12079 for (int i = 1; i <= 2; i++)
12080 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12082 gimple_seq initlist = NULL;
12083 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12084 TREE_VEC_ELT (t, i)),
12085 &initlist, true, NULL_TREE);
12086 gimple_seq_add_seq (&ilist, initlist);
12088 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12089 gimple_seq_add_stmt (&olist,
12090 gimple_build_assign (TREE_VEC_ELT (t, i),
12091 clobber));
12094 tree clobber = build_clobber (ctx->record_type);
12095 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12096 clobber));
12099 /* Once all the expansions are done, sequence all the different
12100 fragments inside gimple_omp_body. */
12102 new_body = NULL;
12104 if (offloaded
12105 && ctx->record_type)
12107 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12108 /* fixup_child_record_type might have changed receiver_decl's type. */
12109 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12110 gimple_seq_add_stmt (&new_body,
12111 gimple_build_assign (ctx->receiver_decl, t));
12113 gimple_seq_add_seq (&new_body, fplist);
12115 if (offloaded || data_region)
12117 tree prev = NULL_TREE;
12118 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12119 switch (OMP_CLAUSE_CODE (c))
12121 tree var, x;
12122 default:
12123 break;
12124 case OMP_CLAUSE_FIRSTPRIVATE:
12125 if (is_gimple_omp_oacc (ctx->stmt))
12126 break;
12127 var = OMP_CLAUSE_DECL (c);
12128 if (omp_is_reference (var)
12129 || is_gimple_reg_type (TREE_TYPE (var)))
12131 tree new_var = lookup_decl (var, ctx);
12132 tree type;
12133 type = TREE_TYPE (var);
12134 if (omp_is_reference (var))
12135 type = TREE_TYPE (type);
12136 if ((INTEGRAL_TYPE_P (type)
12137 && TYPE_PRECISION (type) <= POINTER_SIZE)
12138 || TREE_CODE (type) == POINTER_TYPE)
12140 x = build_receiver_ref (var, false, ctx);
12141 if (TREE_CODE (type) != POINTER_TYPE)
12142 x = fold_convert (pointer_sized_int_node, x);
12143 x = fold_convert (type, x);
12144 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12145 fb_rvalue);
12146 if (omp_is_reference (var))
12148 tree v = create_tmp_var_raw (type, get_name (var));
12149 gimple_add_tmp_var (v);
12150 TREE_ADDRESSABLE (v) = 1;
12151 gimple_seq_add_stmt (&new_body,
12152 gimple_build_assign (v, x));
12153 x = build_fold_addr_expr (v);
12155 gimple_seq_add_stmt (&new_body,
12156 gimple_build_assign (new_var, x));
12158 else
12160 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12161 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12162 fb_rvalue);
12163 gimple_seq_add_stmt (&new_body,
12164 gimple_build_assign (new_var, x));
12167 else if (is_variable_sized (var))
12169 tree pvar = DECL_VALUE_EXPR (var);
12170 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12171 pvar = TREE_OPERAND (pvar, 0);
12172 gcc_assert (DECL_P (pvar));
12173 tree new_var = lookup_decl (pvar, ctx);
12174 x = build_receiver_ref (var, false, ctx);
12175 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12176 gimple_seq_add_stmt (&new_body,
12177 gimple_build_assign (new_var, x));
12179 break;
12180 case OMP_CLAUSE_PRIVATE:
12181 if (is_gimple_omp_oacc (ctx->stmt))
12182 break;
12183 var = OMP_CLAUSE_DECL (c);
12184 if (omp_is_reference (var))
12186 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12187 tree new_var = lookup_decl (var, ctx);
12188 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12189 if (TREE_CONSTANT (x))
12191 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12192 get_name (var));
12193 gimple_add_tmp_var (x);
12194 TREE_ADDRESSABLE (x) = 1;
12195 x = build_fold_addr_expr_loc (clause_loc, x);
12197 else
12198 break;
12200 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12201 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12202 gimple_seq_add_stmt (&new_body,
12203 gimple_build_assign (new_var, x));
12205 break;
12206 case OMP_CLAUSE_USE_DEVICE_PTR:
12207 case OMP_CLAUSE_USE_DEVICE_ADDR:
12208 case OMP_CLAUSE_IS_DEVICE_PTR:
12209 tree new_var;
12210 gimple_seq assign_body;
12211 bool is_array_data;
12212 bool do_optional_check;
12213 assign_body = NULL;
12214 do_optional_check = false;
12215 var = OMP_CLAUSE_DECL (c);
12216 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12218 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12219 x = build_sender_ref (is_array_data
12220 ? (splay_tree_key) &DECL_NAME (var)
12221 : (splay_tree_key) &DECL_UID (var), ctx);
12222 else
12223 x = build_receiver_ref (var, false, ctx);
12225 if (is_array_data)
12227 bool is_ref = omp_is_reference (var);
12228 do_optional_check = true;
12229 /* First, we copy the descriptor data from the host; then
12230 we update its data to point to the target address. */
12231 new_var = lookup_decl (var, ctx);
12232 new_var = DECL_VALUE_EXPR (new_var);
12233 tree v = new_var;
12235 if (is_ref)
12237 var = build_fold_indirect_ref (var);
12238 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12239 fb_rvalue);
12240 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12241 gimple_add_tmp_var (v);
12242 TREE_ADDRESSABLE (v) = 1;
12243 gimple_seq_add_stmt (&assign_body,
12244 gimple_build_assign (v, var));
12245 tree rhs = build_fold_addr_expr (v);
12246 gimple_seq_add_stmt (&assign_body,
12247 gimple_build_assign (new_var, rhs));
12249 else
12250 gimple_seq_add_stmt (&assign_body,
12251 gimple_build_assign (new_var, var));
12253 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12254 gcc_assert (v2);
12255 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12256 gimple_seq_add_stmt (&assign_body,
12257 gimple_build_assign (v2, x));
12259 else if (is_variable_sized (var))
12261 tree pvar = DECL_VALUE_EXPR (var);
12262 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12263 pvar = TREE_OPERAND (pvar, 0);
12264 gcc_assert (DECL_P (pvar));
12265 new_var = lookup_decl (pvar, ctx);
12266 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12267 gimple_seq_add_stmt (&assign_body,
12268 gimple_build_assign (new_var, x));
12270 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12271 && !omp_is_reference (var)
12272 && !omp_is_allocatable_or_ptr (var))
12273 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12275 new_var = lookup_decl (var, ctx);
12276 new_var = DECL_VALUE_EXPR (new_var);
12277 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12278 new_var = TREE_OPERAND (new_var, 0);
12279 gcc_assert (DECL_P (new_var));
12280 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12281 gimple_seq_add_stmt (&assign_body,
12282 gimple_build_assign (new_var, x));
12284 else
12286 tree type = TREE_TYPE (var);
12287 new_var = lookup_decl (var, ctx);
12288 if (omp_is_reference (var))
12290 type = TREE_TYPE (type);
12291 if (TREE_CODE (type) != ARRAY_TYPE
12292 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12293 || (omp_is_reference (var)
12294 && omp_is_allocatable_or_ptr (var))))
12296 tree v = create_tmp_var_raw (type, get_name (var));
12297 gimple_add_tmp_var (v);
12298 TREE_ADDRESSABLE (v) = 1;
12299 x = fold_convert (type, x);
12300 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12301 fb_rvalue);
12302 gimple_seq_add_stmt (&assign_body,
12303 gimple_build_assign (v, x));
12304 x = build_fold_addr_expr (v);
12305 do_optional_check = true;
12308 new_var = DECL_VALUE_EXPR (new_var);
12309 x = fold_convert (TREE_TYPE (new_var), x);
12310 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12311 gimple_seq_add_stmt (&assign_body,
12312 gimple_build_assign (new_var, x));
12314 tree present;
12315 present = (do_optional_check
12316 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12317 : NULL_TREE);
12318 if (present)
12320 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12321 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12322 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12323 glabel *null_glabel = gimple_build_label (null_label);
12324 glabel *notnull_glabel = gimple_build_label (notnull_label);
12325 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12326 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12327 fb_rvalue);
12328 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12329 fb_rvalue);
12330 gcond *cond = gimple_build_cond_from_tree (present,
12331 notnull_label,
12332 null_label);
12333 gimple_seq_add_stmt (&new_body, cond);
12334 gimple_seq_add_stmt (&new_body, null_glabel);
12335 gimplify_assign (new_var, null_pointer_node, &new_body);
12336 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12337 gimple_seq_add_stmt (&new_body, notnull_glabel);
12338 gimple_seq_add_seq (&new_body, assign_body);
12339 gimple_seq_add_stmt (&new_body,
12340 gimple_build_label (opt_arg_label));
12342 else
12343 gimple_seq_add_seq (&new_body, assign_body);
12344 break;
12346 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12347 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12348 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12349 or references to VLAs. */
12350 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12351 switch (OMP_CLAUSE_CODE (c))
12353 tree var;
12354 default:
12355 break;
12356 case OMP_CLAUSE_MAP:
12357 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12358 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12360 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12361 poly_int64 offset = 0;
12362 gcc_assert (prev);
12363 var = OMP_CLAUSE_DECL (c);
12364 if (DECL_P (var)
12365 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12366 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12367 ctx))
12368 && varpool_node::get_create (var)->offloadable)
12369 break;
12370 if (TREE_CODE (var) == INDIRECT_REF
12371 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12372 var = TREE_OPERAND (var, 0);
12373 if (TREE_CODE (var) == COMPONENT_REF)
12375 var = get_addr_base_and_unit_offset (var, &offset);
12376 gcc_assert (var != NULL_TREE && DECL_P (var));
12378 else if (DECL_SIZE (var)
12379 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12381 tree var2 = DECL_VALUE_EXPR (var);
12382 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12383 var2 = TREE_OPERAND (var2, 0);
12384 gcc_assert (DECL_P (var2));
12385 var = var2;
12387 tree new_var = lookup_decl (var, ctx), x;
12388 tree type = TREE_TYPE (new_var);
12389 bool is_ref;
12390 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12391 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12392 == COMPONENT_REF))
12394 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12395 is_ref = true;
12396 new_var = build2 (MEM_REF, type,
12397 build_fold_addr_expr (new_var),
12398 build_int_cst (build_pointer_type (type),
12399 offset));
12401 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12403 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12404 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12405 new_var = build2 (MEM_REF, type,
12406 build_fold_addr_expr (new_var),
12407 build_int_cst (build_pointer_type (type),
12408 offset));
12410 else
12411 is_ref = omp_is_reference (var);
12412 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12413 is_ref = false;
12414 bool ref_to_array = false;
12415 if (is_ref)
12417 type = TREE_TYPE (type);
12418 if (TREE_CODE (type) == ARRAY_TYPE)
12420 type = build_pointer_type (type);
12421 ref_to_array = true;
12424 else if (TREE_CODE (type) == ARRAY_TYPE)
12426 tree decl2 = DECL_VALUE_EXPR (new_var);
12427 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12428 decl2 = TREE_OPERAND (decl2, 0);
12429 gcc_assert (DECL_P (decl2));
12430 new_var = decl2;
12431 type = TREE_TYPE (new_var);
12433 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12434 x = fold_convert_loc (clause_loc, type, x);
12435 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12437 tree bias = OMP_CLAUSE_SIZE (c);
12438 if (DECL_P (bias))
12439 bias = lookup_decl (bias, ctx);
12440 bias = fold_convert_loc (clause_loc, sizetype, bias);
12441 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12442 bias);
12443 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12444 TREE_TYPE (x), x, bias);
12446 if (ref_to_array)
12447 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12448 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12449 if (is_ref && !ref_to_array)
12451 tree t = create_tmp_var_raw (type, get_name (var));
12452 gimple_add_tmp_var (t);
12453 TREE_ADDRESSABLE (t) = 1;
12454 gimple_seq_add_stmt (&new_body,
12455 gimple_build_assign (t, x));
12456 x = build_fold_addr_expr_loc (clause_loc, t);
12458 gimple_seq_add_stmt (&new_body,
12459 gimple_build_assign (new_var, x));
12460 prev = NULL_TREE;
12462 else if (OMP_CLAUSE_CHAIN (c)
12463 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12464 == OMP_CLAUSE_MAP
12465 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12466 == GOMP_MAP_FIRSTPRIVATE_POINTER
12467 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12468 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12469 prev = c;
12470 break;
12471 case OMP_CLAUSE_PRIVATE:
12472 var = OMP_CLAUSE_DECL (c);
12473 if (is_variable_sized (var))
12475 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12476 tree new_var = lookup_decl (var, ctx);
12477 tree pvar = DECL_VALUE_EXPR (var);
12478 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12479 pvar = TREE_OPERAND (pvar, 0);
12480 gcc_assert (DECL_P (pvar));
12481 tree new_pvar = lookup_decl (pvar, ctx);
12482 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12483 tree al = size_int (DECL_ALIGN (var));
12484 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12485 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12486 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12487 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12488 gimple_seq_add_stmt (&new_body,
12489 gimple_build_assign (new_pvar, x));
12491 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12493 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12494 tree new_var = lookup_decl (var, ctx);
12495 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12496 if (TREE_CONSTANT (x))
12497 break;
12498 else
12500 tree atmp
12501 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12502 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12503 tree al = size_int (TYPE_ALIGN (rtype));
12504 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12507 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12508 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12509 gimple_seq_add_stmt (&new_body,
12510 gimple_build_assign (new_var, x));
12512 break;
12515 gimple_seq fork_seq = NULL;
12516 gimple_seq join_seq = NULL;
12518 if (is_oacc_parallel_or_serial (ctx))
12520 /* If there are reductions on the offloaded region itself, treat
12521 them as a dummy GANG loop. */
12522 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12524 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12525 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12528 gimple_seq_add_seq (&new_body, fork_seq);
12529 gimple_seq_add_seq (&new_body, tgt_body);
12530 gimple_seq_add_seq (&new_body, join_seq);
12532 if (offloaded)
12533 new_body = maybe_catch_exception (new_body);
12535 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12536 gimple_omp_set_body (stmt, new_body);
12539 bind = gimple_build_bind (NULL, NULL,
12540 tgt_bind ? gimple_bind_block (tgt_bind)
12541 : NULL_TREE);
12542 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12543 gimple_bind_add_seq (bind, ilist);
12544 gimple_bind_add_stmt (bind, stmt);
12545 gimple_bind_add_seq (bind, olist);
12547 pop_gimplify_context (NULL);
12549 if (dep_bind)
12551 gimple_bind_add_seq (dep_bind, dep_ilist);
12552 gimple_bind_add_stmt (dep_bind, bind);
12553 gimple_bind_add_seq (dep_bind, dep_olist);
12554 pop_gimplify_context (dep_bind);
12558 /* Expand code for an OpenMP teams directive. */
12560 static void
12561 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12563 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12564 push_gimplify_context ();
12566 tree block = make_node (BLOCK);
12567 gbind *bind = gimple_build_bind (NULL, NULL, block);
12568 gsi_replace (gsi_p, bind, true);
12569 gimple_seq bind_body = NULL;
12570 gimple_seq dlist = NULL;
12571 gimple_seq olist = NULL;
12573 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12574 OMP_CLAUSE_NUM_TEAMS);
12575 if (num_teams == NULL_TREE)
12576 num_teams = build_int_cst (unsigned_type_node, 0);
12577 else
12579 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12580 num_teams = fold_convert (unsigned_type_node, num_teams);
12581 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12583 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12584 OMP_CLAUSE_THREAD_LIMIT);
12585 if (thread_limit == NULL_TREE)
12586 thread_limit = build_int_cst (unsigned_type_node, 0);
12587 else
12589 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12590 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12591 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12592 fb_rvalue);
12595 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12596 &bind_body, &dlist, ctx, NULL);
12597 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12598 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12599 NULL, ctx);
12600 if (!gimple_omp_teams_grid_phony (teams_stmt))
12602 gimple_seq_add_stmt (&bind_body, teams_stmt);
12603 location_t loc = gimple_location (teams_stmt);
12604 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12605 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12606 gimple_set_location (call, loc);
12607 gimple_seq_add_stmt (&bind_body, call);
12610 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12611 gimple_omp_set_body (teams_stmt, NULL);
12612 gimple_seq_add_seq (&bind_body, olist);
12613 gimple_seq_add_seq (&bind_body, dlist);
12614 if (!gimple_omp_teams_grid_phony (teams_stmt))
12615 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12616 gimple_bind_set_body (bind, bind_body);
12618 pop_gimplify_context (bind);
12620 gimple_bind_append_vars (bind, ctx->block_vars);
12621 BLOCK_VARS (block) = ctx->block_vars;
12622 if (BLOCK_VARS (block))
12623 TREE_USED (block) = 1;
12626 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
12628 static void
12629 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12631 gimple *stmt = gsi_stmt (*gsi_p);
12632 lower_omp (gimple_omp_body_ptr (stmt), ctx);
12633 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
12634 gimple_build_omp_return (false));
12638 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12639 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12640 of OMP context, but with task_shared_vars set. */
12642 static tree
12643 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12644 void *data)
12646 tree t = *tp;
12648 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12649 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12650 return t;
12652 if (task_shared_vars
12653 && DECL_P (t)
12654 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12655 return t;
12657 /* If a global variable has been privatized, TREE_CONSTANT on
12658 ADDR_EXPR might be wrong. */
12659 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12660 recompute_tree_invariant_for_addr_expr (t);
12662 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12663 return NULL_TREE;
12666 /* Data to be communicated between lower_omp_regimplify_operands and
12667 lower_omp_regimplify_operands_p. */
12669 struct lower_omp_regimplify_operands_data
12671 omp_context *ctx;
12672 vec<tree> *decls;
12675 /* Helper function for lower_omp_regimplify_operands. Find
12676 omp_member_access_dummy_var vars and adjust temporarily their
12677 DECL_VALUE_EXPRs if needed. */
12679 static tree
12680 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12681 void *data)
12683 tree t = omp_member_access_dummy_var (*tp);
12684 if (t)
12686 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12687 lower_omp_regimplify_operands_data *ldata
12688 = (lower_omp_regimplify_operands_data *) wi->info;
12689 tree o = maybe_lookup_decl (t, ldata->ctx);
12690 if (o != t)
12692 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12693 ldata->decls->safe_push (*tp);
12694 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12695 SET_DECL_VALUE_EXPR (*tp, v);
12698 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12699 return NULL_TREE;
12702 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12703 of omp_member_access_dummy_var vars during regimplification. */
12705 static void
12706 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12707 gimple_stmt_iterator *gsi_p)
12709 auto_vec<tree, 10> decls;
12710 if (ctx)
12712 struct walk_stmt_info wi;
12713 memset (&wi, '\0', sizeof (wi));
12714 struct lower_omp_regimplify_operands_data data;
12715 data.ctx = ctx;
12716 data.decls = &decls;
12717 wi.info = &data;
12718 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12720 gimple_regimplify_operands (stmt, gsi_p);
12721 while (!decls.is_empty ())
12723 tree t = decls.pop ();
12724 tree v = decls.pop ();
12725 SET_DECL_VALUE_EXPR (t, v);
12729 static void
12730 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12732 gimple *stmt = gsi_stmt (*gsi_p);
12733 struct walk_stmt_info wi;
12734 gcall *call_stmt;
12736 if (gimple_has_location (stmt))
12737 input_location = gimple_location (stmt);
12739 if (task_shared_vars)
12740 memset (&wi, '\0', sizeof (wi));
12742 /* If we have issued syntax errors, avoid doing any heavy lifting.
12743 Just replace the OMP directives with a NOP to avoid
12744 confusing RTL expansion. */
12745 if (seen_error () && is_gimple_omp (stmt))
12747 gsi_replace (gsi_p, gimple_build_nop (), true);
12748 return;
12751 switch (gimple_code (stmt))
12753 case GIMPLE_COND:
12755 gcond *cond_stmt = as_a <gcond *> (stmt);
12756 if ((ctx || task_shared_vars)
12757 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12758 lower_omp_regimplify_p,
12759 ctx ? NULL : &wi, NULL)
12760 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12761 lower_omp_regimplify_p,
12762 ctx ? NULL : &wi, NULL)))
12763 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12765 break;
12766 case GIMPLE_CATCH:
12767 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12768 break;
12769 case GIMPLE_EH_FILTER:
12770 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12771 break;
12772 case GIMPLE_TRY:
12773 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12774 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12775 break;
12776 case GIMPLE_TRANSACTION:
12777 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12778 ctx);
12779 break;
12780 case GIMPLE_BIND:
12781 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12782 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12783 break;
12784 case GIMPLE_OMP_PARALLEL:
12785 case GIMPLE_OMP_TASK:
12786 ctx = maybe_lookup_ctx (stmt);
12787 gcc_assert (ctx);
12788 if (ctx->cancellable)
12789 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12790 lower_omp_taskreg (gsi_p, ctx);
12791 break;
12792 case GIMPLE_OMP_FOR:
12793 ctx = maybe_lookup_ctx (stmt);
12794 gcc_assert (ctx);
12795 if (ctx->cancellable)
12796 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12797 lower_omp_for (gsi_p, ctx);
12798 break;
12799 case GIMPLE_OMP_SECTIONS:
12800 ctx = maybe_lookup_ctx (stmt);
12801 gcc_assert (ctx);
12802 if (ctx->cancellable)
12803 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12804 lower_omp_sections (gsi_p, ctx);
12805 break;
12806 case GIMPLE_OMP_SINGLE:
12807 ctx = maybe_lookup_ctx (stmt);
12808 gcc_assert (ctx);
12809 lower_omp_single (gsi_p, ctx);
12810 break;
12811 case GIMPLE_OMP_MASTER:
12812 ctx = maybe_lookup_ctx (stmt);
12813 gcc_assert (ctx);
12814 lower_omp_master (gsi_p, ctx);
12815 break;
12816 case GIMPLE_OMP_TASKGROUP:
12817 ctx = maybe_lookup_ctx (stmt);
12818 gcc_assert (ctx);
12819 lower_omp_taskgroup (gsi_p, ctx);
12820 break;
12821 case GIMPLE_OMP_ORDERED:
12822 ctx = maybe_lookup_ctx (stmt);
12823 gcc_assert (ctx);
12824 lower_omp_ordered (gsi_p, ctx);
12825 break;
12826 case GIMPLE_OMP_SCAN:
12827 ctx = maybe_lookup_ctx (stmt);
12828 gcc_assert (ctx);
12829 lower_omp_scan (gsi_p, ctx);
12830 break;
12831 case GIMPLE_OMP_CRITICAL:
12832 ctx = maybe_lookup_ctx (stmt);
12833 gcc_assert (ctx);
12834 lower_omp_critical (gsi_p, ctx);
12835 break;
12836 case GIMPLE_OMP_ATOMIC_LOAD:
12837 if ((ctx || task_shared_vars)
12838 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12839 as_a <gomp_atomic_load *> (stmt)),
12840 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12841 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12842 break;
12843 case GIMPLE_OMP_TARGET:
12844 ctx = maybe_lookup_ctx (stmt);
12845 gcc_assert (ctx);
12846 lower_omp_target (gsi_p, ctx);
12847 break;
12848 case GIMPLE_OMP_TEAMS:
12849 ctx = maybe_lookup_ctx (stmt);
12850 gcc_assert (ctx);
12851 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12852 lower_omp_taskreg (gsi_p, ctx);
12853 else
12854 lower_omp_teams (gsi_p, ctx);
12855 break;
12856 case GIMPLE_OMP_GRID_BODY:
12857 ctx = maybe_lookup_ctx (stmt);
12858 gcc_assert (ctx);
12859 lower_omp_grid_body (gsi_p, ctx);
12860 break;
12861 case GIMPLE_CALL:
12862 tree fndecl;
12863 call_stmt = as_a <gcall *> (stmt);
12864 fndecl = gimple_call_fndecl (call_stmt);
12865 if (fndecl
12866 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12867 switch (DECL_FUNCTION_CODE (fndecl))
12869 case BUILT_IN_GOMP_BARRIER:
12870 if (ctx == NULL)
12871 break;
12872 /* FALLTHRU */
12873 case BUILT_IN_GOMP_CANCEL:
12874 case BUILT_IN_GOMP_CANCELLATION_POINT:
12875 omp_context *cctx;
12876 cctx = ctx;
12877 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12878 cctx = cctx->outer;
12879 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12880 if (!cctx->cancellable)
12882 if (DECL_FUNCTION_CODE (fndecl)
12883 == BUILT_IN_GOMP_CANCELLATION_POINT)
12885 stmt = gimple_build_nop ();
12886 gsi_replace (gsi_p, stmt, false);
12888 break;
12890 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12892 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12893 gimple_call_set_fndecl (call_stmt, fndecl);
12894 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12896 tree lhs;
12897 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12898 gimple_call_set_lhs (call_stmt, lhs);
12899 tree fallthru_label;
12900 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12901 gimple *g;
12902 g = gimple_build_label (fallthru_label);
12903 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12904 g = gimple_build_cond (NE_EXPR, lhs,
12905 fold_convert (TREE_TYPE (lhs),
12906 boolean_false_node),
12907 cctx->cancel_label, fallthru_label);
12908 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12909 break;
12910 default:
12911 break;
12913 goto regimplify;
12915 case GIMPLE_ASSIGN:
12916 for (omp_context *up = ctx; up; up = up->outer)
12918 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12919 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12920 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
12921 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
12922 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
12923 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12924 && (gimple_omp_target_kind (up->stmt)
12925 == GF_OMP_TARGET_KIND_DATA)))
12926 continue;
12927 else if (!up->lastprivate_conditional_map)
12928 break;
12929 tree lhs = get_base_address (gimple_assign_lhs (stmt));
12930 if (TREE_CODE (lhs) == MEM_REF
12931 && DECL_P (TREE_OPERAND (lhs, 0))
12932 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12933 0))) == REFERENCE_TYPE)
12934 lhs = TREE_OPERAND (lhs, 0);
12935 if (DECL_P (lhs))
12936 if (tree *v = up->lastprivate_conditional_map->get (lhs))
12938 tree clauses;
12939 if (up->combined_into_simd_safelen1)
12941 up = up->outer;
12942 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
12943 up = up->outer;
12945 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12946 clauses = gimple_omp_for_clauses (up->stmt);
12947 else
12948 clauses = gimple_omp_sections_clauses (up->stmt);
12949 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
12950 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12951 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12952 OMP_CLAUSE__CONDTEMP_);
12953 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
12954 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12955 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12958 /* FALLTHRU */
12960 default:
12961 regimplify:
12962 if ((ctx || task_shared_vars)
12963 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12964 ctx ? NULL : &wi))
12966 /* Just remove clobbers, this should happen only if we have
12967 "privatized" local addressable variables in SIMD regions,
12968 the clobber isn't needed in that case and gimplifying address
12969 of the ARRAY_REF into a pointer and creating MEM_REF based
12970 clobber would create worse code than we get with the clobber
12971 dropped. */
12972 if (gimple_clobber_p (stmt))
12974 gsi_replace (gsi_p, gimple_build_nop (), true);
12975 break;
12977 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12979 break;
12983 static void
12984 lower_omp (gimple_seq *body, omp_context *ctx)
12986 location_t saved_location = input_location;
12987 gimple_stmt_iterator gsi;
12988 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12989 lower_omp_1 (&gsi, ctx);
12990 /* During gimplification, we haven't folded statments inside offloading
12991 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12992 if (target_nesting_level || taskreg_nesting_level)
12993 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12994 fold_stmt (&gsi);
12995 input_location = saved_location;
12998 /* Main entry point. */
13000 static unsigned int
13001 execute_lower_omp (void)
13003 gimple_seq body;
13004 int i;
13005 omp_context *ctx;
13007 /* This pass always runs, to provide PROP_gimple_lomp.
13008 But often, there is nothing to do. */
13009 if (flag_openacc == 0 && flag_openmp == 0
13010 && flag_openmp_simd == 0)
13011 return 0;
13013 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
13014 delete_omp_context);
13016 body = gimple_body (current_function_decl);
13018 if (hsa_gen_requested_p ())
13019 omp_grid_gridify_all_targets (&body);
13021 scan_omp (&body, NULL);
13022 gcc_assert (taskreg_nesting_level == 0);
13023 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
13024 finish_taskreg_scan (ctx);
13025 taskreg_contexts.release ();
13027 if (all_contexts->root)
13029 if (task_shared_vars)
13030 push_gimplify_context ();
13031 lower_omp (&body, NULL);
13032 if (task_shared_vars)
13033 pop_gimplify_context (NULL);
13036 if (all_contexts)
13038 splay_tree_delete (all_contexts);
13039 all_contexts = NULL;
13041 BITMAP_FREE (task_shared_vars);
13042 BITMAP_FREE (global_nonaddressable_vars);
13044 /* If current function is a method, remove artificial dummy VAR_DECL created
13045 for non-static data member privatization, they aren't needed for
13046 debuginfo nor anything else, have been already replaced everywhere in the
13047 IL and cause problems with LTO. */
13048 if (DECL_ARGUMENTS (current_function_decl)
13049 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
13050 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
13051 == POINTER_TYPE))
13052 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
13053 return 0;
13056 namespace {
13058 const pass_data pass_data_lower_omp =
13060 GIMPLE_PASS, /* type */
13061 "omplower", /* name */
13062 OPTGROUP_OMP, /* optinfo_flags */
13063 TV_NONE, /* tv_id */
13064 PROP_gimple_any, /* properties_required */
13065 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
13066 0, /* properties_destroyed */
13067 0, /* todo_flags_start */
13068 0, /* todo_flags_finish */
13071 class pass_lower_omp : public gimple_opt_pass
13073 public:
13074 pass_lower_omp (gcc::context *ctxt)
13075 : gimple_opt_pass (pass_data_lower_omp, ctxt)
13078 /* opt_pass methods: */
13079 virtual unsigned int execute (function *) { return execute_lower_omp (); }
13081 }; // class pass_lower_omp
13083 } // anon namespace
13085 gimple_opt_pass *
13086 make_pass_lower_omp (gcc::context *ctxt)
13088 return new pass_lower_omp (ctxt);
13091 /* The following is a utility to diagnose structured block violations.
13092 It is not part of the "omplower" pass, as that's invoked too late. It
13093 should be invoked by the respective front ends after gimplification. */
13095 static splay_tree all_labels;
13097 /* Check for mismatched contexts and generate an error if needed. Return
13098 true if an error is detected. */
13100 static bool
13101 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13102 gimple *branch_ctx, gimple *label_ctx)
13104 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13105 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
13107 if (label_ctx == branch_ctx)
13108 return false;
13110 const char* kind = NULL;
13112 if (flag_openacc)
13114 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
13115 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
13117 gcc_checking_assert (kind == NULL);
13118 kind = "OpenACC";
13121 if (kind == NULL)
13123 gcc_checking_assert (flag_openmp || flag_openmp_simd);
13124 kind = "OpenMP";
13127 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13128 so we could traverse it and issue a correct "exit" or "enter" error
13129 message upon a structured block violation.
13131 We built the context by building a list with tree_cons'ing, but there is
13132 no easy counterpart in gimple tuples. It seems like far too much work
13133 for issuing exit/enter error messages. If someone really misses the
13134 distinct error message... patches welcome. */
13136 #if 0
13137 /* Try to avoid confusing the user by producing and error message
13138 with correct "exit" or "enter" verbiage. We prefer "exit"
13139 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13140 if (branch_ctx == NULL)
13141 exit_p = false;
13142 else
13144 while (label_ctx)
13146 if (TREE_VALUE (label_ctx) == branch_ctx)
13148 exit_p = false;
13149 break;
13151 label_ctx = TREE_CHAIN (label_ctx);
13155 if (exit_p)
13156 error ("invalid exit from %s structured block", kind);
13157 else
13158 error ("invalid entry to %s structured block", kind);
13159 #endif
13161 /* If it's obvious we have an invalid entry, be specific about the error. */
13162 if (branch_ctx == NULL)
13163 error ("invalid entry to %s structured block", kind);
13164 else
13166 /* Otherwise, be vague and lazy, but efficient. */
13167 error ("invalid branch to/from %s structured block", kind);
13170 gsi_replace (gsi_p, gimple_build_nop (), false);
13171 return true;
13174 /* Pass 1: Create a minimal tree of structured blocks, and record
13175 where each label is found. */
13177 static tree
13178 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13179 struct walk_stmt_info *wi)
13181 gimple *context = (gimple *) wi->info;
13182 gimple *inner_context;
13183 gimple *stmt = gsi_stmt (*gsi_p);
13185 *handled_ops_p = true;
13187 switch (gimple_code (stmt))
13189 WALK_SUBSTMTS;
13191 case GIMPLE_OMP_PARALLEL:
13192 case GIMPLE_OMP_TASK:
13193 case GIMPLE_OMP_SECTIONS:
13194 case GIMPLE_OMP_SINGLE:
13195 case GIMPLE_OMP_SECTION:
13196 case GIMPLE_OMP_MASTER:
13197 case GIMPLE_OMP_ORDERED:
13198 case GIMPLE_OMP_SCAN:
13199 case GIMPLE_OMP_CRITICAL:
13200 case GIMPLE_OMP_TARGET:
13201 case GIMPLE_OMP_TEAMS:
13202 case GIMPLE_OMP_TASKGROUP:
13203 /* The minimal context here is just the current OMP construct. */
13204 inner_context = stmt;
13205 wi->info = inner_context;
13206 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13207 wi->info = context;
13208 break;
13210 case GIMPLE_OMP_FOR:
13211 inner_context = stmt;
13212 wi->info = inner_context;
13213 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13214 walk them. */
13215 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13216 diagnose_sb_1, NULL, wi);
13217 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13218 wi->info = context;
13219 break;
13221 case GIMPLE_LABEL:
13222 splay_tree_insert (all_labels,
13223 (splay_tree_key) gimple_label_label (
13224 as_a <glabel *> (stmt)),
13225 (splay_tree_value) context);
13226 break;
13228 default:
13229 break;
13232 return NULL_TREE;
13235 /* Pass 2: Check each branch and see if its context differs from that of
13236 the destination label's context. */
13238 static tree
13239 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13240 struct walk_stmt_info *wi)
13242 gimple *context = (gimple *) wi->info;
13243 splay_tree_node n;
13244 gimple *stmt = gsi_stmt (*gsi_p);
13246 *handled_ops_p = true;
13248 switch (gimple_code (stmt))
13250 WALK_SUBSTMTS;
13252 case GIMPLE_OMP_PARALLEL:
13253 case GIMPLE_OMP_TASK:
13254 case GIMPLE_OMP_SECTIONS:
13255 case GIMPLE_OMP_SINGLE:
13256 case GIMPLE_OMP_SECTION:
13257 case GIMPLE_OMP_MASTER:
13258 case GIMPLE_OMP_ORDERED:
13259 case GIMPLE_OMP_SCAN:
13260 case GIMPLE_OMP_CRITICAL:
13261 case GIMPLE_OMP_TARGET:
13262 case GIMPLE_OMP_TEAMS:
13263 case GIMPLE_OMP_TASKGROUP:
13264 wi->info = stmt;
13265 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13266 wi->info = context;
13267 break;
13269 case GIMPLE_OMP_FOR:
13270 wi->info = stmt;
13271 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13272 walk them. */
13273 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13274 diagnose_sb_2, NULL, wi);
13275 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13276 wi->info = context;
13277 break;
13279 case GIMPLE_COND:
13281 gcond *cond_stmt = as_a <gcond *> (stmt);
13282 tree lab = gimple_cond_true_label (cond_stmt);
13283 if (lab)
13285 n = splay_tree_lookup (all_labels,
13286 (splay_tree_key) lab);
13287 diagnose_sb_0 (gsi_p, context,
13288 n ? (gimple *) n->value : NULL);
13290 lab = gimple_cond_false_label (cond_stmt);
13291 if (lab)
13293 n = splay_tree_lookup (all_labels,
13294 (splay_tree_key) lab);
13295 diagnose_sb_0 (gsi_p, context,
13296 n ? (gimple *) n->value : NULL);
13299 break;
13301 case GIMPLE_GOTO:
13303 tree lab = gimple_goto_dest (stmt);
13304 if (TREE_CODE (lab) != LABEL_DECL)
13305 break;
13307 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13308 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13310 break;
13312 case GIMPLE_SWITCH:
13314 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13315 unsigned int i;
13316 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13318 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13319 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13320 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13321 break;
13324 break;
13326 case GIMPLE_RETURN:
13327 diagnose_sb_0 (gsi_p, context, NULL);
13328 break;
13330 default:
13331 break;
13334 return NULL_TREE;
13337 static unsigned int
13338 diagnose_omp_structured_block_errors (void)
13340 struct walk_stmt_info wi;
13341 gimple_seq body = gimple_body (current_function_decl);
13343 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13345 memset (&wi, 0, sizeof (wi));
13346 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13348 memset (&wi, 0, sizeof (wi));
13349 wi.want_locations = true;
13350 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13352 gimple_set_body (current_function_decl, body);
13354 splay_tree_delete (all_labels);
13355 all_labels = NULL;
13357 return 0;
13360 namespace {
13362 const pass_data pass_data_diagnose_omp_blocks =
13364 GIMPLE_PASS, /* type */
13365 "*diagnose_omp_blocks", /* name */
13366 OPTGROUP_OMP, /* optinfo_flags */
13367 TV_NONE, /* tv_id */
13368 PROP_gimple_any, /* properties_required */
13369 0, /* properties_provided */
13370 0, /* properties_destroyed */
13371 0, /* todo_flags_start */
13372 0, /* todo_flags_finish */
13375 class pass_diagnose_omp_blocks : public gimple_opt_pass
13377 public:
13378 pass_diagnose_omp_blocks (gcc::context *ctxt)
13379 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
13382 /* opt_pass methods: */
13383 virtual bool gate (function *)
13385 return flag_openacc || flag_openmp || flag_openmp_simd;
13387 virtual unsigned int execute (function *)
13389 return diagnose_omp_structured_block_errors ();
13392 }; // class pass_diagnose_omp_blocks
13394 } // anon namespace
13396 gimple_opt_pass *
13397 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13399 return new pass_diagnose_omp_blocks (ctxt);
13403 #include "gt-omp-low.h"