* tree-ssa-loop-ivopts.c (ivopts_estimate_reg_pressure): New
[official-gcc.git] / gcc / omp-low.c
blob9a1624851c3762411cca91f1d1e35fc09f5b1cc0
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2017 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
62 /* Lowering of OMP parallel and workshare constructs proceeds in two
63 phases. The first phase scans the function looking for OMP statements
64 and then for variables that must be replaced to satisfy data sharing
65 clauses. The second phase expands code for the constructs, as well as
66 re-gimplifying things when variables have been replaced with complex
67 expressions.
69 Final code generation is done by pass_expand_omp. The flowgraph is
70 scanned for regions which are then moved to a new
71 function, to be invoked by the thread library, or offloaded. */
73 /* Context structure. Used to store information about each parallel
74 directive in the code. */
76 struct omp_context
78 /* This field must be at the beginning, as we do "inheritance": Some
79 callback functions for tree-inline.c (e.g., omp_copy_decl)
80 receive a copy_body_data pointer that is up-casted to an
81 omp_context pointer. */
82 copy_body_data cb;
84 /* The tree of contexts corresponding to the encountered constructs. */
85 struct omp_context *outer;
86 gimple *stmt;
88 /* Map variables to fields in a structure that allows communication
89 between sending and receiving threads. */
90 splay_tree field_map;
91 tree record_type;
92 tree sender_decl;
93 tree receiver_decl;
95 /* These are used just by task contexts, if task firstprivate fn is
96 needed. srecord_type is used to communicate from the thread
97 that encountered the task construct to task firstprivate fn,
98 record_type is allocated by GOMP_task, initialized by task firstprivate
99 fn and passed to the task body fn. */
100 splay_tree sfield_map;
101 tree srecord_type;
103 /* A chain of variables to add to the top-level block surrounding the
104 construct. In the case of a parallel, this is in the child function. */
105 tree block_vars;
107 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
108 barriers should jump to during omplower pass. */
109 tree cancel_label;
111 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
112 otherwise. */
113 gimple *simt_stmt;
115 /* Nesting depth of this context. Used to beautify error messages re
116 invalid gotos. The outermost ctx is depth 1, with depth 0 being
117 reserved for the main body of the function. */
118 int depth;
120 /* True if this parallel directive is nested within another. */
121 bool is_nested;
123 /* True if this construct can be cancelled. */
124 bool cancellable;
127 static splay_tree all_contexts;
128 static int taskreg_nesting_level;
129 static int target_nesting_level;
130 static bitmap task_shared_vars;
131 static vec<omp_context *> taskreg_contexts;
133 static void scan_omp (gimple_seq *, omp_context *);
134 static tree scan_omp_1_op (tree *, int *, void *);
136 #define WALK_SUBSTMTS \
137 case GIMPLE_BIND: \
138 case GIMPLE_TRY: \
139 case GIMPLE_CATCH: \
140 case GIMPLE_EH_FILTER: \
141 case GIMPLE_TRANSACTION: \
142 /* The sub-statements for these should be walked. */ \
143 *handled_ops_p = false; \
144 break;
146 /* Return true if CTX corresponds to an oacc parallel region. */
148 static bool
149 is_oacc_parallel (omp_context *ctx)
151 enum gimple_code outer_type = gimple_code (ctx->stmt);
152 return ((outer_type == GIMPLE_OMP_TARGET)
153 && (gimple_omp_target_kind (ctx->stmt)
154 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
157 /* Return true if CTX corresponds to an oacc kernels region. */
159 static bool
160 is_oacc_kernels (omp_context *ctx)
162 enum gimple_code outer_type = gimple_code (ctx->stmt);
163 return ((outer_type == GIMPLE_OMP_TARGET)
164 && (gimple_omp_target_kind (ctx->stmt)
165 == GF_OMP_TARGET_KIND_OACC_KERNELS));
168 /* If DECL is the artificial dummy VAR_DECL created for non-static
169 data member privatization, return the underlying "this" parameter,
170 otherwise return NULL. */
172 tree
173 omp_member_access_dummy_var (tree decl)
175 if (!VAR_P (decl)
176 || !DECL_ARTIFICIAL (decl)
177 || !DECL_IGNORED_P (decl)
178 || !DECL_HAS_VALUE_EXPR_P (decl)
179 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
180 return NULL_TREE;
182 tree v = DECL_VALUE_EXPR (decl);
183 if (TREE_CODE (v) != COMPONENT_REF)
184 return NULL_TREE;
186 while (1)
187 switch (TREE_CODE (v))
189 case COMPONENT_REF:
190 case MEM_REF:
191 case INDIRECT_REF:
192 CASE_CONVERT:
193 case POINTER_PLUS_EXPR:
194 v = TREE_OPERAND (v, 0);
195 continue;
196 case PARM_DECL:
197 if (DECL_CONTEXT (v) == current_function_decl
198 && DECL_ARTIFICIAL (v)
199 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
200 return v;
201 return NULL_TREE;
202 default:
203 return NULL_TREE;
207 /* Helper for unshare_and_remap, called through walk_tree. */
209 static tree
210 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
212 tree *pair = (tree *) data;
213 if (*tp == pair[0])
215 *tp = unshare_expr (pair[1]);
216 *walk_subtrees = 0;
218 else if (IS_TYPE_OR_DECL_P (*tp))
219 *walk_subtrees = 0;
220 return NULL_TREE;
223 /* Return unshare_expr (X) with all occurrences of FROM
224 replaced with TO. */
226 static tree
227 unshare_and_remap (tree x, tree from, tree to)
229 tree pair[2] = { from, to };
230 x = unshare_expr (x);
231 walk_tree (&x, unshare_and_remap_1, pair, NULL);
232 return x;
235 /* Convenience function for calling scan_omp_1_op on tree operands. */
237 static inline tree
238 scan_omp_op (tree *tp, omp_context *ctx)
240 struct walk_stmt_info wi;
242 memset (&wi, 0, sizeof (wi));
243 wi.info = ctx;
244 wi.want_locations = true;
246 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
249 static void lower_omp (gimple_seq *, omp_context *);
250 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
251 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
253 /* Return true if CTX is for an omp parallel. */
255 static inline bool
256 is_parallel_ctx (omp_context *ctx)
258 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
262 /* Return true if CTX is for an omp task. */
264 static inline bool
265 is_task_ctx (omp_context *ctx)
267 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
271 /* Return true if CTX is for an omp taskloop. */
273 static inline bool
274 is_taskloop_ctx (omp_context *ctx)
276 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
277 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
281 /* Return true if CTX is for an omp parallel or omp task. */
283 static inline bool
284 is_taskreg_ctx (omp_context *ctx)
286 return is_parallel_ctx (ctx) || is_task_ctx (ctx);
289 /* Return true if EXPR is variable sized. */
291 static inline bool
292 is_variable_sized (const_tree expr)
294 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
297 /* Lookup variables. The "maybe" form
298 allows for the variable form to not have been entered, otherwise we
299 assert that the variable must have been entered. */
301 static inline tree
302 lookup_decl (tree var, omp_context *ctx)
304 tree *n = ctx->cb.decl_map->get (var);
305 return *n;
308 static inline tree
309 maybe_lookup_decl (const_tree var, omp_context *ctx)
311 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
312 return n ? *n : NULL_TREE;
315 static inline tree
316 lookup_field (tree var, omp_context *ctx)
318 splay_tree_node n;
319 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
320 return (tree) n->value;
323 static inline tree
324 lookup_sfield (splay_tree_key key, omp_context *ctx)
326 splay_tree_node n;
327 n = splay_tree_lookup (ctx->sfield_map
328 ? ctx->sfield_map : ctx->field_map, key);
329 return (tree) n->value;
332 static inline tree
333 lookup_sfield (tree var, omp_context *ctx)
335 return lookup_sfield ((splay_tree_key) var, ctx);
338 static inline tree
339 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
341 splay_tree_node n;
342 n = splay_tree_lookup (ctx->field_map, key);
343 return n ? (tree) n->value : NULL_TREE;
346 static inline tree
347 maybe_lookup_field (tree var, omp_context *ctx)
349 return maybe_lookup_field ((splay_tree_key) var, ctx);
352 /* Return true if DECL should be copied by pointer. SHARED_CTX is
353 the parallel context if DECL is to be shared. */
355 static bool
356 use_pointer_for_field (tree decl, omp_context *shared_ctx)
358 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
359 || TYPE_ATOMIC (TREE_TYPE (decl)))
360 return true;
362 /* We can only use copy-in/copy-out semantics for shared variables
363 when we know the value is not accessible from an outer scope. */
364 if (shared_ctx)
366 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
368 /* ??? Trivially accessible from anywhere. But why would we even
369 be passing an address in this case? Should we simply assert
370 this to be false, or should we have a cleanup pass that removes
371 these from the list of mappings? */
372 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
373 return true;
375 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
376 without analyzing the expression whether or not its location
377 is accessible to anyone else. In the case of nested parallel
378 regions it certainly may be. */
379 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
380 return true;
382 /* Do not use copy-in/copy-out for variables that have their
383 address taken. */
384 if (TREE_ADDRESSABLE (decl))
385 return true;
387 /* lower_send_shared_vars only uses copy-in, but not copy-out
388 for these. */
389 if (TREE_READONLY (decl)
390 || ((TREE_CODE (decl) == RESULT_DECL
391 || TREE_CODE (decl) == PARM_DECL)
392 && DECL_BY_REFERENCE (decl)))
393 return false;
395 /* Disallow copy-in/out in nested parallel if
396 decl is shared in outer parallel, otherwise
397 each thread could store the shared variable
398 in its own copy-in location, making the
399 variable no longer really shared. */
400 if (shared_ctx->is_nested)
402 omp_context *up;
404 for (up = shared_ctx->outer; up; up = up->outer)
405 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
406 break;
408 if (up)
410 tree c;
412 for (c = gimple_omp_taskreg_clauses (up->stmt);
413 c; c = OMP_CLAUSE_CHAIN (c))
414 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
415 && OMP_CLAUSE_DECL (c) == decl)
416 break;
418 if (c)
419 goto maybe_mark_addressable_and_ret;
423 /* For tasks avoid using copy-in/out. As tasks can be
424 deferred or executed in different thread, when GOMP_task
425 returns, the task hasn't necessarily terminated. */
426 if (is_task_ctx (shared_ctx))
428 tree outer;
429 maybe_mark_addressable_and_ret:
430 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
431 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
433 /* Taking address of OUTER in lower_send_shared_vars
434 might need regimplification of everything that uses the
435 variable. */
436 if (!task_shared_vars)
437 task_shared_vars = BITMAP_ALLOC (NULL);
438 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
439 TREE_ADDRESSABLE (outer) = 1;
441 return true;
445 return false;
448 /* Construct a new automatic decl similar to VAR. */
450 static tree
451 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
453 tree copy = copy_var_decl (var, name, type);
455 DECL_CONTEXT (copy) = current_function_decl;
456 DECL_CHAIN (copy) = ctx->block_vars;
457 /* If VAR is listed in task_shared_vars, it means it wasn't
458 originally addressable and is just because task needs to take
459 it's address. But we don't need to take address of privatizations
460 from that var. */
461 if (TREE_ADDRESSABLE (var)
462 && task_shared_vars
463 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
464 TREE_ADDRESSABLE (copy) = 0;
465 ctx->block_vars = copy;
467 return copy;
470 static tree
471 omp_copy_decl_1 (tree var, omp_context *ctx)
473 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
476 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
477 as appropriate. */
478 static tree
479 omp_build_component_ref (tree obj, tree field)
481 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
482 if (TREE_THIS_VOLATILE (field))
483 TREE_THIS_VOLATILE (ret) |= 1;
484 if (TREE_READONLY (field))
485 TREE_READONLY (ret) |= 1;
486 return ret;
489 /* Build tree nodes to access the field for VAR on the receiver side. */
491 static tree
492 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
494 tree x, field = lookup_field (var, ctx);
496 /* If the receiver record type was remapped in the child function,
497 remap the field into the new record type. */
498 x = maybe_lookup_field (field, ctx);
499 if (x != NULL)
500 field = x;
502 x = build_simple_mem_ref (ctx->receiver_decl);
503 TREE_THIS_NOTRAP (x) = 1;
504 x = omp_build_component_ref (x, field);
505 if (by_ref)
507 x = build_simple_mem_ref (x);
508 TREE_THIS_NOTRAP (x) = 1;
511 return x;
514 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
515 of a parallel, this is a component reference; for workshare constructs
516 this is some variable. */
518 static tree
519 build_outer_var_ref (tree var, omp_context *ctx,
520 enum omp_clause_code code = OMP_CLAUSE_ERROR)
522 tree x;
524 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
525 x = var;
526 else if (is_variable_sized (var))
528 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
529 x = build_outer_var_ref (x, ctx, code);
530 x = build_simple_mem_ref (x);
532 else if (is_taskreg_ctx (ctx))
534 bool by_ref = use_pointer_for_field (var, NULL);
535 x = build_receiver_ref (var, by_ref, ctx);
537 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
538 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
539 || (code == OMP_CLAUSE_PRIVATE
540 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
541 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
542 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
544 /* #pragma omp simd isn't a worksharing construct, and can reference
545 even private vars in its linear etc. clauses.
546 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
547 to private vars in all worksharing constructs. */
548 x = NULL_TREE;
549 if (ctx->outer && is_taskreg_ctx (ctx))
550 x = lookup_decl (var, ctx->outer);
551 else if (ctx->outer)
552 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
553 if (x == NULL_TREE)
554 x = var;
556 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
558 gcc_assert (ctx->outer);
559 splay_tree_node n
560 = splay_tree_lookup (ctx->outer->field_map,
561 (splay_tree_key) &DECL_UID (var));
562 if (n == NULL)
564 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
565 x = var;
566 else
567 x = lookup_decl (var, ctx->outer);
569 else
571 tree field = (tree) n->value;
572 /* If the receiver record type was remapped in the child function,
573 remap the field into the new record type. */
574 x = maybe_lookup_field (field, ctx->outer);
575 if (x != NULL)
576 field = x;
578 x = build_simple_mem_ref (ctx->outer->receiver_decl);
579 x = omp_build_component_ref (x, field);
580 if (use_pointer_for_field (var, ctx->outer))
581 x = build_simple_mem_ref (x);
584 else if (ctx->outer)
586 omp_context *outer = ctx->outer;
587 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
589 outer = outer->outer;
590 gcc_assert (outer
591 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
593 x = lookup_decl (var, outer);
595 else if (omp_is_reference (var))
596 /* This can happen with orphaned constructs. If var is reference, it is
597 possible it is shared and as such valid. */
598 x = var;
599 else if (omp_member_access_dummy_var (var))
600 x = var;
601 else
602 gcc_unreachable ();
604 if (x == var)
606 tree t = omp_member_access_dummy_var (var);
607 if (t)
609 x = DECL_VALUE_EXPR (var);
610 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
611 if (o != t)
612 x = unshare_and_remap (x, t, o);
613 else
614 x = unshare_expr (x);
618 if (omp_is_reference (var))
619 x = build_simple_mem_ref (x);
621 return x;
624 /* Build tree nodes to access the field for VAR on the sender side. */
626 static tree
627 build_sender_ref (splay_tree_key key, omp_context *ctx)
629 tree field = lookup_sfield (key, ctx);
630 return omp_build_component_ref (ctx->sender_decl, field);
633 static tree
634 build_sender_ref (tree var, omp_context *ctx)
636 return build_sender_ref ((splay_tree_key) var, ctx);
639 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
640 BASE_POINTERS_RESTRICT, declare the field with restrict. */
642 static void
643 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
644 bool base_pointers_restrict = false)
646 tree field, type, sfield = NULL_TREE;
647 splay_tree_key key = (splay_tree_key) var;
649 if ((mask & 8) != 0)
651 key = (splay_tree_key) &DECL_UID (var);
652 gcc_checking_assert (key != (splay_tree_key) var);
654 gcc_assert ((mask & 1) == 0
655 || !splay_tree_lookup (ctx->field_map, key));
656 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
657 || !splay_tree_lookup (ctx->sfield_map, key));
658 gcc_assert ((mask & 3) == 3
659 || !is_gimple_omp_oacc (ctx->stmt));
661 type = TREE_TYPE (var);
662 /* Prevent redeclaring the var in the split-off function with a restrict
663 pointer type. Note that we only clear type itself, restrict qualifiers in
664 the pointed-to type will be ignored by points-to analysis. */
665 if (POINTER_TYPE_P (type)
666 && TYPE_RESTRICT (type))
667 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
669 if (mask & 4)
671 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
672 type = build_pointer_type (build_pointer_type (type));
674 else if (by_ref)
676 type = build_pointer_type (type);
677 if (base_pointers_restrict)
678 type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
680 else if ((mask & 3) == 1 && omp_is_reference (var))
681 type = TREE_TYPE (type);
683 field = build_decl (DECL_SOURCE_LOCATION (var),
684 FIELD_DECL, DECL_NAME (var), type);
686 /* Remember what variable this field was created for. This does have a
687 side effect of making dwarf2out ignore this member, so for helpful
688 debugging we clear it later in delete_omp_context. */
689 DECL_ABSTRACT_ORIGIN (field) = var;
690 if (type == TREE_TYPE (var))
692 SET_DECL_ALIGN (field, DECL_ALIGN (var));
693 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
694 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
696 else
697 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
699 if ((mask & 3) == 3)
701 insert_field_into_struct (ctx->record_type, field);
702 if (ctx->srecord_type)
704 sfield = build_decl (DECL_SOURCE_LOCATION (var),
705 FIELD_DECL, DECL_NAME (var), type);
706 DECL_ABSTRACT_ORIGIN (sfield) = var;
707 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
708 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
709 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
710 insert_field_into_struct (ctx->srecord_type, sfield);
713 else
715 if (ctx->srecord_type == NULL_TREE)
717 tree t;
719 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
720 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
721 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
723 sfield = build_decl (DECL_SOURCE_LOCATION (t),
724 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
725 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
726 insert_field_into_struct (ctx->srecord_type, sfield);
727 splay_tree_insert (ctx->sfield_map,
728 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
729 (splay_tree_value) sfield);
732 sfield = field;
733 insert_field_into_struct ((mask & 1) ? ctx->record_type
734 : ctx->srecord_type, field);
737 if (mask & 1)
738 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
739 if ((mask & 2) && ctx->sfield_map)
740 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
743 static tree
744 install_var_local (tree var, omp_context *ctx)
746 tree new_var = omp_copy_decl_1 (var, ctx);
747 insert_decl_map (&ctx->cb, var, new_var);
748 return new_var;
751 /* Adjust the replacement for DECL in CTX for the new context. This means
752 copying the DECL_VALUE_EXPR, and fixing up the type. */
754 static void
755 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
757 tree new_decl, size;
759 new_decl = lookup_decl (decl, ctx);
761 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
763 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
764 && DECL_HAS_VALUE_EXPR_P (decl))
766 tree ve = DECL_VALUE_EXPR (decl);
767 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
768 SET_DECL_VALUE_EXPR (new_decl, ve);
769 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
772 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
774 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
775 if (size == error_mark_node)
776 size = TYPE_SIZE (TREE_TYPE (new_decl));
777 DECL_SIZE (new_decl) = size;
779 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
780 if (size == error_mark_node)
781 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
782 DECL_SIZE_UNIT (new_decl) = size;
786 /* The callback for remap_decl. Search all containing contexts for a
787 mapping of the variable; this avoids having to duplicate the splay
788 tree ahead of time. We know a mapping doesn't already exist in the
789 given context. Create new mappings to implement default semantics. */
791 static tree
792 omp_copy_decl (tree var, copy_body_data *cb)
794 omp_context *ctx = (omp_context *) cb;
795 tree new_var;
797 if (TREE_CODE (var) == LABEL_DECL)
799 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
800 DECL_CONTEXT (new_var) = current_function_decl;
801 insert_decl_map (&ctx->cb, var, new_var);
802 return new_var;
805 while (!is_taskreg_ctx (ctx))
807 ctx = ctx->outer;
808 if (ctx == NULL)
809 return var;
810 new_var = maybe_lookup_decl (var, ctx);
811 if (new_var)
812 return new_var;
815 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
816 return var;
818 return error_mark_node;
821 /* Create a new context, with OUTER_CTX being the surrounding context. */
823 static omp_context *
824 new_omp_context (gimple *stmt, omp_context *outer_ctx)
826 omp_context *ctx = XCNEW (omp_context);
828 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
829 (splay_tree_value) ctx);
830 ctx->stmt = stmt;
832 if (outer_ctx)
834 ctx->outer = outer_ctx;
835 ctx->cb = outer_ctx->cb;
836 ctx->cb.block = NULL;
837 ctx->depth = outer_ctx->depth + 1;
839 else
841 ctx->cb.src_fn = current_function_decl;
842 ctx->cb.dst_fn = current_function_decl;
843 ctx->cb.src_node = cgraph_node::get (current_function_decl);
844 gcc_checking_assert (ctx->cb.src_node);
845 ctx->cb.dst_node = ctx->cb.src_node;
846 ctx->cb.src_cfun = cfun;
847 ctx->cb.copy_decl = omp_copy_decl;
848 ctx->cb.eh_lp_nr = 0;
849 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
850 ctx->depth = 1;
853 ctx->cb.decl_map = new hash_map<tree, tree>;
855 return ctx;
858 static gimple_seq maybe_catch_exception (gimple_seq);
860 /* Finalize task copyfn. */
862 static void
863 finalize_task_copyfn (gomp_task *task_stmt)
865 struct function *child_cfun;
866 tree child_fn;
867 gimple_seq seq = NULL, new_seq;
868 gbind *bind;
870 child_fn = gimple_omp_task_copy_fn (task_stmt);
871 if (child_fn == NULL_TREE)
872 return;
874 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
875 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
877 push_cfun (child_cfun);
878 bind = gimplify_body (child_fn, false);
879 gimple_seq_add_stmt (&seq, bind);
880 new_seq = maybe_catch_exception (seq);
881 if (new_seq != seq)
883 bind = gimple_build_bind (NULL, new_seq, NULL);
884 seq = NULL;
885 gimple_seq_add_stmt (&seq, bind);
887 gimple_set_body (child_fn, seq);
888 pop_cfun ();
890 /* Inform the callgraph about the new function. */
891 cgraph_node *node = cgraph_node::get_create (child_fn);
892 node->parallelized_function = 1;
893 cgraph_node::add_new_function (child_fn, false);
896 /* Destroy a omp_context data structures. Called through the splay tree
897 value delete callback. */
899 static void
900 delete_omp_context (splay_tree_value value)
902 omp_context *ctx = (omp_context *) value;
904 delete ctx->cb.decl_map;
906 if (ctx->field_map)
907 splay_tree_delete (ctx->field_map);
908 if (ctx->sfield_map)
909 splay_tree_delete (ctx->sfield_map);
911 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
912 it produces corrupt debug information. */
913 if (ctx->record_type)
915 tree t;
916 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
917 DECL_ABSTRACT_ORIGIN (t) = NULL;
919 if (ctx->srecord_type)
921 tree t;
922 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
923 DECL_ABSTRACT_ORIGIN (t) = NULL;
926 if (is_task_ctx (ctx))
927 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
929 XDELETE (ctx);
932 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
933 context. */
935 static void
936 fixup_child_record_type (omp_context *ctx)
938 tree f, type = ctx->record_type;
940 if (!ctx->receiver_decl)
941 return;
942 /* ??? It isn't sufficient to just call remap_type here, because
943 variably_modified_type_p doesn't work the way we expect for
944 record types. Testing each field for whether it needs remapping
945 and creating a new record by hand works, however. */
946 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
947 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
948 break;
949 if (f)
951 tree name, new_fields = NULL;
953 type = lang_hooks.types.make_type (RECORD_TYPE);
954 name = DECL_NAME (TYPE_NAME (ctx->record_type));
955 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
956 TYPE_DECL, name, type);
957 TYPE_NAME (type) = name;
959 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
961 tree new_f = copy_node (f);
962 DECL_CONTEXT (new_f) = type;
963 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
964 DECL_CHAIN (new_f) = new_fields;
965 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
966 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
967 &ctx->cb, NULL);
968 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
969 &ctx->cb, NULL);
970 new_fields = new_f;
972 /* Arrange to be able to look up the receiver field
973 given the sender field. */
974 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
975 (splay_tree_value) new_f);
977 TYPE_FIELDS (type) = nreverse (new_fields);
978 layout_type (type);
981 /* In a target region we never modify any of the pointers in *.omp_data_i,
982 so attempt to help the optimizers. */
983 if (is_gimple_omp_offloaded (ctx->stmt))
984 type = build_qualified_type (type, TYPE_QUAL_CONST);
986 TREE_TYPE (ctx->receiver_decl)
987 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
990 /* Instantiate decls as necessary in CTX to satisfy the data sharing
991 specified by CLAUSES. If BASE_POINTERS_RESTRICT, install var field with
992 restrict. */
994 static void
995 scan_sharing_clauses (tree clauses, omp_context *ctx,
996 bool base_pointers_restrict = false)
998 tree c, decl;
999 bool scan_array_reductions = false;
1001 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1003 bool by_ref;
1005 switch (OMP_CLAUSE_CODE (c))
1007 case OMP_CLAUSE_PRIVATE:
1008 decl = OMP_CLAUSE_DECL (c);
1009 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1010 goto do_private;
1011 else if (!is_variable_sized (decl))
1012 install_var_local (decl, ctx);
1013 break;
1015 case OMP_CLAUSE_SHARED:
1016 decl = OMP_CLAUSE_DECL (c);
1017 /* Ignore shared directives in teams construct. */
1018 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1020 /* Global variables don't need to be copied,
1021 the receiver side will use them directly. */
1022 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1023 if (is_global_var (odecl))
1024 break;
1025 insert_decl_map (&ctx->cb, decl, odecl);
1026 break;
1028 gcc_assert (is_taskreg_ctx (ctx));
1029 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1030 || !is_variable_sized (decl));
1031 /* Global variables don't need to be copied,
1032 the receiver side will use them directly. */
1033 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1034 break;
1035 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1037 use_pointer_for_field (decl, ctx);
1038 break;
1040 by_ref = use_pointer_for_field (decl, NULL);
1041 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1042 || TREE_ADDRESSABLE (decl)
1043 || by_ref
1044 || omp_is_reference (decl))
1046 by_ref = use_pointer_for_field (decl, ctx);
1047 install_var_field (decl, by_ref, 3, ctx);
1048 install_var_local (decl, ctx);
1049 break;
1051 /* We don't need to copy const scalar vars back. */
1052 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1053 goto do_private;
1055 case OMP_CLAUSE_REDUCTION:
1056 decl = OMP_CLAUSE_DECL (c);
1057 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1058 && TREE_CODE (decl) == MEM_REF)
1060 tree t = TREE_OPERAND (decl, 0);
1061 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1062 t = TREE_OPERAND (t, 0);
1063 if (TREE_CODE (t) == INDIRECT_REF
1064 || TREE_CODE (t) == ADDR_EXPR)
1065 t = TREE_OPERAND (t, 0);
1066 install_var_local (t, ctx);
1067 if (is_taskreg_ctx (ctx)
1068 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1069 && !is_variable_sized (t))
1071 by_ref = use_pointer_for_field (t, ctx);
1072 install_var_field (t, by_ref, 3, ctx);
1074 break;
1076 goto do_private;
1078 case OMP_CLAUSE_LASTPRIVATE:
1079 /* Let the corresponding firstprivate clause create
1080 the variable. */
1081 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1082 break;
1083 /* FALLTHRU */
1085 case OMP_CLAUSE_FIRSTPRIVATE:
1086 case OMP_CLAUSE_LINEAR:
1087 decl = OMP_CLAUSE_DECL (c);
1088 do_private:
1089 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1090 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1091 && is_gimple_omp_offloaded (ctx->stmt))
1093 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1094 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1095 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1096 install_var_field (decl, true, 3, ctx);
1097 else
1098 install_var_field (decl, false, 3, ctx);
1100 if (is_variable_sized (decl))
1102 if (is_task_ctx (ctx))
1103 install_var_field (decl, false, 1, ctx);
1104 break;
1106 else if (is_taskreg_ctx (ctx))
1108 bool global
1109 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1110 by_ref = use_pointer_for_field (decl, NULL);
1112 if (is_task_ctx (ctx)
1113 && (global || by_ref || omp_is_reference (decl)))
1115 install_var_field (decl, false, 1, ctx);
1116 if (!global)
1117 install_var_field (decl, by_ref, 2, ctx);
1119 else if (!global)
1120 install_var_field (decl, by_ref, 3, ctx);
1122 install_var_local (decl, ctx);
1123 break;
1125 case OMP_CLAUSE_USE_DEVICE_PTR:
1126 decl = OMP_CLAUSE_DECL (c);
1127 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1128 install_var_field (decl, true, 3, ctx);
1129 else
1130 install_var_field (decl, false, 3, ctx);
1131 if (DECL_SIZE (decl)
1132 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1134 tree decl2 = DECL_VALUE_EXPR (decl);
1135 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1136 decl2 = TREE_OPERAND (decl2, 0);
1137 gcc_assert (DECL_P (decl2));
1138 install_var_local (decl2, ctx);
1140 install_var_local (decl, ctx);
1141 break;
1143 case OMP_CLAUSE_IS_DEVICE_PTR:
1144 decl = OMP_CLAUSE_DECL (c);
1145 goto do_private;
1147 case OMP_CLAUSE__LOOPTEMP_:
1148 gcc_assert (is_taskreg_ctx (ctx));
1149 decl = OMP_CLAUSE_DECL (c);
1150 install_var_field (decl, false, 3, ctx);
1151 install_var_local (decl, ctx);
1152 break;
1154 case OMP_CLAUSE_COPYPRIVATE:
1155 case OMP_CLAUSE_COPYIN:
1156 decl = OMP_CLAUSE_DECL (c);
1157 by_ref = use_pointer_for_field (decl, NULL);
1158 install_var_field (decl, by_ref, 3, ctx);
1159 break;
1161 case OMP_CLAUSE_FINAL:
1162 case OMP_CLAUSE_IF:
1163 case OMP_CLAUSE_NUM_THREADS:
1164 case OMP_CLAUSE_NUM_TEAMS:
1165 case OMP_CLAUSE_THREAD_LIMIT:
1166 case OMP_CLAUSE_DEVICE:
1167 case OMP_CLAUSE_SCHEDULE:
1168 case OMP_CLAUSE_DIST_SCHEDULE:
1169 case OMP_CLAUSE_DEPEND:
1170 case OMP_CLAUSE_PRIORITY:
1171 case OMP_CLAUSE_GRAINSIZE:
1172 case OMP_CLAUSE_NUM_TASKS:
1173 case OMP_CLAUSE__CILK_FOR_COUNT_:
1174 case OMP_CLAUSE_NUM_GANGS:
1175 case OMP_CLAUSE_NUM_WORKERS:
1176 case OMP_CLAUSE_VECTOR_LENGTH:
1177 if (ctx->outer)
1178 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1179 break;
1181 case OMP_CLAUSE_TO:
1182 case OMP_CLAUSE_FROM:
1183 case OMP_CLAUSE_MAP:
1184 if (ctx->outer)
1185 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1186 decl = OMP_CLAUSE_DECL (c);
1187 /* Global variables with "omp declare target" attribute
1188 don't need to be copied, the receiver side will use them
1189 directly. However, global variables with "omp declare target link"
1190 attribute need to be copied. */
1191 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1192 && DECL_P (decl)
1193 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1194 && (OMP_CLAUSE_MAP_KIND (c)
1195 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1196 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1197 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1198 && varpool_node::get_create (decl)->offloadable
1199 && !lookup_attribute ("omp declare target link",
1200 DECL_ATTRIBUTES (decl)))
1201 break;
1202 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1203 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1205 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1206 not offloaded; there is nothing to map for those. */
1207 if (!is_gimple_omp_offloaded (ctx->stmt)
1208 && !POINTER_TYPE_P (TREE_TYPE (decl))
1209 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1210 break;
1212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1213 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1214 || (OMP_CLAUSE_MAP_KIND (c)
1215 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1217 if (TREE_CODE (decl) == COMPONENT_REF
1218 || (TREE_CODE (decl) == INDIRECT_REF
1219 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1220 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1221 == REFERENCE_TYPE)))
1222 break;
1223 if (DECL_SIZE (decl)
1224 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1226 tree decl2 = DECL_VALUE_EXPR (decl);
1227 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1228 decl2 = TREE_OPERAND (decl2, 0);
1229 gcc_assert (DECL_P (decl2));
1230 install_var_local (decl2, ctx);
1232 install_var_local (decl, ctx);
1233 break;
1235 if (DECL_P (decl))
1237 if (DECL_SIZE (decl)
1238 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1240 tree decl2 = DECL_VALUE_EXPR (decl);
1241 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1242 decl2 = TREE_OPERAND (decl2, 0);
1243 gcc_assert (DECL_P (decl2));
1244 install_var_field (decl2, true, 3, ctx);
1245 install_var_local (decl2, ctx);
1246 install_var_local (decl, ctx);
1248 else
1250 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1251 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1252 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1253 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1254 install_var_field (decl, true, 7, ctx);
1255 else
1256 install_var_field (decl, true, 3, ctx,
1257 base_pointers_restrict);
1258 if (is_gimple_omp_offloaded (ctx->stmt)
1259 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1260 install_var_local (decl, ctx);
1263 else
1265 tree base = get_base_address (decl);
1266 tree nc = OMP_CLAUSE_CHAIN (c);
1267 if (DECL_P (base)
1268 && nc != NULL_TREE
1269 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1270 && OMP_CLAUSE_DECL (nc) == base
1271 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1272 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1274 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1275 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1277 else
1279 if (ctx->outer)
1281 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1282 decl = OMP_CLAUSE_DECL (c);
1284 gcc_assert (!splay_tree_lookup (ctx->field_map,
1285 (splay_tree_key) decl));
1286 tree field
1287 = build_decl (OMP_CLAUSE_LOCATION (c),
1288 FIELD_DECL, NULL_TREE, ptr_type_node);
1289 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1290 insert_field_into_struct (ctx->record_type, field);
1291 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1292 (splay_tree_value) field);
1295 break;
1297 case OMP_CLAUSE__GRIDDIM_:
1298 if (ctx->outer)
1300 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1301 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1303 break;
1305 case OMP_CLAUSE_NOWAIT:
1306 case OMP_CLAUSE_ORDERED:
1307 case OMP_CLAUSE_COLLAPSE:
1308 case OMP_CLAUSE_UNTIED:
1309 case OMP_CLAUSE_MERGEABLE:
1310 case OMP_CLAUSE_PROC_BIND:
1311 case OMP_CLAUSE_SAFELEN:
1312 case OMP_CLAUSE_SIMDLEN:
1313 case OMP_CLAUSE_THREADS:
1314 case OMP_CLAUSE_SIMD:
1315 case OMP_CLAUSE_NOGROUP:
1316 case OMP_CLAUSE_DEFAULTMAP:
1317 case OMP_CLAUSE_ASYNC:
1318 case OMP_CLAUSE_WAIT:
1319 case OMP_CLAUSE_GANG:
1320 case OMP_CLAUSE_WORKER:
1321 case OMP_CLAUSE_VECTOR:
1322 case OMP_CLAUSE_INDEPENDENT:
1323 case OMP_CLAUSE_AUTO:
1324 case OMP_CLAUSE_SEQ:
1325 case OMP_CLAUSE_TILE:
1326 case OMP_CLAUSE__SIMT_:
1327 case OMP_CLAUSE_DEFAULT:
1328 break;
1330 case OMP_CLAUSE_ALIGNED:
1331 decl = OMP_CLAUSE_DECL (c);
1332 if (is_global_var (decl)
1333 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1334 install_var_local (decl, ctx);
1335 break;
1337 case OMP_CLAUSE__CACHE_:
1338 default:
1339 gcc_unreachable ();
1343 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1345 switch (OMP_CLAUSE_CODE (c))
1347 case OMP_CLAUSE_LASTPRIVATE:
1348 /* Let the corresponding firstprivate clause create
1349 the variable. */
1350 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1351 scan_array_reductions = true;
1352 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1353 break;
1354 /* FALLTHRU */
1356 case OMP_CLAUSE_FIRSTPRIVATE:
1357 case OMP_CLAUSE_PRIVATE:
1358 case OMP_CLAUSE_LINEAR:
1359 case OMP_CLAUSE_IS_DEVICE_PTR:
1360 decl = OMP_CLAUSE_DECL (c);
1361 if (is_variable_sized (decl))
1363 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1364 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1365 && is_gimple_omp_offloaded (ctx->stmt))
1367 tree decl2 = DECL_VALUE_EXPR (decl);
1368 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1369 decl2 = TREE_OPERAND (decl2, 0);
1370 gcc_assert (DECL_P (decl2));
1371 install_var_local (decl2, ctx);
1372 fixup_remapped_decl (decl2, ctx, false);
1374 install_var_local (decl, ctx);
1376 fixup_remapped_decl (decl, ctx,
1377 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1378 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1379 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1380 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1381 scan_array_reductions = true;
1382 break;
1384 case OMP_CLAUSE_REDUCTION:
1385 decl = OMP_CLAUSE_DECL (c);
1386 if (TREE_CODE (decl) != MEM_REF)
1388 if (is_variable_sized (decl))
1389 install_var_local (decl, ctx);
1390 fixup_remapped_decl (decl, ctx, false);
1392 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1393 scan_array_reductions = true;
1394 break;
1396 case OMP_CLAUSE_SHARED:
1397 /* Ignore shared directives in teams construct. */
1398 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1399 break;
1400 decl = OMP_CLAUSE_DECL (c);
1401 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1402 break;
1403 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1405 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1406 ctx->outer)))
1407 break;
1408 bool by_ref = use_pointer_for_field (decl, ctx);
1409 install_var_field (decl, by_ref, 11, ctx);
1410 break;
1412 fixup_remapped_decl (decl, ctx, false);
1413 break;
1415 case OMP_CLAUSE_MAP:
1416 if (!is_gimple_omp_offloaded (ctx->stmt))
1417 break;
1418 decl = OMP_CLAUSE_DECL (c);
1419 if (DECL_P (decl)
1420 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1421 && (OMP_CLAUSE_MAP_KIND (c)
1422 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1423 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1424 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1425 && varpool_node::get_create (decl)->offloadable)
1426 break;
1427 if (DECL_P (decl))
1429 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1430 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1431 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1432 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1434 tree new_decl = lookup_decl (decl, ctx);
1435 TREE_TYPE (new_decl)
1436 = remap_type (TREE_TYPE (decl), &ctx->cb);
1438 else if (DECL_SIZE (decl)
1439 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1441 tree decl2 = DECL_VALUE_EXPR (decl);
1442 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1443 decl2 = TREE_OPERAND (decl2, 0);
1444 gcc_assert (DECL_P (decl2));
1445 fixup_remapped_decl (decl2, ctx, false);
1446 fixup_remapped_decl (decl, ctx, true);
1448 else
1449 fixup_remapped_decl (decl, ctx, false);
1451 break;
1453 case OMP_CLAUSE_COPYPRIVATE:
1454 case OMP_CLAUSE_COPYIN:
1455 case OMP_CLAUSE_DEFAULT:
1456 case OMP_CLAUSE_IF:
1457 case OMP_CLAUSE_NUM_THREADS:
1458 case OMP_CLAUSE_NUM_TEAMS:
1459 case OMP_CLAUSE_THREAD_LIMIT:
1460 case OMP_CLAUSE_DEVICE:
1461 case OMP_CLAUSE_SCHEDULE:
1462 case OMP_CLAUSE_DIST_SCHEDULE:
1463 case OMP_CLAUSE_NOWAIT:
1464 case OMP_CLAUSE_ORDERED:
1465 case OMP_CLAUSE_COLLAPSE:
1466 case OMP_CLAUSE_UNTIED:
1467 case OMP_CLAUSE_FINAL:
1468 case OMP_CLAUSE_MERGEABLE:
1469 case OMP_CLAUSE_PROC_BIND:
1470 case OMP_CLAUSE_SAFELEN:
1471 case OMP_CLAUSE_SIMDLEN:
1472 case OMP_CLAUSE_ALIGNED:
1473 case OMP_CLAUSE_DEPEND:
1474 case OMP_CLAUSE__LOOPTEMP_:
1475 case OMP_CLAUSE_TO:
1476 case OMP_CLAUSE_FROM:
1477 case OMP_CLAUSE_PRIORITY:
1478 case OMP_CLAUSE_GRAINSIZE:
1479 case OMP_CLAUSE_NUM_TASKS:
1480 case OMP_CLAUSE_THREADS:
1481 case OMP_CLAUSE_SIMD:
1482 case OMP_CLAUSE_NOGROUP:
1483 case OMP_CLAUSE_DEFAULTMAP:
1484 case OMP_CLAUSE_USE_DEVICE_PTR:
1485 case OMP_CLAUSE__CILK_FOR_COUNT_:
1486 case OMP_CLAUSE_ASYNC:
1487 case OMP_CLAUSE_WAIT:
1488 case OMP_CLAUSE_NUM_GANGS:
1489 case OMP_CLAUSE_NUM_WORKERS:
1490 case OMP_CLAUSE_VECTOR_LENGTH:
1491 case OMP_CLAUSE_GANG:
1492 case OMP_CLAUSE_WORKER:
1493 case OMP_CLAUSE_VECTOR:
1494 case OMP_CLAUSE_INDEPENDENT:
1495 case OMP_CLAUSE_AUTO:
1496 case OMP_CLAUSE_SEQ:
1497 case OMP_CLAUSE_TILE:
1498 case OMP_CLAUSE__GRIDDIM_:
1499 case OMP_CLAUSE__SIMT_:
1500 break;
1502 case OMP_CLAUSE__CACHE_:
1503 default:
1504 gcc_unreachable ();
1508 gcc_checking_assert (!scan_array_reductions
1509 || !is_gimple_omp_oacc (ctx->stmt));
1510 if (scan_array_reductions)
1512 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1513 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1514 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1516 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1517 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1519 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1520 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1521 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1522 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1523 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1524 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1528 /* Create a new name for omp child function. Returns an identifier. If
1529 IS_CILK_FOR is true then the suffix for the child function is
1530 "_cilk_for_fn." */
1532 static tree
1533 create_omp_child_function_name (bool task_copy, bool is_cilk_for)
1535 if (is_cilk_for)
1536 return clone_function_name (current_function_decl, "_cilk_for_fn");
1537 return clone_function_name (current_function_decl,
1538 task_copy ? "_omp_cpyfn" : "_omp_fn");
1541 /* Returns the type of the induction variable for the child function for
1542 _Cilk_for and the types for _high and _low variables based on TYPE. */
1544 static tree
1545 cilk_for_check_loop_diff_type (tree type)
1547 if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
1549 if (TYPE_UNSIGNED (type))
1550 return uint32_type_node;
1551 else
1552 return integer_type_node;
1554 else
1556 if (TYPE_UNSIGNED (type))
1557 return uint64_type_node;
1558 else
1559 return long_long_integer_type_node;
1563 /* Return true if CTX may belong to offloaded code: either if current function
1564 is offloaded, or any enclosing context corresponds to a target region. */
1566 static bool
1567 omp_maybe_offloaded_ctx (omp_context *ctx)
1569 if (cgraph_node::get (current_function_decl)->offloadable)
1570 return true;
1571 for (; ctx; ctx = ctx->outer)
1572 if (is_gimple_omp_offloaded (ctx->stmt))
1573 return true;
1574 return false;
1577 /* Build a decl for the omp child function. It'll not contain a body
1578 yet, just the bare decl. */
1580 static void
1581 create_omp_child_function (omp_context *ctx, bool task_copy)
1583 tree decl, type, name, t;
1585 tree cilk_for_count
1586 = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1587 ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
1588 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
1589 tree cilk_var_type = NULL_TREE;
1591 name = create_omp_child_function_name (task_copy,
1592 cilk_for_count != NULL_TREE);
1593 if (task_copy)
1594 type = build_function_type_list (void_type_node, ptr_type_node,
1595 ptr_type_node, NULL_TREE);
1596 else if (cilk_for_count)
1598 type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
1599 cilk_var_type = cilk_for_check_loop_diff_type (type);
1600 type = build_function_type_list (void_type_node, ptr_type_node,
1601 cilk_var_type, cilk_var_type, NULL_TREE);
1603 else
1604 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1606 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1608 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1609 || !task_copy);
1610 if (!task_copy)
1611 ctx->cb.dst_fn = decl;
1612 else
1613 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1615 TREE_STATIC (decl) = 1;
1616 TREE_USED (decl) = 1;
1617 DECL_ARTIFICIAL (decl) = 1;
1618 DECL_IGNORED_P (decl) = 0;
1619 TREE_PUBLIC (decl) = 0;
1620 DECL_UNINLINABLE (decl) = 1;
1621 DECL_EXTERNAL (decl) = 0;
1622 DECL_CONTEXT (decl) = NULL_TREE;
1623 DECL_INITIAL (decl) = make_node (BLOCK);
1624 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1625 if (omp_maybe_offloaded_ctx (ctx))
1627 cgraph_node::get_create (decl)->offloadable = 1;
1628 if (ENABLE_OFFLOADING)
1629 g->have_offload = true;
1632 if (cgraph_node::get_create (decl)->offloadable
1633 && !lookup_attribute ("omp declare target",
1634 DECL_ATTRIBUTES (current_function_decl)))
1636 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1637 ? "omp target entrypoint"
1638 : "omp declare target");
1639 DECL_ATTRIBUTES (decl)
1640 = tree_cons (get_identifier (target_attr),
1641 NULL_TREE, DECL_ATTRIBUTES (decl));
1644 t = build_decl (DECL_SOURCE_LOCATION (decl),
1645 RESULT_DECL, NULL_TREE, void_type_node);
1646 DECL_ARTIFICIAL (t) = 1;
1647 DECL_IGNORED_P (t) = 1;
1648 DECL_CONTEXT (t) = decl;
1649 DECL_RESULT (decl) = t;
1651 /* _Cilk_for's child function requires two extra parameters called
1652 __low and __high that are set the by Cilk runtime when it calls this
1653 function. */
1654 if (cilk_for_count)
1656 t = build_decl (DECL_SOURCE_LOCATION (decl),
1657 PARM_DECL, get_identifier ("__high"), cilk_var_type);
1658 DECL_ARTIFICIAL (t) = 1;
1659 DECL_NAMELESS (t) = 1;
1660 DECL_ARG_TYPE (t) = ptr_type_node;
1661 DECL_CONTEXT (t) = current_function_decl;
1662 TREE_USED (t) = 1;
1663 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1664 DECL_ARGUMENTS (decl) = t;
1666 t = build_decl (DECL_SOURCE_LOCATION (decl),
1667 PARM_DECL, get_identifier ("__low"), cilk_var_type);
1668 DECL_ARTIFICIAL (t) = 1;
1669 DECL_NAMELESS (t) = 1;
1670 DECL_ARG_TYPE (t) = ptr_type_node;
1671 DECL_CONTEXT (t) = current_function_decl;
1672 TREE_USED (t) = 1;
1673 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1674 DECL_ARGUMENTS (decl) = t;
1677 tree data_name = get_identifier (".omp_data_i");
1678 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1679 ptr_type_node);
1680 DECL_ARTIFICIAL (t) = 1;
1681 DECL_NAMELESS (t) = 1;
1682 DECL_ARG_TYPE (t) = ptr_type_node;
1683 DECL_CONTEXT (t) = current_function_decl;
1684 TREE_USED (t) = 1;
1685 TREE_READONLY (t) = 1;
1686 if (cilk_for_count)
1687 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1688 DECL_ARGUMENTS (decl) = t;
1689 if (!task_copy)
1690 ctx->receiver_decl = t;
1691 else
1693 t = build_decl (DECL_SOURCE_LOCATION (decl),
1694 PARM_DECL, get_identifier (".omp_data_o"),
1695 ptr_type_node);
1696 DECL_ARTIFICIAL (t) = 1;
1697 DECL_NAMELESS (t) = 1;
1698 DECL_ARG_TYPE (t) = ptr_type_node;
1699 DECL_CONTEXT (t) = current_function_decl;
1700 TREE_USED (t) = 1;
1701 TREE_ADDRESSABLE (t) = 1;
1702 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1703 DECL_ARGUMENTS (decl) = t;
1706 /* Allocate memory for the function structure. The call to
1707 allocate_struct_function clobbers CFUN, so we need to restore
1708 it afterward. */
1709 push_struct_function (decl);
1710 cfun->function_end_locus = gimple_location (ctx->stmt);
1711 init_tree_ssa (cfun);
1712 pop_cfun ();
1715 /* Callback for walk_gimple_seq. Check if combined parallel
1716 contains gimple_omp_for_combined_into_p OMP_FOR. */
1718 tree
1719 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1720 bool *handled_ops_p,
1721 struct walk_stmt_info *wi)
1723 gimple *stmt = gsi_stmt (*gsi_p);
1725 *handled_ops_p = true;
1726 switch (gimple_code (stmt))
1728 WALK_SUBSTMTS;
1730 case GIMPLE_OMP_FOR:
1731 if (gimple_omp_for_combined_into_p (stmt)
1732 && gimple_omp_for_kind (stmt)
1733 == *(const enum gf_mask *) (wi->info))
1735 wi->info = stmt;
1736 return integer_zero_node;
1738 break;
1739 default:
1740 break;
1742 return NULL;
1745 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1747 static void
1748 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1749 omp_context *outer_ctx)
1751 struct walk_stmt_info wi;
1753 memset (&wi, 0, sizeof (wi));
1754 wi.val_only = true;
1755 wi.info = (void *) &msk;
1756 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1757 if (wi.info != (void *) &msk)
1759 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1760 struct omp_for_data fd;
1761 omp_extract_for_data (for_stmt, &fd, NULL);
1762 /* We need two temporaries with fd.loop.v type (istart/iend)
1763 and then (fd.collapse - 1) temporaries with the same
1764 type for count2 ... countN-1 vars if not constant. */
1765 size_t count = 2, i;
1766 tree type = fd.iter_type;
1767 if (fd.collapse > 1
1768 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1770 count += fd.collapse - 1;
1771 /* If there are lastprivate clauses on the inner
1772 GIMPLE_OMP_FOR, add one more temporaries for the total number
1773 of iterations (product of count1 ... countN-1). */
1774 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1775 OMP_CLAUSE_LASTPRIVATE))
1776 count++;
1777 else if (msk == GF_OMP_FOR_KIND_FOR
1778 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1779 OMP_CLAUSE_LASTPRIVATE))
1780 count++;
1782 for (i = 0; i < count; i++)
1784 tree temp = create_tmp_var (type);
1785 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1786 insert_decl_map (&outer_ctx->cb, temp, temp);
1787 OMP_CLAUSE_DECL (c) = temp;
1788 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1789 gimple_omp_taskreg_set_clauses (stmt, c);
1794 /* Scan an OpenMP parallel directive. */
1796 static void
1797 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1799 omp_context *ctx;
1800 tree name;
1801 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1803 /* Ignore parallel directives with empty bodies, unless there
1804 are copyin clauses. */
1805 if (optimize > 0
1806 && empty_body_p (gimple_omp_body (stmt))
1807 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1808 OMP_CLAUSE_COPYIN) == NULL)
1810 gsi_replace (gsi, gimple_build_nop (), false);
1811 return;
1814 if (gimple_omp_parallel_combined_p (stmt))
1815 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1817 ctx = new_omp_context (stmt, outer_ctx);
1818 taskreg_contexts.safe_push (ctx);
1819 if (taskreg_nesting_level > 1)
1820 ctx->is_nested = true;
1821 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1822 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1823 name = create_tmp_var_name (".omp_data_s");
1824 name = build_decl (gimple_location (stmt),
1825 TYPE_DECL, name, ctx->record_type);
1826 DECL_ARTIFICIAL (name) = 1;
1827 DECL_NAMELESS (name) = 1;
1828 TYPE_NAME (ctx->record_type) = name;
1829 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1830 if (!gimple_omp_parallel_grid_phony (stmt))
1832 create_omp_child_function (ctx, false);
1833 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1836 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1837 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1839 if (TYPE_FIELDS (ctx->record_type) == NULL)
1840 ctx->record_type = ctx->receiver_decl = NULL;
1843 /* Scan an OpenMP task directive. */
1845 static void
1846 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1848 omp_context *ctx;
1849 tree name, t;
1850 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1852 /* Ignore task directives with empty bodies, unless they have depend
1853 clause. */
1854 if (optimize > 0
1855 && empty_body_p (gimple_omp_body (stmt))
1856 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1858 gsi_replace (gsi, gimple_build_nop (), false);
1859 return;
1862 if (gimple_omp_task_taskloop_p (stmt))
1863 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1865 ctx = new_omp_context (stmt, outer_ctx);
1866 taskreg_contexts.safe_push (ctx);
1867 if (taskreg_nesting_level > 1)
1868 ctx->is_nested = true;
1869 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1870 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1871 name = create_tmp_var_name (".omp_data_s");
1872 name = build_decl (gimple_location (stmt),
1873 TYPE_DECL, name, ctx->record_type);
1874 DECL_ARTIFICIAL (name) = 1;
1875 DECL_NAMELESS (name) = 1;
1876 TYPE_NAME (ctx->record_type) = name;
1877 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1878 create_omp_child_function (ctx, false);
1879 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1881 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1883 if (ctx->srecord_type)
1885 name = create_tmp_var_name (".omp_data_a");
1886 name = build_decl (gimple_location (stmt),
1887 TYPE_DECL, name, ctx->srecord_type);
1888 DECL_ARTIFICIAL (name) = 1;
1889 DECL_NAMELESS (name) = 1;
1890 TYPE_NAME (ctx->srecord_type) = name;
1891 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1892 create_omp_child_function (ctx, true);
1895 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1897 if (TYPE_FIELDS (ctx->record_type) == NULL)
1899 ctx->record_type = ctx->receiver_decl = NULL;
1900 t = build_int_cst (long_integer_type_node, 0);
1901 gimple_omp_task_set_arg_size (stmt, t);
1902 t = build_int_cst (long_integer_type_node, 1);
1903 gimple_omp_task_set_arg_align (stmt, t);
1907 /* Helper function for finish_taskreg_scan, called through walk_tree.
1908 If maybe_lookup_decl_in_outer_context returns non-NULL for some
1909 tree, replace it in the expression. */
1911 static tree
1912 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1914 if (VAR_P (*tp))
1916 omp_context *ctx = (omp_context *) data;
1917 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1918 if (t != *tp)
1920 if (DECL_HAS_VALUE_EXPR_P (t))
1921 t = unshare_expr (DECL_VALUE_EXPR (t));
1922 *tp = t;
1924 *walk_subtrees = 0;
1926 else if (IS_TYPE_OR_DECL_P (*tp))
1927 *walk_subtrees = 0;
1928 return NULL_TREE;
1931 /* If any decls have been made addressable during scan_omp,
1932 adjust their fields if needed, and layout record types
1933 of parallel/task constructs. */
1935 static void
1936 finish_taskreg_scan (omp_context *ctx)
1938 if (ctx->record_type == NULL_TREE)
1939 return;
1941 /* If any task_shared_vars were needed, verify all
1942 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1943 statements if use_pointer_for_field hasn't changed
1944 because of that. If it did, update field types now. */
1945 if (task_shared_vars)
1947 tree c;
1949 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1950 c; c = OMP_CLAUSE_CHAIN (c))
1951 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1952 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1954 tree decl = OMP_CLAUSE_DECL (c);
1956 /* Global variables don't need to be copied,
1957 the receiver side will use them directly. */
1958 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1959 continue;
1960 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1961 || !use_pointer_for_field (decl, ctx))
1962 continue;
1963 tree field = lookup_field (decl, ctx);
1964 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1965 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1966 continue;
1967 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1968 TREE_THIS_VOLATILE (field) = 0;
1969 DECL_USER_ALIGN (field) = 0;
1970 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1971 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1972 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1973 if (ctx->srecord_type)
1975 tree sfield = lookup_sfield (decl, ctx);
1976 TREE_TYPE (sfield) = TREE_TYPE (field);
1977 TREE_THIS_VOLATILE (sfield) = 0;
1978 DECL_USER_ALIGN (sfield) = 0;
1979 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1980 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1981 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1986 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1988 layout_type (ctx->record_type);
1989 fixup_child_record_type (ctx);
1991 else
1993 location_t loc = gimple_location (ctx->stmt);
1994 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1995 /* Move VLA fields to the end. */
1996 p = &TYPE_FIELDS (ctx->record_type);
1997 while (*p)
1998 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1999 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2001 *q = *p;
2002 *p = TREE_CHAIN (*p);
2003 TREE_CHAIN (*q) = NULL_TREE;
2004 q = &TREE_CHAIN (*q);
2006 else
2007 p = &DECL_CHAIN (*p);
2008 *p = vla_fields;
2009 if (gimple_omp_task_taskloop_p (ctx->stmt))
2011 /* Move fields corresponding to first and second _looptemp_
2012 clause first. There are filled by GOMP_taskloop
2013 and thus need to be in specific positions. */
2014 tree c1 = gimple_omp_task_clauses (ctx->stmt);
2015 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2016 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2017 OMP_CLAUSE__LOOPTEMP_);
2018 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2019 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2020 p = &TYPE_FIELDS (ctx->record_type);
2021 while (*p)
2022 if (*p == f1 || *p == f2)
2023 *p = DECL_CHAIN (*p);
2024 else
2025 p = &DECL_CHAIN (*p);
2026 DECL_CHAIN (f1) = f2;
2027 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2028 TYPE_FIELDS (ctx->record_type) = f1;
2029 if (ctx->srecord_type)
2031 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2032 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2033 p = &TYPE_FIELDS (ctx->srecord_type);
2034 while (*p)
2035 if (*p == f1 || *p == f2)
2036 *p = DECL_CHAIN (*p);
2037 else
2038 p = &DECL_CHAIN (*p);
2039 DECL_CHAIN (f1) = f2;
2040 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2041 TYPE_FIELDS (ctx->srecord_type) = f1;
2044 layout_type (ctx->record_type);
2045 fixup_child_record_type (ctx);
2046 if (ctx->srecord_type)
2047 layout_type (ctx->srecord_type);
2048 tree t = fold_convert_loc (loc, long_integer_type_node,
2049 TYPE_SIZE_UNIT (ctx->record_type));
2050 if (TREE_CODE (t) != INTEGER_CST)
2052 t = unshare_expr (t);
2053 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2055 gimple_omp_task_set_arg_size (ctx->stmt, t);
2056 t = build_int_cst (long_integer_type_node,
2057 TYPE_ALIGN_UNIT (ctx->record_type));
2058 gimple_omp_task_set_arg_align (ctx->stmt, t);
2062 /* Find the enclosing offload context. */
2064 static omp_context *
2065 enclosing_target_ctx (omp_context *ctx)
2067 for (; ctx; ctx = ctx->outer)
2068 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2069 break;
2071 return ctx;
2074 /* Return true if ctx is part of an oacc kernels region. */
2076 static bool
2077 ctx_in_oacc_kernels_region (omp_context *ctx)
2079 for (;ctx != NULL; ctx = ctx->outer)
2081 gimple *stmt = ctx->stmt;
2082 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2083 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2084 return true;
2087 return false;
2090 /* Check the parallelism clauses inside a kernels regions.
2091 Until kernels handling moves to use the same loop indirection
2092 scheme as parallel, we need to do this checking early. */
2094 static unsigned
2095 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2097 bool checking = true;
2098 unsigned outer_mask = 0;
2099 unsigned this_mask = 0;
2100 bool has_seq = false, has_auto = false;
2102 if (ctx->outer)
2103 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2104 if (!stmt)
2106 checking = false;
2107 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2108 return outer_mask;
2109 stmt = as_a <gomp_for *> (ctx->stmt);
2112 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2114 switch (OMP_CLAUSE_CODE (c))
2116 case OMP_CLAUSE_GANG:
2117 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2118 break;
2119 case OMP_CLAUSE_WORKER:
2120 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2121 break;
2122 case OMP_CLAUSE_VECTOR:
2123 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2124 break;
2125 case OMP_CLAUSE_SEQ:
2126 has_seq = true;
2127 break;
2128 case OMP_CLAUSE_AUTO:
2129 has_auto = true;
2130 break;
2131 default:
2132 break;
2136 if (checking)
2138 if (has_seq && (this_mask || has_auto))
2139 error_at (gimple_location (stmt), "%<seq%> overrides other"
2140 " OpenACC loop specifiers");
2141 else if (has_auto && this_mask)
2142 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2143 " OpenACC loop specifiers");
2145 if (this_mask & outer_mask)
2146 error_at (gimple_location (stmt), "inner loop uses same"
2147 " OpenACC parallelism as containing loop");
2150 return outer_mask | this_mask;
2153 /* Scan a GIMPLE_OMP_FOR. */
2155 static omp_context *
2156 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2158 omp_context *ctx;
2159 size_t i;
2160 tree clauses = gimple_omp_for_clauses (stmt);
2162 ctx = new_omp_context (stmt, outer_ctx);
2164 if (is_gimple_omp_oacc (stmt))
2166 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2168 if (!tgt || is_oacc_parallel (tgt))
2169 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2171 char const *check = NULL;
2173 switch (OMP_CLAUSE_CODE (c))
2175 case OMP_CLAUSE_GANG:
2176 check = "gang";
2177 break;
2179 case OMP_CLAUSE_WORKER:
2180 check = "worker";
2181 break;
2183 case OMP_CLAUSE_VECTOR:
2184 check = "vector";
2185 break;
2187 default:
2188 break;
2191 if (check && OMP_CLAUSE_OPERAND (c, 0))
2192 error_at (gimple_location (stmt),
2193 "argument not permitted on %qs clause in"
2194 " OpenACC %<parallel%>", check);
2197 if (tgt && is_oacc_kernels (tgt))
2199 /* Strip out reductions, as they are not handled yet. */
2200 tree *prev_ptr = &clauses;
2202 while (tree probe = *prev_ptr)
2204 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2206 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2207 *prev_ptr = *next_ptr;
2208 else
2209 prev_ptr = next_ptr;
2212 gimple_omp_for_set_clauses (stmt, clauses);
2213 check_oacc_kernel_gwv (stmt, ctx);
2217 scan_sharing_clauses (clauses, ctx);
2219 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2220 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2222 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2223 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2224 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2225 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2227 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2228 return ctx;
2231 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2233 static void
2234 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2235 omp_context *outer_ctx)
2237 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2238 gsi_replace (gsi, bind, false);
2239 gimple_seq seq = NULL;
2240 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2241 tree cond = create_tmp_var_raw (integer_type_node);
2242 DECL_CONTEXT (cond) = current_function_decl;
2243 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2244 gimple_bind_set_vars (bind, cond);
2245 gimple_call_set_lhs (g, cond);
2246 gimple_seq_add_stmt (&seq, g);
2247 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2248 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2249 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2250 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2251 gimple_seq_add_stmt (&seq, g);
2252 g = gimple_build_label (lab1);
2253 gimple_seq_add_stmt (&seq, g);
2254 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2255 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2256 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2257 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2258 gimple_omp_for_set_clauses (new_stmt, clause);
2259 gimple_seq_add_stmt (&seq, new_stmt);
2260 g = gimple_build_goto (lab3);
2261 gimple_seq_add_stmt (&seq, g);
2262 g = gimple_build_label (lab2);
2263 gimple_seq_add_stmt (&seq, g);
2264 gimple_seq_add_stmt (&seq, stmt);
2265 g = gimple_build_label (lab3);
2266 gimple_seq_add_stmt (&seq, g);
2267 gimple_bind_set_body (bind, seq);
2268 update_stmt (bind);
2269 scan_omp_for (new_stmt, outer_ctx);
2270 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2273 /* Scan an OpenMP sections directive. */
2275 static void
2276 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2278 omp_context *ctx;
2280 ctx = new_omp_context (stmt, outer_ctx);
2281 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2282 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2285 /* Scan an OpenMP single directive. */
2287 static void
2288 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2290 omp_context *ctx;
2291 tree name;
2293 ctx = new_omp_context (stmt, outer_ctx);
2294 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2295 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2296 name = create_tmp_var_name (".omp_copy_s");
2297 name = build_decl (gimple_location (stmt),
2298 TYPE_DECL, name, ctx->record_type);
2299 TYPE_NAME (ctx->record_type) = name;
2301 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2302 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2304 if (TYPE_FIELDS (ctx->record_type) == NULL)
2305 ctx->record_type = NULL;
2306 else
2307 layout_type (ctx->record_type);
2310 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2311 used in the corresponding offloaded function are restrict. */
2313 static bool
2314 omp_target_base_pointers_restrict_p (tree clauses)
2316 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2317 used by OpenACC. */
2318 if (flag_openacc == 0)
2319 return false;
2321 /* I. Basic example:
2323 void foo (void)
2325 unsigned int a[2], b[2];
2327 #pragma acc kernels \
2328 copyout (a) \
2329 copyout (b)
2331 a[0] = 0;
2332 b[0] = 1;
2336 After gimplification, we have:
2338 #pragma omp target oacc_kernels \
2339 map(force_from:a [len: 8]) \
2340 map(force_from:b [len: 8])
2342 a[0] = 0;
2343 b[0] = 1;
2346 Because both mappings have the force prefix, we know that they will be
2347 allocated when calling the corresponding offloaded function, which means we
2348 can mark the base pointers for a and b in the offloaded function as
2349 restrict. */
2351 tree c;
2352 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2354 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2355 return false;
2357 switch (OMP_CLAUSE_MAP_KIND (c))
2359 case GOMP_MAP_FORCE_ALLOC:
2360 case GOMP_MAP_FORCE_TO:
2361 case GOMP_MAP_FORCE_FROM:
2362 case GOMP_MAP_FORCE_TOFROM:
2363 break;
2364 default:
2365 return false;
2369 return true;
2372 /* Scan a GIMPLE_OMP_TARGET. */
2374 static void
2375 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2377 omp_context *ctx;
2378 tree name;
2379 bool offloaded = is_gimple_omp_offloaded (stmt);
2380 tree clauses = gimple_omp_target_clauses (stmt);
2382 ctx = new_omp_context (stmt, outer_ctx);
2383 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2384 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2385 name = create_tmp_var_name (".omp_data_t");
2386 name = build_decl (gimple_location (stmt),
2387 TYPE_DECL, name, ctx->record_type);
2388 DECL_ARTIFICIAL (name) = 1;
2389 DECL_NAMELESS (name) = 1;
2390 TYPE_NAME (ctx->record_type) = name;
2391 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2393 bool base_pointers_restrict = false;
2394 if (offloaded)
2396 create_omp_child_function (ctx, false);
2397 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2399 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2400 if (base_pointers_restrict
2401 && dump_file && (dump_flags & TDF_DETAILS))
2402 fprintf (dump_file,
2403 "Base pointers in offloaded function are restrict\n");
2406 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2407 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2409 if (TYPE_FIELDS (ctx->record_type) == NULL)
2410 ctx->record_type = ctx->receiver_decl = NULL;
2411 else
2413 TYPE_FIELDS (ctx->record_type)
2414 = nreverse (TYPE_FIELDS (ctx->record_type));
2415 if (flag_checking)
2417 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2418 for (tree field = TYPE_FIELDS (ctx->record_type);
2419 field;
2420 field = DECL_CHAIN (field))
2421 gcc_assert (DECL_ALIGN (field) == align);
2423 layout_type (ctx->record_type);
2424 if (offloaded)
2425 fixup_child_record_type (ctx);
2429 /* Scan an OpenMP teams directive. */
2431 static void
2432 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2434 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2435 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2436 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2439 /* Check nesting restrictions. */
2440 static bool
2441 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2443 tree c;
2445 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2446 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2447 the original copy of its contents. */
2448 return true;
2450 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2451 inside an OpenACC CTX. */
2452 if (!(is_gimple_omp (stmt)
2453 && is_gimple_omp_oacc (stmt))
2454 /* Except for atomic codes that we share with OpenMP. */
2455 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2456 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2458 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2460 error_at (gimple_location (stmt),
2461 "non-OpenACC construct inside of OpenACC routine");
2462 return false;
2464 else
2465 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2466 if (is_gimple_omp (octx->stmt)
2467 && is_gimple_omp_oacc (octx->stmt))
2469 error_at (gimple_location (stmt),
2470 "non-OpenACC construct inside of OpenACC region");
2471 return false;
2475 if (ctx != NULL)
2477 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2478 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2480 c = NULL_TREE;
2481 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2483 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2484 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2486 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2487 && (ctx->outer == NULL
2488 || !gimple_omp_for_combined_into_p (ctx->stmt)
2489 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2490 || (gimple_omp_for_kind (ctx->outer->stmt)
2491 != GF_OMP_FOR_KIND_FOR)
2492 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2494 error_at (gimple_location (stmt),
2495 "%<ordered simd threads%> must be closely "
2496 "nested inside of %<for simd%> region");
2497 return false;
2499 return true;
2502 error_at (gimple_location (stmt),
2503 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2504 " may not be nested inside %<simd%> region");
2505 return false;
2507 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2509 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2510 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2511 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2512 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2514 error_at (gimple_location (stmt),
2515 "only %<distribute%> or %<parallel%> regions are "
2516 "allowed to be strictly nested inside %<teams%> "
2517 "region");
2518 return false;
2522 switch (gimple_code (stmt))
2524 case GIMPLE_OMP_FOR:
2525 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2526 return true;
2527 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2529 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2531 error_at (gimple_location (stmt),
2532 "%<distribute%> region must be strictly nested "
2533 "inside %<teams%> construct");
2534 return false;
2536 return true;
2538 /* We split taskloop into task and nested taskloop in it. */
2539 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2540 return true;
2541 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2543 bool ok = false;
2545 if (ctx)
2546 switch (gimple_code (ctx->stmt))
2548 case GIMPLE_OMP_FOR:
2549 ok = (gimple_omp_for_kind (ctx->stmt)
2550 == GF_OMP_FOR_KIND_OACC_LOOP);
2551 break;
2553 case GIMPLE_OMP_TARGET:
2554 switch (gimple_omp_target_kind (ctx->stmt))
2556 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2557 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2558 ok = true;
2559 break;
2561 default:
2562 break;
2565 default:
2566 break;
2568 else if (oacc_get_fn_attrib (current_function_decl))
2569 ok = true;
2570 if (!ok)
2572 error_at (gimple_location (stmt),
2573 "OpenACC loop directive must be associated with"
2574 " an OpenACC compute region");
2575 return false;
2578 /* FALLTHRU */
2579 case GIMPLE_CALL:
2580 if (is_gimple_call (stmt)
2581 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2582 == BUILT_IN_GOMP_CANCEL
2583 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2584 == BUILT_IN_GOMP_CANCELLATION_POINT))
2586 const char *bad = NULL;
2587 const char *kind = NULL;
2588 const char *construct
2589 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2590 == BUILT_IN_GOMP_CANCEL)
2591 ? "#pragma omp cancel"
2592 : "#pragma omp cancellation point";
2593 if (ctx == NULL)
2595 error_at (gimple_location (stmt), "orphaned %qs construct",
2596 construct);
2597 return false;
2599 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2600 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2601 : 0)
2603 case 1:
2604 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2605 bad = "#pragma omp parallel";
2606 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2607 == BUILT_IN_GOMP_CANCEL
2608 && !integer_zerop (gimple_call_arg (stmt, 1)))
2609 ctx->cancellable = true;
2610 kind = "parallel";
2611 break;
2612 case 2:
2613 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2614 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2615 bad = "#pragma omp for";
2616 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2617 == BUILT_IN_GOMP_CANCEL
2618 && !integer_zerop (gimple_call_arg (stmt, 1)))
2620 ctx->cancellable = true;
2621 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2622 OMP_CLAUSE_NOWAIT))
2623 warning_at (gimple_location (stmt), 0,
2624 "%<#pragma omp cancel for%> inside "
2625 "%<nowait%> for construct");
2626 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2627 OMP_CLAUSE_ORDERED))
2628 warning_at (gimple_location (stmt), 0,
2629 "%<#pragma omp cancel for%> inside "
2630 "%<ordered%> for construct");
2632 kind = "for";
2633 break;
2634 case 4:
2635 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2636 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2637 bad = "#pragma omp sections";
2638 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2639 == BUILT_IN_GOMP_CANCEL
2640 && !integer_zerop (gimple_call_arg (stmt, 1)))
2642 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2644 ctx->cancellable = true;
2645 if (omp_find_clause (gimple_omp_sections_clauses
2646 (ctx->stmt),
2647 OMP_CLAUSE_NOWAIT))
2648 warning_at (gimple_location (stmt), 0,
2649 "%<#pragma omp cancel sections%> inside "
2650 "%<nowait%> sections construct");
2652 else
2654 gcc_assert (ctx->outer
2655 && gimple_code (ctx->outer->stmt)
2656 == GIMPLE_OMP_SECTIONS);
2657 ctx->outer->cancellable = true;
2658 if (omp_find_clause (gimple_omp_sections_clauses
2659 (ctx->outer->stmt),
2660 OMP_CLAUSE_NOWAIT))
2661 warning_at (gimple_location (stmt), 0,
2662 "%<#pragma omp cancel sections%> inside "
2663 "%<nowait%> sections construct");
2666 kind = "sections";
2667 break;
2668 case 8:
2669 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2670 bad = "#pragma omp task";
2671 else
2673 for (omp_context *octx = ctx->outer;
2674 octx; octx = octx->outer)
2676 switch (gimple_code (octx->stmt))
2678 case GIMPLE_OMP_TASKGROUP:
2679 break;
2680 case GIMPLE_OMP_TARGET:
2681 if (gimple_omp_target_kind (octx->stmt)
2682 != GF_OMP_TARGET_KIND_REGION)
2683 continue;
2684 /* FALLTHRU */
2685 case GIMPLE_OMP_PARALLEL:
2686 case GIMPLE_OMP_TEAMS:
2687 error_at (gimple_location (stmt),
2688 "%<%s taskgroup%> construct not closely "
2689 "nested inside of %<taskgroup%> region",
2690 construct);
2691 return false;
2692 default:
2693 continue;
2695 break;
2697 ctx->cancellable = true;
2699 kind = "taskgroup";
2700 break;
2701 default:
2702 error_at (gimple_location (stmt), "invalid arguments");
2703 return false;
2705 if (bad)
2707 error_at (gimple_location (stmt),
2708 "%<%s %s%> construct not closely nested inside of %qs",
2709 construct, kind, bad);
2710 return false;
2713 /* FALLTHRU */
2714 case GIMPLE_OMP_SECTIONS:
2715 case GIMPLE_OMP_SINGLE:
2716 for (; ctx != NULL; ctx = ctx->outer)
2717 switch (gimple_code (ctx->stmt))
2719 case GIMPLE_OMP_FOR:
2720 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2721 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2722 break;
2723 /* FALLTHRU */
2724 case GIMPLE_OMP_SECTIONS:
2725 case GIMPLE_OMP_SINGLE:
2726 case GIMPLE_OMP_ORDERED:
2727 case GIMPLE_OMP_MASTER:
2728 case GIMPLE_OMP_TASK:
2729 case GIMPLE_OMP_CRITICAL:
2730 if (is_gimple_call (stmt))
2732 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2733 != BUILT_IN_GOMP_BARRIER)
2734 return true;
2735 error_at (gimple_location (stmt),
2736 "barrier region may not be closely nested inside "
2737 "of work-sharing, %<critical%>, %<ordered%>, "
2738 "%<master%>, explicit %<task%> or %<taskloop%> "
2739 "region");
2740 return false;
2742 error_at (gimple_location (stmt),
2743 "work-sharing region may not be closely nested inside "
2744 "of work-sharing, %<critical%>, %<ordered%>, "
2745 "%<master%>, explicit %<task%> or %<taskloop%> region");
2746 return false;
2747 case GIMPLE_OMP_PARALLEL:
2748 case GIMPLE_OMP_TEAMS:
2749 return true;
2750 case GIMPLE_OMP_TARGET:
2751 if (gimple_omp_target_kind (ctx->stmt)
2752 == GF_OMP_TARGET_KIND_REGION)
2753 return true;
2754 break;
2755 default:
2756 break;
2758 break;
2759 case GIMPLE_OMP_MASTER:
2760 for (; ctx != NULL; ctx = ctx->outer)
2761 switch (gimple_code (ctx->stmt))
2763 case GIMPLE_OMP_FOR:
2764 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2765 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2766 break;
2767 /* FALLTHRU */
2768 case GIMPLE_OMP_SECTIONS:
2769 case GIMPLE_OMP_SINGLE:
2770 case GIMPLE_OMP_TASK:
2771 error_at (gimple_location (stmt),
2772 "%<master%> region may not be closely nested inside "
2773 "of work-sharing, explicit %<task%> or %<taskloop%> "
2774 "region");
2775 return false;
2776 case GIMPLE_OMP_PARALLEL:
2777 case GIMPLE_OMP_TEAMS:
2778 return true;
2779 case GIMPLE_OMP_TARGET:
2780 if (gimple_omp_target_kind (ctx->stmt)
2781 == GF_OMP_TARGET_KIND_REGION)
2782 return true;
2783 break;
2784 default:
2785 break;
2787 break;
2788 case GIMPLE_OMP_TASK:
2789 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2790 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2791 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2792 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2794 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2795 error_at (OMP_CLAUSE_LOCATION (c),
2796 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2797 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2798 return false;
2800 break;
2801 case GIMPLE_OMP_ORDERED:
2802 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2803 c; c = OMP_CLAUSE_CHAIN (c))
2805 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2807 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2808 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2809 continue;
2811 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2812 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2813 || kind == OMP_CLAUSE_DEPEND_SINK)
2815 tree oclause;
2816 /* Look for containing ordered(N) loop. */
2817 if (ctx == NULL
2818 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2819 || (oclause
2820 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2821 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2823 error_at (OMP_CLAUSE_LOCATION (c),
2824 "%<ordered%> construct with %<depend%> clause "
2825 "must be closely nested inside an %<ordered%> "
2826 "loop");
2827 return false;
2829 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2831 error_at (OMP_CLAUSE_LOCATION (c),
2832 "%<ordered%> construct with %<depend%> clause "
2833 "must be closely nested inside a loop with "
2834 "%<ordered%> clause with a parameter");
2835 return false;
2838 else
2840 error_at (OMP_CLAUSE_LOCATION (c),
2841 "invalid depend kind in omp %<ordered%> %<depend%>");
2842 return false;
2845 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2846 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2848 /* ordered simd must be closely nested inside of simd region,
2849 and simd region must not encounter constructs other than
2850 ordered simd, therefore ordered simd may be either orphaned,
2851 or ctx->stmt must be simd. The latter case is handled already
2852 earlier. */
2853 if (ctx != NULL)
2855 error_at (gimple_location (stmt),
2856 "%<ordered%> %<simd%> must be closely nested inside "
2857 "%<simd%> region");
2858 return false;
2861 for (; ctx != NULL; ctx = ctx->outer)
2862 switch (gimple_code (ctx->stmt))
2864 case GIMPLE_OMP_CRITICAL:
2865 case GIMPLE_OMP_TASK:
2866 case GIMPLE_OMP_ORDERED:
2867 ordered_in_taskloop:
2868 error_at (gimple_location (stmt),
2869 "%<ordered%> region may not be closely nested inside "
2870 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2871 "%<taskloop%> region");
2872 return false;
2873 case GIMPLE_OMP_FOR:
2874 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2875 goto ordered_in_taskloop;
2876 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2877 OMP_CLAUSE_ORDERED) == NULL)
2879 error_at (gimple_location (stmt),
2880 "%<ordered%> region must be closely nested inside "
2881 "a loop region with an %<ordered%> clause");
2882 return false;
2884 return true;
2885 case GIMPLE_OMP_TARGET:
2886 if (gimple_omp_target_kind (ctx->stmt)
2887 != GF_OMP_TARGET_KIND_REGION)
2888 break;
2889 /* FALLTHRU */
2890 case GIMPLE_OMP_PARALLEL:
2891 case GIMPLE_OMP_TEAMS:
2892 error_at (gimple_location (stmt),
2893 "%<ordered%> region must be closely nested inside "
2894 "a loop region with an %<ordered%> clause");
2895 return false;
2896 default:
2897 break;
2899 break;
2900 case GIMPLE_OMP_CRITICAL:
2902 tree this_stmt_name
2903 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2904 for (; ctx != NULL; ctx = ctx->outer)
2905 if (gomp_critical *other_crit
2906 = dyn_cast <gomp_critical *> (ctx->stmt))
2907 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2909 error_at (gimple_location (stmt),
2910 "%<critical%> region may not be nested inside "
2911 "a %<critical%> region with the same name");
2912 return false;
2915 break;
2916 case GIMPLE_OMP_TEAMS:
2917 if (ctx == NULL
2918 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2919 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2921 error_at (gimple_location (stmt),
2922 "%<teams%> construct not closely nested inside of "
2923 "%<target%> construct");
2924 return false;
2926 break;
2927 case GIMPLE_OMP_TARGET:
2928 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2929 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2930 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2931 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2933 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2934 error_at (OMP_CLAUSE_LOCATION (c),
2935 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2936 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2937 return false;
2939 if (is_gimple_omp_offloaded (stmt)
2940 && oacc_get_fn_attrib (cfun->decl) != NULL)
2942 error_at (gimple_location (stmt),
2943 "OpenACC region inside of OpenACC routine, nested "
2944 "parallelism not supported yet");
2945 return false;
2947 for (; ctx != NULL; ctx = ctx->outer)
2949 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2951 if (is_gimple_omp (stmt)
2952 && is_gimple_omp_oacc (stmt)
2953 && is_gimple_omp (ctx->stmt))
2955 error_at (gimple_location (stmt),
2956 "OpenACC construct inside of non-OpenACC region");
2957 return false;
2959 continue;
2962 const char *stmt_name, *ctx_stmt_name;
2963 switch (gimple_omp_target_kind (stmt))
2965 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2966 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2967 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2968 case GF_OMP_TARGET_KIND_ENTER_DATA:
2969 stmt_name = "target enter data"; break;
2970 case GF_OMP_TARGET_KIND_EXIT_DATA:
2971 stmt_name = "target exit data"; break;
2972 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2973 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2974 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2975 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2976 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2977 stmt_name = "enter/exit data"; break;
2978 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2979 break;
2980 default: gcc_unreachable ();
2982 switch (gimple_omp_target_kind (ctx->stmt))
2984 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2985 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2986 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2987 ctx_stmt_name = "parallel"; break;
2988 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2989 ctx_stmt_name = "kernels"; break;
2990 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2991 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2992 ctx_stmt_name = "host_data"; break;
2993 default: gcc_unreachable ();
2996 /* OpenACC/OpenMP mismatch? */
2997 if (is_gimple_omp_oacc (stmt)
2998 != is_gimple_omp_oacc (ctx->stmt))
3000 error_at (gimple_location (stmt),
3001 "%s %qs construct inside of %s %qs region",
3002 (is_gimple_omp_oacc (stmt)
3003 ? "OpenACC" : "OpenMP"), stmt_name,
3004 (is_gimple_omp_oacc (ctx->stmt)
3005 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3006 return false;
3008 if (is_gimple_omp_offloaded (ctx->stmt))
3010 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3011 if (is_gimple_omp_oacc (ctx->stmt))
3013 error_at (gimple_location (stmt),
3014 "%qs construct inside of %qs region",
3015 stmt_name, ctx_stmt_name);
3016 return false;
3018 else
3020 warning_at (gimple_location (stmt), 0,
3021 "%qs construct inside of %qs region",
3022 stmt_name, ctx_stmt_name);
3026 break;
3027 default:
3028 break;
3030 return true;
3034 /* Helper function scan_omp.
3036 Callback for walk_tree or operators in walk_gimple_stmt used to
3037 scan for OMP directives in TP. */
3039 static tree
3040 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3042 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3043 omp_context *ctx = (omp_context *) wi->info;
3044 tree t = *tp;
3046 switch (TREE_CODE (t))
3048 case VAR_DECL:
3049 case PARM_DECL:
3050 case LABEL_DECL:
3051 case RESULT_DECL:
3052 if (ctx)
3054 tree repl = remap_decl (t, &ctx->cb);
3055 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3056 *tp = repl;
3058 break;
3060 default:
3061 if (ctx && TYPE_P (t))
3062 *tp = remap_type (t, &ctx->cb);
3063 else if (!DECL_P (t))
3065 *walk_subtrees = 1;
3066 if (ctx)
3068 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3069 if (tem != TREE_TYPE (t))
3071 if (TREE_CODE (t) == INTEGER_CST)
3072 *tp = wide_int_to_tree (tem, t);
3073 else
3074 TREE_TYPE (t) = tem;
3078 break;
3081 return NULL_TREE;
3084 /* Return true if FNDECL is a setjmp or a longjmp. */
3086 static bool
3087 setjmp_or_longjmp_p (const_tree fndecl)
3089 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3090 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3091 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3092 return true;
3094 tree declname = DECL_NAME (fndecl);
3095 if (!declname)
3096 return false;
3097 const char *name = IDENTIFIER_POINTER (declname);
3098 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3102 /* Helper function for scan_omp.
3104 Callback for walk_gimple_stmt used to scan for OMP directives in
3105 the current statement in GSI. */
3107 static tree
3108 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3109 struct walk_stmt_info *wi)
3111 gimple *stmt = gsi_stmt (*gsi);
3112 omp_context *ctx = (omp_context *) wi->info;
3114 if (gimple_has_location (stmt))
3115 input_location = gimple_location (stmt);
3117 /* Check the nesting restrictions. */
3118 bool remove = false;
3119 if (is_gimple_omp (stmt))
3120 remove = !check_omp_nesting_restrictions (stmt, ctx);
3121 else if (is_gimple_call (stmt))
3123 tree fndecl = gimple_call_fndecl (stmt);
3124 if (fndecl)
3126 if (setjmp_or_longjmp_p (fndecl)
3127 && ctx
3128 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3129 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3131 remove = true;
3132 error_at (gimple_location (stmt),
3133 "setjmp/longjmp inside simd construct");
3135 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3136 switch (DECL_FUNCTION_CODE (fndecl))
3138 case BUILT_IN_GOMP_BARRIER:
3139 case BUILT_IN_GOMP_CANCEL:
3140 case BUILT_IN_GOMP_CANCELLATION_POINT:
3141 case BUILT_IN_GOMP_TASKYIELD:
3142 case BUILT_IN_GOMP_TASKWAIT:
3143 case BUILT_IN_GOMP_TASKGROUP_START:
3144 case BUILT_IN_GOMP_TASKGROUP_END:
3145 remove = !check_omp_nesting_restrictions (stmt, ctx);
3146 break;
3147 default:
3148 break;
3152 if (remove)
3154 stmt = gimple_build_nop ();
3155 gsi_replace (gsi, stmt, false);
3158 *handled_ops_p = true;
3160 switch (gimple_code (stmt))
3162 case GIMPLE_OMP_PARALLEL:
3163 taskreg_nesting_level++;
3164 scan_omp_parallel (gsi, ctx);
3165 taskreg_nesting_level--;
3166 break;
3168 case GIMPLE_OMP_TASK:
3169 taskreg_nesting_level++;
3170 scan_omp_task (gsi, ctx);
3171 taskreg_nesting_level--;
3172 break;
3174 case GIMPLE_OMP_FOR:
3175 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3176 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3177 && omp_maybe_offloaded_ctx (ctx)
3178 && omp_max_simt_vf ())
3179 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3180 else
3181 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3182 break;
3184 case GIMPLE_OMP_SECTIONS:
3185 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3186 break;
3188 case GIMPLE_OMP_SINGLE:
3189 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3190 break;
3192 case GIMPLE_OMP_SECTION:
3193 case GIMPLE_OMP_MASTER:
3194 case GIMPLE_OMP_TASKGROUP:
3195 case GIMPLE_OMP_ORDERED:
3196 case GIMPLE_OMP_CRITICAL:
3197 case GIMPLE_OMP_GRID_BODY:
3198 ctx = new_omp_context (stmt, ctx);
3199 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3200 break;
3202 case GIMPLE_OMP_TARGET:
3203 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3204 break;
3206 case GIMPLE_OMP_TEAMS:
3207 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3208 break;
3210 case GIMPLE_BIND:
3212 tree var;
3214 *handled_ops_p = false;
3215 if (ctx)
3216 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3217 var ;
3218 var = DECL_CHAIN (var))
3219 insert_decl_map (&ctx->cb, var, var);
3221 break;
3222 default:
3223 *handled_ops_p = false;
3224 break;
3227 return NULL_TREE;
3231 /* Scan all the statements starting at the current statement. CTX
3232 contains context information about the OMP directives and
3233 clauses found during the scan. */
3235 static void
3236 scan_omp (gimple_seq *body_p, omp_context *ctx)
3238 location_t saved_location;
3239 struct walk_stmt_info wi;
3241 memset (&wi, 0, sizeof (wi));
3242 wi.info = ctx;
3243 wi.want_locations = true;
3245 saved_location = input_location;
3246 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3247 input_location = saved_location;
3250 /* Re-gimplification and code generation routines. */
3252 /* If a context was created for STMT when it was scanned, return it. */
3254 static omp_context *
3255 maybe_lookup_ctx (gimple *stmt)
3257 splay_tree_node n;
3258 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3259 return n ? (omp_context *) n->value : NULL;
3263 /* Find the mapping for DECL in CTX or the immediately enclosing
3264 context that has a mapping for DECL.
3266 If CTX is a nested parallel directive, we may have to use the decl
3267 mappings created in CTX's parent context. Suppose that we have the
3268 following parallel nesting (variable UIDs showed for clarity):
3270 iD.1562 = 0;
3271 #omp parallel shared(iD.1562) -> outer parallel
3272 iD.1562 = iD.1562 + 1;
3274 #omp parallel shared (iD.1562) -> inner parallel
3275 iD.1562 = iD.1562 - 1;
3277 Each parallel structure will create a distinct .omp_data_s structure
3278 for copying iD.1562 in/out of the directive:
3280 outer parallel .omp_data_s.1.i -> iD.1562
3281 inner parallel .omp_data_s.2.i -> iD.1562
3283 A shared variable mapping will produce a copy-out operation before
3284 the parallel directive and a copy-in operation after it. So, in
3285 this case we would have:
3287 iD.1562 = 0;
3288 .omp_data_o.1.i = iD.1562;
3289 #omp parallel shared(iD.1562) -> outer parallel
3290 .omp_data_i.1 = &.omp_data_o.1
3291 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3293 .omp_data_o.2.i = iD.1562; -> **
3294 #omp parallel shared(iD.1562) -> inner parallel
3295 .omp_data_i.2 = &.omp_data_o.2
3296 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3299 ** This is a problem. The symbol iD.1562 cannot be referenced
3300 inside the body of the outer parallel region. But since we are
3301 emitting this copy operation while expanding the inner parallel
3302 directive, we need to access the CTX structure of the outer
3303 parallel directive to get the correct mapping:
3305 .omp_data_o.2.i = .omp_data_i.1->i
3307 Since there may be other workshare or parallel directives enclosing
3308 the parallel directive, it may be necessary to walk up the context
3309 parent chain. This is not a problem in general because nested
3310 parallelism happens only rarely. */
3312 static tree
3313 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3315 tree t;
3316 omp_context *up;
3318 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3319 t = maybe_lookup_decl (decl, up);
3321 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3323 return t ? t : decl;
3327 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3328 in outer contexts. */
3330 static tree
3331 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3333 tree t = NULL;
3334 omp_context *up;
3336 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3337 t = maybe_lookup_decl (decl, up);
3339 return t ? t : decl;
3343 /* Construct the initialization value for reduction operation OP. */
3345 tree
3346 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3348 switch (op)
3350 case PLUS_EXPR:
3351 case MINUS_EXPR:
3352 case BIT_IOR_EXPR:
3353 case BIT_XOR_EXPR:
3354 case TRUTH_OR_EXPR:
3355 case TRUTH_ORIF_EXPR:
3356 case TRUTH_XOR_EXPR:
3357 case NE_EXPR:
3358 return build_zero_cst (type);
3360 case MULT_EXPR:
3361 case TRUTH_AND_EXPR:
3362 case TRUTH_ANDIF_EXPR:
3363 case EQ_EXPR:
3364 return fold_convert_loc (loc, type, integer_one_node);
3366 case BIT_AND_EXPR:
3367 return fold_convert_loc (loc, type, integer_minus_one_node);
3369 case MAX_EXPR:
3370 if (SCALAR_FLOAT_TYPE_P (type))
3372 REAL_VALUE_TYPE max, min;
3373 if (HONOR_INFINITIES (type))
3375 real_inf (&max);
3376 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3378 else
3379 real_maxval (&min, 1, TYPE_MODE (type));
3380 return build_real (type, min);
3382 else if (POINTER_TYPE_P (type))
3384 wide_int min
3385 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3386 return wide_int_to_tree (type, min);
3388 else
3390 gcc_assert (INTEGRAL_TYPE_P (type));
3391 return TYPE_MIN_VALUE (type);
3394 case MIN_EXPR:
3395 if (SCALAR_FLOAT_TYPE_P (type))
3397 REAL_VALUE_TYPE max;
3398 if (HONOR_INFINITIES (type))
3399 real_inf (&max);
3400 else
3401 real_maxval (&max, 0, TYPE_MODE (type));
3402 return build_real (type, max);
3404 else if (POINTER_TYPE_P (type))
3406 wide_int max
3407 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3408 return wide_int_to_tree (type, max);
3410 else
3412 gcc_assert (INTEGRAL_TYPE_P (type));
3413 return TYPE_MAX_VALUE (type);
3416 default:
3417 gcc_unreachable ();
3421 /* Construct the initialization value for reduction CLAUSE. */
3423 tree
3424 omp_reduction_init (tree clause, tree type)
3426 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3427 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3430 /* Return alignment to be assumed for var in CLAUSE, which should be
3431 OMP_CLAUSE_ALIGNED. */
3433 static tree
3434 omp_clause_aligned_alignment (tree clause)
3436 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3437 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3439 /* Otherwise return implementation defined alignment. */
3440 unsigned int al = 1;
3441 machine_mode mode, vmode;
3442 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3443 if (vs)
3444 vs = 1 << floor_log2 (vs);
3445 static enum mode_class classes[]
3446 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3447 for (int i = 0; i < 4; i += 2)
3448 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3449 mode != VOIDmode;
3450 mode = GET_MODE_WIDER_MODE (mode))
3452 vmode = targetm.vectorize.preferred_simd_mode (mode);
3453 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3454 continue;
3455 while (vs
3456 && GET_MODE_SIZE (vmode) < vs
3457 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3458 vmode = GET_MODE_2XWIDER_MODE (vmode);
3460 tree type = lang_hooks.types.type_for_mode (mode, 1);
3461 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3462 continue;
3463 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3464 / GET_MODE_SIZE (mode));
3465 if (TYPE_MODE (type) != vmode)
3466 continue;
3467 if (TYPE_ALIGN_UNIT (type) > al)
3468 al = TYPE_ALIGN_UNIT (type);
3470 return build_int_cst (integer_type_node, al);
3474 /* This structure is part of the interface between lower_rec_simd_input_clauses
3475 and lower_rec_input_clauses. */
3477 struct omplow_simd_context {
3478 tree idx;
3479 tree lane;
3480 vec<tree, va_heap> simt_eargs;
3481 gimple_seq simt_dlist;
3482 int max_vf;
3483 bool is_simt;
3486 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3487 privatization. */
3489 static bool
3490 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3491 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3493 if (sctx->max_vf == 0)
3495 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3496 if (sctx->max_vf > 1)
3498 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3499 OMP_CLAUSE_SAFELEN);
3500 if (c
3501 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3502 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3503 sctx->max_vf = 1;
3504 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3505 sctx->max_vf) == -1)
3506 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3508 if (sctx->max_vf > 1)
3510 sctx->idx = create_tmp_var (unsigned_type_node);
3511 sctx->lane = create_tmp_var (unsigned_type_node);
3514 if (sctx->max_vf == 1)
3515 return false;
3517 if (sctx->is_simt)
3519 if (is_gimple_reg (new_var))
3521 ivar = lvar = new_var;
3522 return true;
3524 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3525 ivar = lvar = create_tmp_var (type);
3526 TREE_ADDRESSABLE (ivar) = 1;
3527 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3528 NULL, DECL_ATTRIBUTES (ivar));
3529 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3530 tree clobber = build_constructor (type, NULL);
3531 TREE_THIS_VOLATILE (clobber) = 1;
3532 gimple *g = gimple_build_assign (ivar, clobber);
3533 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3535 else
3537 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3538 tree avar = create_tmp_var_raw (atype);
3539 if (TREE_ADDRESSABLE (new_var))
3540 TREE_ADDRESSABLE (avar) = 1;
3541 DECL_ATTRIBUTES (avar)
3542 = tree_cons (get_identifier ("omp simd array"), NULL,
3543 DECL_ATTRIBUTES (avar));
3544 gimple_add_tmp_var (avar);
3545 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3546 NULL_TREE, NULL_TREE);
3547 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3548 NULL_TREE, NULL_TREE);
3550 if (DECL_P (new_var))
3552 SET_DECL_VALUE_EXPR (new_var, lvar);
3553 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3555 return true;
3558 /* Helper function of lower_rec_input_clauses. For a reference
3559 in simd reduction, add an underlying variable it will reference. */
3561 static void
3562 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3564 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3565 if (TREE_CONSTANT (z))
3567 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3568 get_name (new_vard));
3569 gimple_add_tmp_var (z);
3570 TREE_ADDRESSABLE (z) = 1;
3571 z = build_fold_addr_expr_loc (loc, z);
3572 gimplify_assign (new_vard, z, ilist);
3576 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3577 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3578 private variables. Initialization statements go in ILIST, while calls
3579 to destructors go in DLIST. */
3581 static void
3582 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3583 omp_context *ctx, struct omp_for_data *fd)
3585 tree c, dtor, copyin_seq, x, ptr;
3586 bool copyin_by_ref = false;
3587 bool lastprivate_firstprivate = false;
3588 bool reduction_omp_orig_ref = false;
3589 int pass;
3590 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3591 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3592 omplow_simd_context sctx = omplow_simd_context ();
3593 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3594 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3595 gimple_seq llist[3] = { };
3597 copyin_seq = NULL;
3598 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3600 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3601 with data sharing clauses referencing variable sized vars. That
3602 is unnecessarily hard to support and very unlikely to result in
3603 vectorized code anyway. */
3604 if (is_simd)
3605 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3606 switch (OMP_CLAUSE_CODE (c))
3608 case OMP_CLAUSE_LINEAR:
3609 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3610 sctx.max_vf = 1;
3611 /* FALLTHRU */
3612 case OMP_CLAUSE_PRIVATE:
3613 case OMP_CLAUSE_FIRSTPRIVATE:
3614 case OMP_CLAUSE_LASTPRIVATE:
3615 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3616 sctx.max_vf = 1;
3617 break;
3618 case OMP_CLAUSE_REDUCTION:
3619 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3620 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3621 sctx.max_vf = 1;
3622 break;
3623 default:
3624 continue;
3627 /* Add a placeholder for simduid. */
3628 if (sctx.is_simt && sctx.max_vf != 1)
3629 sctx.simt_eargs.safe_push (NULL_TREE);
3631 /* Do all the fixed sized types in the first pass, and the variable sized
3632 types in the second pass. This makes sure that the scalar arguments to
3633 the variable sized types are processed before we use them in the
3634 variable sized operations. */
3635 for (pass = 0; pass < 2; ++pass)
3637 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3639 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3640 tree var, new_var;
3641 bool by_ref;
3642 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3644 switch (c_kind)
3646 case OMP_CLAUSE_PRIVATE:
3647 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3648 continue;
3649 break;
3650 case OMP_CLAUSE_SHARED:
3651 /* Ignore shared directives in teams construct. */
3652 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3653 continue;
3654 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3656 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3657 || is_global_var (OMP_CLAUSE_DECL (c)));
3658 continue;
3660 case OMP_CLAUSE_FIRSTPRIVATE:
3661 case OMP_CLAUSE_COPYIN:
3662 break;
3663 case OMP_CLAUSE_LINEAR:
3664 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3665 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3666 lastprivate_firstprivate = true;
3667 break;
3668 case OMP_CLAUSE_REDUCTION:
3669 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3670 reduction_omp_orig_ref = true;
3671 break;
3672 case OMP_CLAUSE__LOOPTEMP_:
3673 /* Handle _looptemp_ clauses only on parallel/task. */
3674 if (fd)
3675 continue;
3676 break;
3677 case OMP_CLAUSE_LASTPRIVATE:
3678 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3680 lastprivate_firstprivate = true;
3681 if (pass != 0 || is_taskloop_ctx (ctx))
3682 continue;
3684 /* Even without corresponding firstprivate, if
3685 decl is Fortran allocatable, it needs outer var
3686 reference. */
3687 else if (pass == 0
3688 && lang_hooks.decls.omp_private_outer_ref
3689 (OMP_CLAUSE_DECL (c)))
3690 lastprivate_firstprivate = true;
3691 break;
3692 case OMP_CLAUSE_ALIGNED:
3693 if (pass == 0)
3694 continue;
3695 var = OMP_CLAUSE_DECL (c);
3696 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3697 && !is_global_var (var))
3699 new_var = maybe_lookup_decl (var, ctx);
3700 if (new_var == NULL_TREE)
3701 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3702 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3703 tree alarg = omp_clause_aligned_alignment (c);
3704 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3705 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3706 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3707 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3708 gimplify_and_add (x, ilist);
3710 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3711 && is_global_var (var))
3713 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3714 new_var = lookup_decl (var, ctx);
3715 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3716 t = build_fold_addr_expr_loc (clause_loc, t);
3717 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3718 tree alarg = omp_clause_aligned_alignment (c);
3719 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3720 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3721 t = fold_convert_loc (clause_loc, ptype, t);
3722 x = create_tmp_var (ptype);
3723 t = build2 (MODIFY_EXPR, ptype, x, t);
3724 gimplify_and_add (t, ilist);
3725 t = build_simple_mem_ref_loc (clause_loc, x);
3726 SET_DECL_VALUE_EXPR (new_var, t);
3727 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3729 continue;
3730 default:
3731 continue;
3734 new_var = var = OMP_CLAUSE_DECL (c);
3735 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3737 var = TREE_OPERAND (var, 0);
3738 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3739 var = TREE_OPERAND (var, 0);
3740 if (TREE_CODE (var) == INDIRECT_REF
3741 || TREE_CODE (var) == ADDR_EXPR)
3742 var = TREE_OPERAND (var, 0);
3743 if (is_variable_sized (var))
3745 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3746 var = DECL_VALUE_EXPR (var);
3747 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3748 var = TREE_OPERAND (var, 0);
3749 gcc_assert (DECL_P (var));
3751 new_var = var;
3753 if (c_kind != OMP_CLAUSE_COPYIN)
3754 new_var = lookup_decl (var, ctx);
3756 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3758 if (pass != 0)
3759 continue;
3761 /* C/C++ array section reductions. */
3762 else if (c_kind == OMP_CLAUSE_REDUCTION
3763 && var != OMP_CLAUSE_DECL (c))
3765 if (pass == 0)
3766 continue;
3768 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3769 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3770 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3772 tree b = TREE_OPERAND (orig_var, 1);
3773 b = maybe_lookup_decl (b, ctx);
3774 if (b == NULL)
3776 b = TREE_OPERAND (orig_var, 1);
3777 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3779 if (integer_zerop (bias))
3780 bias = b;
3781 else
3783 bias = fold_convert_loc (clause_loc,
3784 TREE_TYPE (b), bias);
3785 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3786 TREE_TYPE (b), b, bias);
3788 orig_var = TREE_OPERAND (orig_var, 0);
3790 if (TREE_CODE (orig_var) == INDIRECT_REF
3791 || TREE_CODE (orig_var) == ADDR_EXPR)
3792 orig_var = TREE_OPERAND (orig_var, 0);
3793 tree d = OMP_CLAUSE_DECL (c);
3794 tree type = TREE_TYPE (d);
3795 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3796 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3797 const char *name = get_name (orig_var);
3798 if (TREE_CONSTANT (v))
3800 x = create_tmp_var_raw (type, name);
3801 gimple_add_tmp_var (x);
3802 TREE_ADDRESSABLE (x) = 1;
3803 x = build_fold_addr_expr_loc (clause_loc, x);
3805 else
3807 tree atmp
3808 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3809 tree t = maybe_lookup_decl (v, ctx);
3810 if (t)
3811 v = t;
3812 else
3813 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3814 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3815 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3816 TREE_TYPE (v), v,
3817 build_int_cst (TREE_TYPE (v), 1));
3818 t = fold_build2_loc (clause_loc, MULT_EXPR,
3819 TREE_TYPE (v), t,
3820 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3821 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3822 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3825 tree ptype = build_pointer_type (TREE_TYPE (type));
3826 x = fold_convert_loc (clause_loc, ptype, x);
3827 tree y = create_tmp_var (ptype, name);
3828 gimplify_assign (y, x, ilist);
3829 x = y;
3830 tree yb = y;
3832 if (!integer_zerop (bias))
3834 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3835 bias);
3836 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3838 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3839 pointer_sized_int_node, yb, bias);
3840 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3841 yb = create_tmp_var (ptype, name);
3842 gimplify_assign (yb, x, ilist);
3843 x = yb;
3846 d = TREE_OPERAND (d, 0);
3847 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3848 d = TREE_OPERAND (d, 0);
3849 if (TREE_CODE (d) == ADDR_EXPR)
3851 if (orig_var != var)
3853 gcc_assert (is_variable_sized (orig_var));
3854 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3856 gimplify_assign (new_var, x, ilist);
3857 tree new_orig_var = lookup_decl (orig_var, ctx);
3858 tree t = build_fold_indirect_ref (new_var);
3859 DECL_IGNORED_P (new_var) = 0;
3860 TREE_THIS_NOTRAP (t);
3861 SET_DECL_VALUE_EXPR (new_orig_var, t);
3862 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3864 else
3866 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3867 build_int_cst (ptype, 0));
3868 SET_DECL_VALUE_EXPR (new_var, x);
3869 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3872 else
3874 gcc_assert (orig_var == var);
3875 if (TREE_CODE (d) == INDIRECT_REF)
3877 x = create_tmp_var (ptype, name);
3878 TREE_ADDRESSABLE (x) = 1;
3879 gimplify_assign (x, yb, ilist);
3880 x = build_fold_addr_expr_loc (clause_loc, x);
3882 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3883 gimplify_assign (new_var, x, ilist);
3885 tree y1 = create_tmp_var (ptype, NULL);
3886 gimplify_assign (y1, y, ilist);
3887 tree i2 = NULL_TREE, y2 = NULL_TREE;
3888 tree body2 = NULL_TREE, end2 = NULL_TREE;
3889 tree y3 = NULL_TREE, y4 = NULL_TREE;
3890 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3892 y2 = create_tmp_var (ptype, NULL);
3893 gimplify_assign (y2, y, ilist);
3894 tree ref = build_outer_var_ref (var, ctx);
3895 /* For ref build_outer_var_ref already performs this. */
3896 if (TREE_CODE (d) == INDIRECT_REF)
3897 gcc_assert (omp_is_reference (var));
3898 else if (TREE_CODE (d) == ADDR_EXPR)
3899 ref = build_fold_addr_expr (ref);
3900 else if (omp_is_reference (var))
3901 ref = build_fold_addr_expr (ref);
3902 ref = fold_convert_loc (clause_loc, ptype, ref);
3903 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3904 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3906 y3 = create_tmp_var (ptype, NULL);
3907 gimplify_assign (y3, unshare_expr (ref), ilist);
3909 if (is_simd)
3911 y4 = create_tmp_var (ptype, NULL);
3912 gimplify_assign (y4, ref, dlist);
3915 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3916 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3917 tree body = create_artificial_label (UNKNOWN_LOCATION);
3918 tree end = create_artificial_label (UNKNOWN_LOCATION);
3919 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3920 if (y2)
3922 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3923 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3924 body2 = create_artificial_label (UNKNOWN_LOCATION);
3925 end2 = create_artificial_label (UNKNOWN_LOCATION);
3926 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3928 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3930 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3931 tree decl_placeholder
3932 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3933 SET_DECL_VALUE_EXPR (decl_placeholder,
3934 build_simple_mem_ref (y1));
3935 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3936 SET_DECL_VALUE_EXPR (placeholder,
3937 y3 ? build_simple_mem_ref (y3)
3938 : error_mark_node);
3939 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3940 x = lang_hooks.decls.omp_clause_default_ctor
3941 (c, build_simple_mem_ref (y1),
3942 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3943 if (x)
3944 gimplify_and_add (x, ilist);
3945 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3947 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3948 lower_omp (&tseq, ctx);
3949 gimple_seq_add_seq (ilist, tseq);
3951 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3952 if (is_simd)
3954 SET_DECL_VALUE_EXPR (decl_placeholder,
3955 build_simple_mem_ref (y2));
3956 SET_DECL_VALUE_EXPR (placeholder,
3957 build_simple_mem_ref (y4));
3958 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3959 lower_omp (&tseq, ctx);
3960 gimple_seq_add_seq (dlist, tseq);
3961 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3963 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3964 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3965 x = lang_hooks.decls.omp_clause_dtor
3966 (c, build_simple_mem_ref (y2));
3967 if (x)
3969 gimple_seq tseq = NULL;
3970 dtor = x;
3971 gimplify_stmt (&dtor, &tseq);
3972 gimple_seq_add_seq (dlist, tseq);
3975 else
3977 x = omp_reduction_init (c, TREE_TYPE (type));
3978 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3980 /* reduction(-:var) sums up the partial results, so it
3981 acts identically to reduction(+:var). */
3982 if (code == MINUS_EXPR)
3983 code = PLUS_EXPR;
3985 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3986 if (is_simd)
3988 x = build2 (code, TREE_TYPE (type),
3989 build_simple_mem_ref (y4),
3990 build_simple_mem_ref (y2));
3991 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3994 gimple *g
3995 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3996 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3997 gimple_seq_add_stmt (ilist, g);
3998 if (y3)
4000 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4001 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4002 gimple_seq_add_stmt (ilist, g);
4004 g = gimple_build_assign (i, PLUS_EXPR, i,
4005 build_int_cst (TREE_TYPE (i), 1));
4006 gimple_seq_add_stmt (ilist, g);
4007 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4008 gimple_seq_add_stmt (ilist, g);
4009 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4010 if (y2)
4012 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4013 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4014 gimple_seq_add_stmt (dlist, g);
4015 if (y4)
4017 g = gimple_build_assign
4018 (y4, POINTER_PLUS_EXPR, y4,
4019 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4020 gimple_seq_add_stmt (dlist, g);
4022 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4023 build_int_cst (TREE_TYPE (i2), 1));
4024 gimple_seq_add_stmt (dlist, g);
4025 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4026 gimple_seq_add_stmt (dlist, g);
4027 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4029 continue;
4031 else if (is_variable_sized (var))
4033 /* For variable sized types, we need to allocate the
4034 actual storage here. Call alloca and store the
4035 result in the pointer decl that we created elsewhere. */
4036 if (pass == 0)
4037 continue;
4039 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4041 gcall *stmt;
4042 tree tmp, atmp;
4044 ptr = DECL_VALUE_EXPR (new_var);
4045 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4046 ptr = TREE_OPERAND (ptr, 0);
4047 gcc_assert (DECL_P (ptr));
4048 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4050 /* void *tmp = __builtin_alloca */
4051 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4052 stmt = gimple_build_call (atmp, 2, x,
4053 size_int (DECL_ALIGN (var)));
4054 tmp = create_tmp_var_raw (ptr_type_node);
4055 gimple_add_tmp_var (tmp);
4056 gimple_call_set_lhs (stmt, tmp);
4058 gimple_seq_add_stmt (ilist, stmt);
4060 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4061 gimplify_assign (ptr, x, ilist);
4064 else if (omp_is_reference (var))
4066 /* For references that are being privatized for Fortran,
4067 allocate new backing storage for the new pointer
4068 variable. This allows us to avoid changing all the
4069 code that expects a pointer to something that expects
4070 a direct variable. */
4071 if (pass == 0)
4072 continue;
4074 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4075 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4077 x = build_receiver_ref (var, false, ctx);
4078 x = build_fold_addr_expr_loc (clause_loc, x);
4080 else if (TREE_CONSTANT (x))
4082 /* For reduction in SIMD loop, defer adding the
4083 initialization of the reference, because if we decide
4084 to use SIMD array for it, the initilization could cause
4085 expansion ICE. */
4086 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4087 x = NULL_TREE;
4088 else
4090 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4091 get_name (var));
4092 gimple_add_tmp_var (x);
4093 TREE_ADDRESSABLE (x) = 1;
4094 x = build_fold_addr_expr_loc (clause_loc, x);
4097 else
4099 tree atmp
4100 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4101 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4102 tree al = size_int (TYPE_ALIGN (rtype));
4103 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4106 if (x)
4108 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4109 gimplify_assign (new_var, x, ilist);
4112 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4114 else if (c_kind == OMP_CLAUSE_REDUCTION
4115 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4117 if (pass == 0)
4118 continue;
4120 else if (pass != 0)
4121 continue;
4123 switch (OMP_CLAUSE_CODE (c))
4125 case OMP_CLAUSE_SHARED:
4126 /* Ignore shared directives in teams construct. */
4127 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4128 continue;
4129 /* Shared global vars are just accessed directly. */
4130 if (is_global_var (new_var))
4131 break;
4132 /* For taskloop firstprivate/lastprivate, represented
4133 as firstprivate and shared clause on the task, new_var
4134 is the firstprivate var. */
4135 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4136 break;
4137 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4138 needs to be delayed until after fixup_child_record_type so
4139 that we get the correct type during the dereference. */
4140 by_ref = use_pointer_for_field (var, ctx);
4141 x = build_receiver_ref (var, by_ref, ctx);
4142 SET_DECL_VALUE_EXPR (new_var, x);
4143 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4145 /* ??? If VAR is not passed by reference, and the variable
4146 hasn't been initialized yet, then we'll get a warning for
4147 the store into the omp_data_s structure. Ideally, we'd be
4148 able to notice this and not store anything at all, but
4149 we're generating code too early. Suppress the warning. */
4150 if (!by_ref)
4151 TREE_NO_WARNING (var) = 1;
4152 break;
4154 case OMP_CLAUSE_LASTPRIVATE:
4155 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4156 break;
4157 /* FALLTHRU */
4159 case OMP_CLAUSE_PRIVATE:
4160 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4161 x = build_outer_var_ref (var, ctx);
4162 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4164 if (is_task_ctx (ctx))
4165 x = build_receiver_ref (var, false, ctx);
4166 else
4167 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4169 else
4170 x = NULL;
4171 do_private:
4172 tree nx;
4173 nx = lang_hooks.decls.omp_clause_default_ctor
4174 (c, unshare_expr (new_var), x);
4175 if (is_simd)
4177 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4178 if ((TREE_ADDRESSABLE (new_var) || nx || y
4179 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4180 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4181 ivar, lvar))
4183 if (nx)
4184 x = lang_hooks.decls.omp_clause_default_ctor
4185 (c, unshare_expr (ivar), x);
4186 if (nx && x)
4187 gimplify_and_add (x, &llist[0]);
4188 if (y)
4190 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4191 if (y)
4193 gimple_seq tseq = NULL;
4195 dtor = y;
4196 gimplify_stmt (&dtor, &tseq);
4197 gimple_seq_add_seq (&llist[1], tseq);
4200 break;
4203 if (nx)
4204 gimplify_and_add (nx, ilist);
4205 /* FALLTHRU */
4207 do_dtor:
4208 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4209 if (x)
4211 gimple_seq tseq = NULL;
4213 dtor = x;
4214 gimplify_stmt (&dtor, &tseq);
4215 gimple_seq_add_seq (dlist, tseq);
4217 break;
4219 case OMP_CLAUSE_LINEAR:
4220 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4221 goto do_firstprivate;
4222 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4223 x = NULL;
4224 else
4225 x = build_outer_var_ref (var, ctx);
4226 goto do_private;
4228 case OMP_CLAUSE_FIRSTPRIVATE:
4229 if (is_task_ctx (ctx))
4231 if (omp_is_reference (var) || is_variable_sized (var))
4232 goto do_dtor;
4233 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4234 ctx))
4235 || use_pointer_for_field (var, NULL))
4237 x = build_receiver_ref (var, false, ctx);
4238 SET_DECL_VALUE_EXPR (new_var, x);
4239 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4240 goto do_dtor;
4243 do_firstprivate:
4244 x = build_outer_var_ref (var, ctx);
4245 if (is_simd)
4247 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4248 && gimple_omp_for_combined_into_p (ctx->stmt))
4250 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4251 tree stept = TREE_TYPE (t);
4252 tree ct = omp_find_clause (clauses,
4253 OMP_CLAUSE__LOOPTEMP_);
4254 gcc_assert (ct);
4255 tree l = OMP_CLAUSE_DECL (ct);
4256 tree n1 = fd->loop.n1;
4257 tree step = fd->loop.step;
4258 tree itype = TREE_TYPE (l);
4259 if (POINTER_TYPE_P (itype))
4260 itype = signed_type_for (itype);
4261 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4262 if (TYPE_UNSIGNED (itype)
4263 && fd->loop.cond_code == GT_EXPR)
4264 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4265 fold_build1 (NEGATE_EXPR, itype, l),
4266 fold_build1 (NEGATE_EXPR,
4267 itype, step));
4268 else
4269 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4270 t = fold_build2 (MULT_EXPR, stept,
4271 fold_convert (stept, l), t);
4273 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4275 x = lang_hooks.decls.omp_clause_linear_ctor
4276 (c, new_var, x, t);
4277 gimplify_and_add (x, ilist);
4278 goto do_dtor;
4281 if (POINTER_TYPE_P (TREE_TYPE (x)))
4282 x = fold_build2 (POINTER_PLUS_EXPR,
4283 TREE_TYPE (x), x, t);
4284 else
4285 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4288 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4289 || TREE_ADDRESSABLE (new_var))
4290 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4291 ivar, lvar))
4293 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4295 tree iv = create_tmp_var (TREE_TYPE (new_var));
4296 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4297 gimplify_and_add (x, ilist);
4298 gimple_stmt_iterator gsi
4299 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4300 gassign *g
4301 = gimple_build_assign (unshare_expr (lvar), iv);
4302 gsi_insert_before_without_update (&gsi, g,
4303 GSI_SAME_STMT);
4304 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4305 enum tree_code code = PLUS_EXPR;
4306 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4307 code = POINTER_PLUS_EXPR;
4308 g = gimple_build_assign (iv, code, iv, t);
4309 gsi_insert_before_without_update (&gsi, g,
4310 GSI_SAME_STMT);
4311 break;
4313 x = lang_hooks.decls.omp_clause_copy_ctor
4314 (c, unshare_expr (ivar), x);
4315 gimplify_and_add (x, &llist[0]);
4316 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4317 if (x)
4319 gimple_seq tseq = NULL;
4321 dtor = x;
4322 gimplify_stmt (&dtor, &tseq);
4323 gimple_seq_add_seq (&llist[1], tseq);
4325 break;
4328 x = lang_hooks.decls.omp_clause_copy_ctor
4329 (c, unshare_expr (new_var), x);
4330 gimplify_and_add (x, ilist);
4331 goto do_dtor;
4333 case OMP_CLAUSE__LOOPTEMP_:
4334 gcc_assert (is_taskreg_ctx (ctx));
4335 x = build_outer_var_ref (var, ctx);
4336 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4337 gimplify_and_add (x, ilist);
4338 break;
4340 case OMP_CLAUSE_COPYIN:
4341 by_ref = use_pointer_for_field (var, NULL);
4342 x = build_receiver_ref (var, by_ref, ctx);
4343 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4344 append_to_statement_list (x, &copyin_seq);
4345 copyin_by_ref |= by_ref;
4346 break;
4348 case OMP_CLAUSE_REDUCTION:
4349 /* OpenACC reductions are initialized using the
4350 GOACC_REDUCTION internal function. */
4351 if (is_gimple_omp_oacc (ctx->stmt))
4352 break;
4353 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4355 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4356 gimple *tseq;
4357 x = build_outer_var_ref (var, ctx);
4359 if (omp_is_reference (var)
4360 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4361 TREE_TYPE (x)))
4362 x = build_fold_addr_expr_loc (clause_loc, x);
4363 SET_DECL_VALUE_EXPR (placeholder, x);
4364 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4365 tree new_vard = new_var;
4366 if (omp_is_reference (var))
4368 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4369 new_vard = TREE_OPERAND (new_var, 0);
4370 gcc_assert (DECL_P (new_vard));
4372 if (is_simd
4373 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4374 ivar, lvar))
4376 if (new_vard == new_var)
4378 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4379 SET_DECL_VALUE_EXPR (new_var, ivar);
4381 else
4383 SET_DECL_VALUE_EXPR (new_vard,
4384 build_fold_addr_expr (ivar));
4385 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4387 x = lang_hooks.decls.omp_clause_default_ctor
4388 (c, unshare_expr (ivar),
4389 build_outer_var_ref (var, ctx));
4390 if (x)
4391 gimplify_and_add (x, &llist[0]);
4392 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4394 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4395 lower_omp (&tseq, ctx);
4396 gimple_seq_add_seq (&llist[0], tseq);
4398 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4399 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4400 lower_omp (&tseq, ctx);
4401 gimple_seq_add_seq (&llist[1], tseq);
4402 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4403 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4404 if (new_vard == new_var)
4405 SET_DECL_VALUE_EXPR (new_var, lvar);
4406 else
4407 SET_DECL_VALUE_EXPR (new_vard,
4408 build_fold_addr_expr (lvar));
4409 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4410 if (x)
4412 tseq = NULL;
4413 dtor = x;
4414 gimplify_stmt (&dtor, &tseq);
4415 gimple_seq_add_seq (&llist[1], tseq);
4417 break;
4419 /* If this is a reference to constant size reduction var
4420 with placeholder, we haven't emitted the initializer
4421 for it because it is undesirable if SIMD arrays are used.
4422 But if they aren't used, we need to emit the deferred
4423 initialization now. */
4424 else if (omp_is_reference (var) && is_simd)
4425 handle_simd_reference (clause_loc, new_vard, ilist);
4426 x = lang_hooks.decls.omp_clause_default_ctor
4427 (c, unshare_expr (new_var),
4428 build_outer_var_ref (var, ctx));
4429 if (x)
4430 gimplify_and_add (x, ilist);
4431 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4433 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4434 lower_omp (&tseq, ctx);
4435 gimple_seq_add_seq (ilist, tseq);
4437 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4438 if (is_simd)
4440 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4441 lower_omp (&tseq, ctx);
4442 gimple_seq_add_seq (dlist, tseq);
4443 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4445 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4446 goto do_dtor;
4448 else
4450 x = omp_reduction_init (c, TREE_TYPE (new_var));
4451 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4452 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4454 /* reduction(-:var) sums up the partial results, so it
4455 acts identically to reduction(+:var). */
4456 if (code == MINUS_EXPR)
4457 code = PLUS_EXPR;
4459 tree new_vard = new_var;
4460 if (is_simd && omp_is_reference (var))
4462 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4463 new_vard = TREE_OPERAND (new_var, 0);
4464 gcc_assert (DECL_P (new_vard));
4466 if (is_simd
4467 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4468 ivar, lvar))
4470 tree ref = build_outer_var_ref (var, ctx);
4472 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4474 if (sctx.is_simt)
4476 if (!simt_lane)
4477 simt_lane = create_tmp_var (unsigned_type_node);
4478 x = build_call_expr_internal_loc
4479 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4480 TREE_TYPE (ivar), 2, ivar, simt_lane);
4481 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4482 gimplify_assign (ivar, x, &llist[2]);
4484 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4485 ref = build_outer_var_ref (var, ctx);
4486 gimplify_assign (ref, x, &llist[1]);
4488 if (new_vard != new_var)
4490 SET_DECL_VALUE_EXPR (new_vard,
4491 build_fold_addr_expr (lvar));
4492 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4495 else
4497 if (omp_is_reference (var) && is_simd)
4498 handle_simd_reference (clause_loc, new_vard, ilist);
4499 gimplify_assign (new_var, x, ilist);
4500 if (is_simd)
4502 tree ref = build_outer_var_ref (var, ctx);
4504 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4505 ref = build_outer_var_ref (var, ctx);
4506 gimplify_assign (ref, x, dlist);
4510 break;
4512 default:
4513 gcc_unreachable ();
4518 if (sctx.max_vf == 1)
4519 sctx.is_simt = false;
4521 if (sctx.lane || sctx.is_simt)
4523 uid = create_tmp_var (ptr_type_node, "simduid");
4524 /* Don't want uninit warnings on simduid, it is always uninitialized,
4525 but we use it not for the value, but for the DECL_UID only. */
4526 TREE_NO_WARNING (uid) = 1;
4527 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4528 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4529 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4530 gimple_omp_for_set_clauses (ctx->stmt, c);
4532 /* Emit calls denoting privatized variables and initializing a pointer to
4533 structure that holds private variables as fields after ompdevlow pass. */
4534 if (sctx.is_simt)
4536 sctx.simt_eargs[0] = uid;
4537 gimple *g
4538 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4539 gimple_call_set_lhs (g, uid);
4540 gimple_seq_add_stmt (ilist, g);
4541 sctx.simt_eargs.release ();
4543 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4544 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4545 gimple_call_set_lhs (g, simtrec);
4546 gimple_seq_add_stmt (ilist, g);
4548 if (sctx.lane)
4550 gimple *g
4551 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4552 gimple_call_set_lhs (g, sctx.lane);
4553 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4554 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4555 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4556 build_int_cst (unsigned_type_node, 0));
4557 gimple_seq_add_stmt (ilist, g);
4558 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4559 if (llist[2])
4561 tree simt_vf = create_tmp_var (unsigned_type_node);
4562 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4563 gimple_call_set_lhs (g, simt_vf);
4564 gimple_seq_add_stmt (dlist, g);
4566 tree t = build_int_cst (unsigned_type_node, 1);
4567 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4568 gimple_seq_add_stmt (dlist, g);
4570 t = build_int_cst (unsigned_type_node, 0);
4571 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4572 gimple_seq_add_stmt (dlist, g);
4574 tree body = create_artificial_label (UNKNOWN_LOCATION);
4575 tree header = create_artificial_label (UNKNOWN_LOCATION);
4576 tree end = create_artificial_label (UNKNOWN_LOCATION);
4577 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4578 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4580 gimple_seq_add_seq (dlist, llist[2]);
4582 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4583 gimple_seq_add_stmt (dlist, g);
4585 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4586 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4587 gimple_seq_add_stmt (dlist, g);
4589 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4591 for (int i = 0; i < 2; i++)
4592 if (llist[i])
4594 tree vf = create_tmp_var (unsigned_type_node);
4595 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4596 gimple_call_set_lhs (g, vf);
4597 gimple_seq *seq = i == 0 ? ilist : dlist;
4598 gimple_seq_add_stmt (seq, g);
4599 tree t = build_int_cst (unsigned_type_node, 0);
4600 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4601 gimple_seq_add_stmt (seq, g);
4602 tree body = create_artificial_label (UNKNOWN_LOCATION);
4603 tree header = create_artificial_label (UNKNOWN_LOCATION);
4604 tree end = create_artificial_label (UNKNOWN_LOCATION);
4605 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4606 gimple_seq_add_stmt (seq, gimple_build_label (body));
4607 gimple_seq_add_seq (seq, llist[i]);
4608 t = build_int_cst (unsigned_type_node, 1);
4609 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4610 gimple_seq_add_stmt (seq, g);
4611 gimple_seq_add_stmt (seq, gimple_build_label (header));
4612 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4613 gimple_seq_add_stmt (seq, g);
4614 gimple_seq_add_stmt (seq, gimple_build_label (end));
4617 if (sctx.is_simt)
4619 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4620 gimple *g
4621 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4622 gimple_seq_add_stmt (dlist, g);
4625 /* The copyin sequence is not to be executed by the main thread, since
4626 that would result in self-copies. Perhaps not visible to scalars,
4627 but it certainly is to C++ operator=. */
4628 if (copyin_seq)
4630 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4632 x = build2 (NE_EXPR, boolean_type_node, x,
4633 build_int_cst (TREE_TYPE (x), 0));
4634 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4635 gimplify_and_add (x, ilist);
4638 /* If any copyin variable is passed by reference, we must ensure the
4639 master thread doesn't modify it before it is copied over in all
4640 threads. Similarly for variables in both firstprivate and
4641 lastprivate clauses we need to ensure the lastprivate copying
4642 happens after firstprivate copying in all threads. And similarly
4643 for UDRs if initializer expression refers to omp_orig. */
4644 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4646 /* Don't add any barrier for #pragma omp simd or
4647 #pragma omp distribute. */
4648 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4649 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4650 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4653 /* If max_vf is non-zero, then we can use only a vectorization factor
4654 up to the max_vf we chose. So stick it into the safelen clause. */
4655 if (sctx.max_vf)
4657 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4658 OMP_CLAUSE_SAFELEN);
4659 if (c == NULL_TREE
4660 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4661 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4662 sctx.max_vf) == 1))
4664 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4665 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4666 sctx.max_vf);
4667 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4668 gimple_omp_for_set_clauses (ctx->stmt, c);
4674 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4675 both parallel and workshare constructs. PREDICATE may be NULL if it's
4676 always true. */
4678 static void
4679 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4680 omp_context *ctx)
4682 tree x, c, label = NULL, orig_clauses = clauses;
4683 bool par_clauses = false;
4684 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4686 /* Early exit if there are no lastprivate or linear clauses. */
4687 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4688 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4689 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4690 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4691 break;
4692 if (clauses == NULL)
4694 /* If this was a workshare clause, see if it had been combined
4695 with its parallel. In that case, look for the clauses on the
4696 parallel statement itself. */
4697 if (is_parallel_ctx (ctx))
4698 return;
4700 ctx = ctx->outer;
4701 if (ctx == NULL || !is_parallel_ctx (ctx))
4702 return;
4704 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4705 OMP_CLAUSE_LASTPRIVATE);
4706 if (clauses == NULL)
4707 return;
4708 par_clauses = true;
4711 bool maybe_simt = false;
4712 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4713 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4715 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4716 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4717 if (simduid)
4718 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4721 if (predicate)
4723 gcond *stmt;
4724 tree label_true, arm1, arm2;
4725 enum tree_code pred_code = TREE_CODE (predicate);
4727 label = create_artificial_label (UNKNOWN_LOCATION);
4728 label_true = create_artificial_label (UNKNOWN_LOCATION);
4729 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4731 arm1 = TREE_OPERAND (predicate, 0);
4732 arm2 = TREE_OPERAND (predicate, 1);
4733 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4734 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4736 else
4738 arm1 = predicate;
4739 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4740 arm2 = boolean_false_node;
4741 pred_code = NE_EXPR;
4743 if (maybe_simt)
4745 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4746 c = fold_convert (integer_type_node, c);
4747 simtcond = create_tmp_var (integer_type_node);
4748 gimplify_assign (simtcond, c, stmt_list);
4749 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4750 1, simtcond);
4751 c = create_tmp_var (integer_type_node);
4752 gimple_call_set_lhs (g, c);
4753 gimple_seq_add_stmt (stmt_list, g);
4754 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4755 label_true, label);
4757 else
4758 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4759 gimple_seq_add_stmt (stmt_list, stmt);
4760 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4763 for (c = clauses; c ;)
4765 tree var, new_var;
4766 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4768 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4769 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4770 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4772 var = OMP_CLAUSE_DECL (c);
4773 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4774 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4775 && is_taskloop_ctx (ctx))
4777 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4778 new_var = lookup_decl (var, ctx->outer);
4780 else
4782 new_var = lookup_decl (var, ctx);
4783 /* Avoid uninitialized warnings for lastprivate and
4784 for linear iterators. */
4785 if (predicate
4786 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4787 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4788 TREE_NO_WARNING (new_var) = 1;
4791 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4793 tree val = DECL_VALUE_EXPR (new_var);
4794 if (TREE_CODE (val) == ARRAY_REF
4795 && VAR_P (TREE_OPERAND (val, 0))
4796 && lookup_attribute ("omp simd array",
4797 DECL_ATTRIBUTES (TREE_OPERAND (val,
4798 0))))
4800 if (lastlane == NULL)
4802 lastlane = create_tmp_var (unsigned_type_node);
4803 gcall *g
4804 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4805 2, simduid,
4806 TREE_OPERAND (val, 1));
4807 gimple_call_set_lhs (g, lastlane);
4808 gimple_seq_add_stmt (stmt_list, g);
4810 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4811 TREE_OPERAND (val, 0), lastlane,
4812 NULL_TREE, NULL_TREE);
4815 else if (maybe_simt)
4817 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4818 ? DECL_VALUE_EXPR (new_var)
4819 : new_var);
4820 if (simtlast == NULL)
4822 simtlast = create_tmp_var (unsigned_type_node);
4823 gcall *g = gimple_build_call_internal
4824 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4825 gimple_call_set_lhs (g, simtlast);
4826 gimple_seq_add_stmt (stmt_list, g);
4828 x = build_call_expr_internal_loc
4829 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4830 TREE_TYPE (val), 2, val, simtlast);
4831 new_var = unshare_expr (new_var);
4832 gimplify_assign (new_var, x, stmt_list);
4833 new_var = unshare_expr (new_var);
4836 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4837 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4839 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4840 gimple_seq_add_seq (stmt_list,
4841 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4842 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4844 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4845 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4847 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4848 gimple_seq_add_seq (stmt_list,
4849 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4850 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4853 x = NULL_TREE;
4854 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4855 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4857 gcc_checking_assert (is_taskloop_ctx (ctx));
4858 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4859 ctx->outer->outer);
4860 if (is_global_var (ovar))
4861 x = ovar;
4863 if (!x)
4864 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4865 if (omp_is_reference (var))
4866 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4867 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4868 gimplify_and_add (x, stmt_list);
4870 c = OMP_CLAUSE_CHAIN (c);
4871 if (c == NULL && !par_clauses)
4873 /* If this was a workshare clause, see if it had been combined
4874 with its parallel. In that case, continue looking for the
4875 clauses also on the parallel statement itself. */
4876 if (is_parallel_ctx (ctx))
4877 break;
4879 ctx = ctx->outer;
4880 if (ctx == NULL || !is_parallel_ctx (ctx))
4881 break;
4883 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4884 OMP_CLAUSE_LASTPRIVATE);
4885 par_clauses = true;
4889 if (label)
4890 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4893 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4894 (which might be a placeholder). INNER is true if this is an inner
4895 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4896 join markers. Generate the before-loop forking sequence in
4897 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4898 general form of these sequences is
4900 GOACC_REDUCTION_SETUP
4901 GOACC_FORK
4902 GOACC_REDUCTION_INIT
4904 GOACC_REDUCTION_FINI
4905 GOACC_JOIN
4906 GOACC_REDUCTION_TEARDOWN. */
4908 static void
4909 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4910 gcall *fork, gcall *join, gimple_seq *fork_seq,
4911 gimple_seq *join_seq, omp_context *ctx)
4913 gimple_seq before_fork = NULL;
4914 gimple_seq after_fork = NULL;
4915 gimple_seq before_join = NULL;
4916 gimple_seq after_join = NULL;
4917 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4918 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4919 unsigned offset = 0;
4921 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4922 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4924 tree orig = OMP_CLAUSE_DECL (c);
4925 tree var = maybe_lookup_decl (orig, ctx);
4926 tree ref_to_res = NULL_TREE;
4927 tree incoming, outgoing, v1, v2, v3;
4928 bool is_private = false;
4930 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4931 if (rcode == MINUS_EXPR)
4932 rcode = PLUS_EXPR;
4933 else if (rcode == TRUTH_ANDIF_EXPR)
4934 rcode = BIT_AND_EXPR;
4935 else if (rcode == TRUTH_ORIF_EXPR)
4936 rcode = BIT_IOR_EXPR;
4937 tree op = build_int_cst (unsigned_type_node, rcode);
4939 if (!var)
4940 var = orig;
4942 incoming = outgoing = var;
4944 if (!inner)
4946 /* See if an outer construct also reduces this variable. */
4947 omp_context *outer = ctx;
4949 while (omp_context *probe = outer->outer)
4951 enum gimple_code type = gimple_code (probe->stmt);
4952 tree cls;
4954 switch (type)
4956 case GIMPLE_OMP_FOR:
4957 cls = gimple_omp_for_clauses (probe->stmt);
4958 break;
4960 case GIMPLE_OMP_TARGET:
4961 if (gimple_omp_target_kind (probe->stmt)
4962 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4963 goto do_lookup;
4965 cls = gimple_omp_target_clauses (probe->stmt);
4966 break;
4968 default:
4969 goto do_lookup;
4972 outer = probe;
4973 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4974 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4975 && orig == OMP_CLAUSE_DECL (cls))
4977 incoming = outgoing = lookup_decl (orig, probe);
4978 goto has_outer_reduction;
4980 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4981 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4982 && orig == OMP_CLAUSE_DECL (cls))
4984 is_private = true;
4985 goto do_lookup;
4989 do_lookup:
4990 /* This is the outermost construct with this reduction,
4991 see if there's a mapping for it. */
4992 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4993 && maybe_lookup_field (orig, outer) && !is_private)
4995 ref_to_res = build_receiver_ref (orig, false, outer);
4996 if (omp_is_reference (orig))
4997 ref_to_res = build_simple_mem_ref (ref_to_res);
4999 tree type = TREE_TYPE (var);
5000 if (POINTER_TYPE_P (type))
5001 type = TREE_TYPE (type);
5003 outgoing = var;
5004 incoming = omp_reduction_init_op (loc, rcode, type);
5006 else
5008 /* Try to look at enclosing contexts for reduction var,
5009 use original if no mapping found. */
5010 tree t = NULL_TREE;
5011 omp_context *c = ctx->outer;
5012 while (c && !t)
5014 t = maybe_lookup_decl (orig, c);
5015 c = c->outer;
5017 incoming = outgoing = (t ? t : orig);
5020 has_outer_reduction:;
5023 if (!ref_to_res)
5024 ref_to_res = integer_zero_node;
5026 if (omp_is_reference (orig))
5028 tree type = TREE_TYPE (var);
5029 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5031 if (!inner)
5033 tree x = create_tmp_var (TREE_TYPE (type), id);
5034 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5037 v1 = create_tmp_var (type, id);
5038 v2 = create_tmp_var (type, id);
5039 v3 = create_tmp_var (type, id);
5041 gimplify_assign (v1, var, fork_seq);
5042 gimplify_assign (v2, var, fork_seq);
5043 gimplify_assign (v3, var, fork_seq);
5045 var = build_simple_mem_ref (var);
5046 v1 = build_simple_mem_ref (v1);
5047 v2 = build_simple_mem_ref (v2);
5048 v3 = build_simple_mem_ref (v3);
5049 outgoing = build_simple_mem_ref (outgoing);
5051 if (!TREE_CONSTANT (incoming))
5052 incoming = build_simple_mem_ref (incoming);
5054 else
5055 v1 = v2 = v3 = var;
5057 /* Determine position in reduction buffer, which may be used
5058 by target. */
5059 enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5060 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5061 offset = (offset + align - 1) & ~(align - 1);
5062 tree off = build_int_cst (sizetype, offset);
5063 offset += GET_MODE_SIZE (mode);
5065 if (!init_code)
5067 init_code = build_int_cst (integer_type_node,
5068 IFN_GOACC_REDUCTION_INIT);
5069 fini_code = build_int_cst (integer_type_node,
5070 IFN_GOACC_REDUCTION_FINI);
5071 setup_code = build_int_cst (integer_type_node,
5072 IFN_GOACC_REDUCTION_SETUP);
5073 teardown_code = build_int_cst (integer_type_node,
5074 IFN_GOACC_REDUCTION_TEARDOWN);
5077 tree setup_call
5078 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5079 TREE_TYPE (var), 6, setup_code,
5080 unshare_expr (ref_to_res),
5081 incoming, level, op, off);
5082 tree init_call
5083 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5084 TREE_TYPE (var), 6, init_code,
5085 unshare_expr (ref_to_res),
5086 v1, level, op, off);
5087 tree fini_call
5088 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5089 TREE_TYPE (var), 6, fini_code,
5090 unshare_expr (ref_to_res),
5091 v2, level, op, off);
5092 tree teardown_call
5093 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5094 TREE_TYPE (var), 6, teardown_code,
5095 ref_to_res, v3, level, op, off);
5097 gimplify_assign (v1, setup_call, &before_fork);
5098 gimplify_assign (v2, init_call, &after_fork);
5099 gimplify_assign (v3, fini_call, &before_join);
5100 gimplify_assign (outgoing, teardown_call, &after_join);
5103 /* Now stitch things together. */
5104 gimple_seq_add_seq (fork_seq, before_fork);
5105 if (fork)
5106 gimple_seq_add_stmt (fork_seq, fork);
5107 gimple_seq_add_seq (fork_seq, after_fork);
5109 gimple_seq_add_seq (join_seq, before_join);
5110 if (join)
5111 gimple_seq_add_stmt (join_seq, join);
5112 gimple_seq_add_seq (join_seq, after_join);
5115 /* Generate code to implement the REDUCTION clauses. */
5117 static void
5118 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5120 gimple_seq sub_seq = NULL;
5121 gimple *stmt;
5122 tree x, c;
5123 int count = 0;
5125 /* OpenACC loop reductions are handled elsewhere. */
5126 if (is_gimple_omp_oacc (ctx->stmt))
5127 return;
5129 /* SIMD reductions are handled in lower_rec_input_clauses. */
5130 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5131 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5132 return;
5134 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5135 update in that case, otherwise use a lock. */
5136 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5137 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5139 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5140 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5142 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5143 count = -1;
5144 break;
5146 count++;
5149 if (count == 0)
5150 return;
5152 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5154 tree var, ref, new_var, orig_var;
5155 enum tree_code code;
5156 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5158 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5159 continue;
5161 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5162 orig_var = var = OMP_CLAUSE_DECL (c);
5163 if (TREE_CODE (var) == MEM_REF)
5165 var = TREE_OPERAND (var, 0);
5166 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5167 var = TREE_OPERAND (var, 0);
5168 if (TREE_CODE (var) == ADDR_EXPR)
5169 var = TREE_OPERAND (var, 0);
5170 else
5172 /* If this is a pointer or referenced based array
5173 section, the var could be private in the outer
5174 context e.g. on orphaned loop construct. Pretend this
5175 is private variable's outer reference. */
5176 ccode = OMP_CLAUSE_PRIVATE;
5177 if (TREE_CODE (var) == INDIRECT_REF)
5178 var = TREE_OPERAND (var, 0);
5180 orig_var = var;
5181 if (is_variable_sized (var))
5183 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5184 var = DECL_VALUE_EXPR (var);
5185 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5186 var = TREE_OPERAND (var, 0);
5187 gcc_assert (DECL_P (var));
5190 new_var = lookup_decl (var, ctx);
5191 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5192 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5193 ref = build_outer_var_ref (var, ctx, ccode);
5194 code = OMP_CLAUSE_REDUCTION_CODE (c);
5196 /* reduction(-:var) sums up the partial results, so it acts
5197 identically to reduction(+:var). */
5198 if (code == MINUS_EXPR)
5199 code = PLUS_EXPR;
5201 if (count == 1)
5203 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5205 addr = save_expr (addr);
5206 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5207 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5208 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5209 gimplify_and_add (x, stmt_seqp);
5210 return;
5212 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5214 tree d = OMP_CLAUSE_DECL (c);
5215 tree type = TREE_TYPE (d);
5216 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5217 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5218 tree ptype = build_pointer_type (TREE_TYPE (type));
5219 tree bias = TREE_OPERAND (d, 1);
5220 d = TREE_OPERAND (d, 0);
5221 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5223 tree b = TREE_OPERAND (d, 1);
5224 b = maybe_lookup_decl (b, ctx);
5225 if (b == NULL)
5227 b = TREE_OPERAND (d, 1);
5228 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5230 if (integer_zerop (bias))
5231 bias = b;
5232 else
5234 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5235 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5236 TREE_TYPE (b), b, bias);
5238 d = TREE_OPERAND (d, 0);
5240 /* For ref build_outer_var_ref already performs this, so
5241 only new_var needs a dereference. */
5242 if (TREE_CODE (d) == INDIRECT_REF)
5244 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5245 gcc_assert (omp_is_reference (var) && var == orig_var);
5247 else if (TREE_CODE (d) == ADDR_EXPR)
5249 if (orig_var == var)
5251 new_var = build_fold_addr_expr (new_var);
5252 ref = build_fold_addr_expr (ref);
5255 else
5257 gcc_assert (orig_var == var);
5258 if (omp_is_reference (var))
5259 ref = build_fold_addr_expr (ref);
5261 if (DECL_P (v))
5263 tree t = maybe_lookup_decl (v, ctx);
5264 if (t)
5265 v = t;
5266 else
5267 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5268 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5270 if (!integer_zerop (bias))
5272 bias = fold_convert_loc (clause_loc, sizetype, bias);
5273 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5274 TREE_TYPE (new_var), new_var,
5275 unshare_expr (bias));
5276 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5277 TREE_TYPE (ref), ref, bias);
5279 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5280 ref = fold_convert_loc (clause_loc, ptype, ref);
5281 tree m = create_tmp_var (ptype, NULL);
5282 gimplify_assign (m, new_var, stmt_seqp);
5283 new_var = m;
5284 m = create_tmp_var (ptype, NULL);
5285 gimplify_assign (m, ref, stmt_seqp);
5286 ref = m;
5287 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5288 tree body = create_artificial_label (UNKNOWN_LOCATION);
5289 tree end = create_artificial_label (UNKNOWN_LOCATION);
5290 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5291 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5292 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5293 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5295 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5296 tree decl_placeholder
5297 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5298 SET_DECL_VALUE_EXPR (placeholder, out);
5299 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5300 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5301 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5302 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5303 gimple_seq_add_seq (&sub_seq,
5304 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5305 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5306 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5307 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5309 else
5311 x = build2 (code, TREE_TYPE (out), out, priv);
5312 out = unshare_expr (out);
5313 gimplify_assign (out, x, &sub_seq);
5315 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5316 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5317 gimple_seq_add_stmt (&sub_seq, g);
5318 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5319 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5320 gimple_seq_add_stmt (&sub_seq, g);
5321 g = gimple_build_assign (i, PLUS_EXPR, i,
5322 build_int_cst (TREE_TYPE (i), 1));
5323 gimple_seq_add_stmt (&sub_seq, g);
5324 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5325 gimple_seq_add_stmt (&sub_seq, g);
5326 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5328 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5330 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5332 if (omp_is_reference (var)
5333 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5334 TREE_TYPE (ref)))
5335 ref = build_fold_addr_expr_loc (clause_loc, ref);
5336 SET_DECL_VALUE_EXPR (placeholder, ref);
5337 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5338 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5339 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5340 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5341 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5343 else
5345 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5346 ref = build_outer_var_ref (var, ctx);
5347 gimplify_assign (ref, x, &sub_seq);
5351 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5353 gimple_seq_add_stmt (stmt_seqp, stmt);
5355 gimple_seq_add_seq (stmt_seqp, sub_seq);
5357 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5359 gimple_seq_add_stmt (stmt_seqp, stmt);
5363 /* Generate code to implement the COPYPRIVATE clauses. */
5365 static void
5366 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5367 omp_context *ctx)
5369 tree c;
5371 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5373 tree var, new_var, ref, x;
5374 bool by_ref;
5375 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5377 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5378 continue;
5380 var = OMP_CLAUSE_DECL (c);
5381 by_ref = use_pointer_for_field (var, NULL);
5383 ref = build_sender_ref (var, ctx);
5384 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5385 if (by_ref)
5387 x = build_fold_addr_expr_loc (clause_loc, new_var);
5388 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5390 gimplify_assign (ref, x, slist);
5392 ref = build_receiver_ref (var, false, ctx);
5393 if (by_ref)
5395 ref = fold_convert_loc (clause_loc,
5396 build_pointer_type (TREE_TYPE (new_var)),
5397 ref);
5398 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5400 if (omp_is_reference (var))
5402 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5403 ref = build_simple_mem_ref_loc (clause_loc, ref);
5404 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5406 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5407 gimplify_and_add (x, rlist);
5412 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5413 and REDUCTION from the sender (aka parent) side. */
5415 static void
5416 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5417 omp_context *ctx)
5419 tree c, t;
5420 int ignored_looptemp = 0;
5421 bool is_taskloop = false;
5423 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5424 by GOMP_taskloop. */
5425 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5427 ignored_looptemp = 2;
5428 is_taskloop = true;
5431 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5433 tree val, ref, x, var;
5434 bool by_ref, do_in = false, do_out = false;
5435 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5437 switch (OMP_CLAUSE_CODE (c))
5439 case OMP_CLAUSE_PRIVATE:
5440 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5441 break;
5442 continue;
5443 case OMP_CLAUSE_FIRSTPRIVATE:
5444 case OMP_CLAUSE_COPYIN:
5445 case OMP_CLAUSE_LASTPRIVATE:
5446 case OMP_CLAUSE_REDUCTION:
5447 break;
5448 case OMP_CLAUSE_SHARED:
5449 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5450 break;
5451 continue;
5452 case OMP_CLAUSE__LOOPTEMP_:
5453 if (ignored_looptemp)
5455 ignored_looptemp--;
5456 continue;
5458 break;
5459 default:
5460 continue;
5463 val = OMP_CLAUSE_DECL (c);
5464 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5465 && TREE_CODE (val) == MEM_REF)
5467 val = TREE_OPERAND (val, 0);
5468 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5469 val = TREE_OPERAND (val, 0);
5470 if (TREE_CODE (val) == INDIRECT_REF
5471 || TREE_CODE (val) == ADDR_EXPR)
5472 val = TREE_OPERAND (val, 0);
5473 if (is_variable_sized (val))
5474 continue;
5477 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5478 outer taskloop region. */
5479 omp_context *ctx_for_o = ctx;
5480 if (is_taskloop
5481 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5482 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5483 ctx_for_o = ctx->outer;
5485 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5487 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5488 && is_global_var (var))
5489 continue;
5491 t = omp_member_access_dummy_var (var);
5492 if (t)
5494 var = DECL_VALUE_EXPR (var);
5495 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5496 if (o != t)
5497 var = unshare_and_remap (var, t, o);
5498 else
5499 var = unshare_expr (var);
5502 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5504 /* Handle taskloop firstprivate/lastprivate, where the
5505 lastprivate on GIMPLE_OMP_TASK is represented as
5506 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5507 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5508 x = omp_build_component_ref (ctx->sender_decl, f);
5509 if (use_pointer_for_field (val, ctx))
5510 var = build_fold_addr_expr (var);
5511 gimplify_assign (x, var, ilist);
5512 DECL_ABSTRACT_ORIGIN (f) = NULL;
5513 continue;
5516 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5517 || val == OMP_CLAUSE_DECL (c))
5518 && is_variable_sized (val))
5519 continue;
5520 by_ref = use_pointer_for_field (val, NULL);
5522 switch (OMP_CLAUSE_CODE (c))
5524 case OMP_CLAUSE_FIRSTPRIVATE:
5525 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5526 && !by_ref
5527 && is_task_ctx (ctx))
5528 TREE_NO_WARNING (var) = 1;
5529 do_in = true;
5530 break;
5532 case OMP_CLAUSE_PRIVATE:
5533 case OMP_CLAUSE_COPYIN:
5534 case OMP_CLAUSE__LOOPTEMP_:
5535 do_in = true;
5536 break;
5538 case OMP_CLAUSE_LASTPRIVATE:
5539 if (by_ref || omp_is_reference (val))
5541 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5542 continue;
5543 do_in = true;
5545 else
5547 do_out = true;
5548 if (lang_hooks.decls.omp_private_outer_ref (val))
5549 do_in = true;
5551 break;
5553 case OMP_CLAUSE_REDUCTION:
5554 do_in = true;
5555 if (val == OMP_CLAUSE_DECL (c))
5556 do_out = !(by_ref || omp_is_reference (val));
5557 else
5558 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5559 break;
5561 default:
5562 gcc_unreachable ();
5565 if (do_in)
5567 ref = build_sender_ref (val, ctx);
5568 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5569 gimplify_assign (ref, x, ilist);
5570 if (is_task_ctx (ctx))
5571 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5574 if (do_out)
5576 ref = build_sender_ref (val, ctx);
5577 gimplify_assign (var, ref, olist);
5582 /* Generate code to implement SHARED from the sender (aka parent)
5583 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5584 list things that got automatically shared. */
5586 static void
5587 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5589 tree var, ovar, nvar, t, f, x, record_type;
5591 if (ctx->record_type == NULL)
5592 return;
5594 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5595 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5597 ovar = DECL_ABSTRACT_ORIGIN (f);
5598 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5599 continue;
5601 nvar = maybe_lookup_decl (ovar, ctx);
5602 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5603 continue;
5605 /* If CTX is a nested parallel directive. Find the immediately
5606 enclosing parallel or workshare construct that contains a
5607 mapping for OVAR. */
5608 var = lookup_decl_in_outer_ctx (ovar, ctx);
5610 t = omp_member_access_dummy_var (var);
5611 if (t)
5613 var = DECL_VALUE_EXPR (var);
5614 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5615 if (o != t)
5616 var = unshare_and_remap (var, t, o);
5617 else
5618 var = unshare_expr (var);
5621 if (use_pointer_for_field (ovar, ctx))
5623 x = build_sender_ref (ovar, ctx);
5624 var = build_fold_addr_expr (var);
5625 gimplify_assign (x, var, ilist);
5627 else
5629 x = build_sender_ref (ovar, ctx);
5630 gimplify_assign (x, var, ilist);
5632 if (!TREE_READONLY (var)
5633 /* We don't need to receive a new reference to a result
5634 or parm decl. In fact we may not store to it as we will
5635 invalidate any pending RSO and generate wrong gimple
5636 during inlining. */
5637 && !((TREE_CODE (var) == RESULT_DECL
5638 || TREE_CODE (var) == PARM_DECL)
5639 && DECL_BY_REFERENCE (var)))
5641 x = build_sender_ref (ovar, ctx);
5642 gimplify_assign (var, x, olist);
5648 /* Emit an OpenACC head marker call, encapulating the partitioning and
5649 other information that must be processed by the target compiler.
5650 Return the maximum number of dimensions the associated loop might
5651 be partitioned over. */
5653 static unsigned
5654 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5655 gimple_seq *seq, omp_context *ctx)
5657 unsigned levels = 0;
5658 unsigned tag = 0;
5659 tree gang_static = NULL_TREE;
5660 auto_vec<tree, 5> args;
5662 args.quick_push (build_int_cst
5663 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5664 args.quick_push (ddvar);
5665 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5667 switch (OMP_CLAUSE_CODE (c))
5669 case OMP_CLAUSE_GANG:
5670 tag |= OLF_DIM_GANG;
5671 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5672 /* static:* is represented by -1, and we can ignore it, as
5673 scheduling is always static. */
5674 if (gang_static && integer_minus_onep (gang_static))
5675 gang_static = NULL_TREE;
5676 levels++;
5677 break;
5679 case OMP_CLAUSE_WORKER:
5680 tag |= OLF_DIM_WORKER;
5681 levels++;
5682 break;
5684 case OMP_CLAUSE_VECTOR:
5685 tag |= OLF_DIM_VECTOR;
5686 levels++;
5687 break;
5689 case OMP_CLAUSE_SEQ:
5690 tag |= OLF_SEQ;
5691 break;
5693 case OMP_CLAUSE_AUTO:
5694 tag |= OLF_AUTO;
5695 break;
5697 case OMP_CLAUSE_INDEPENDENT:
5698 tag |= OLF_INDEPENDENT;
5699 break;
5701 case OMP_CLAUSE_TILE:
5702 tag |= OLF_TILE;
5703 break;
5705 default:
5706 continue;
5710 if (gang_static)
5712 if (DECL_P (gang_static))
5713 gang_static = build_outer_var_ref (gang_static, ctx);
5714 tag |= OLF_GANG_STATIC;
5717 /* In a parallel region, loops are implicitly INDEPENDENT. */
5718 omp_context *tgt = enclosing_target_ctx (ctx);
5719 if (!tgt || is_oacc_parallel (tgt))
5720 tag |= OLF_INDEPENDENT;
5722 if (tag & OLF_TILE)
5723 /* Tiling could use all 3 levels. */
5724 levels = 3;
5725 else
5727 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5728 Ensure at least one level, or 2 for possible auto
5729 partitioning */
5730 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5731 << OLF_DIM_BASE) | OLF_SEQ));
5733 if (levels < 1u + maybe_auto)
5734 levels = 1u + maybe_auto;
5737 args.quick_push (build_int_cst (integer_type_node, levels));
5738 args.quick_push (build_int_cst (integer_type_node, tag));
5739 if (gang_static)
5740 args.quick_push (gang_static);
5742 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5743 gimple_set_location (call, loc);
5744 gimple_set_lhs (call, ddvar);
5745 gimple_seq_add_stmt (seq, call);
5747 return levels;
5750 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
5751 partitioning level of the enclosed region. */
5753 static void
5754 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5755 tree tofollow, gimple_seq *seq)
5757 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5758 : IFN_UNIQUE_OACC_TAIL_MARK);
5759 tree marker = build_int_cst (integer_type_node, marker_kind);
5760 int nargs = 2 + (tofollow != NULL_TREE);
5761 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5762 marker, ddvar, tofollow);
5763 gimple_set_location (call, loc);
5764 gimple_set_lhs (call, ddvar);
5765 gimple_seq_add_stmt (seq, call);
5768 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5769 the loop clauses, from which we extract reductions. Initialize
5770 HEAD and TAIL. */
5772 static void
5773 lower_oacc_head_tail (location_t loc, tree clauses,
5774 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5776 bool inner = false;
5777 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5778 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5780 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5781 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5782 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5784 gcc_assert (count);
5785 for (unsigned done = 1; count; count--, done++)
5787 gimple_seq fork_seq = NULL;
5788 gimple_seq join_seq = NULL;
5790 tree place = build_int_cst (integer_type_node, -1);
5791 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5792 fork_kind, ddvar, place);
5793 gimple_set_location (fork, loc);
5794 gimple_set_lhs (fork, ddvar);
5796 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5797 join_kind, ddvar, place);
5798 gimple_set_location (join, loc);
5799 gimple_set_lhs (join, ddvar);
5801 /* Mark the beginning of this level sequence. */
5802 if (inner)
5803 lower_oacc_loop_marker (loc, ddvar, true,
5804 build_int_cst (integer_type_node, count),
5805 &fork_seq);
5806 lower_oacc_loop_marker (loc, ddvar, false,
5807 build_int_cst (integer_type_node, done),
5808 &join_seq);
5810 lower_oacc_reductions (loc, clauses, place, inner,
5811 fork, join, &fork_seq, &join_seq, ctx);
5813 /* Append this level to head. */
5814 gimple_seq_add_seq (head, fork_seq);
5815 /* Prepend it to tail. */
5816 gimple_seq_add_seq (&join_seq, *tail);
5817 *tail = join_seq;
5819 inner = true;
5822 /* Mark the end of the sequence. */
5823 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5824 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5827 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5828 catch handler and return it. This prevents programs from violating the
5829 structured block semantics with throws. */
5831 static gimple_seq
5832 maybe_catch_exception (gimple_seq body)
5834 gimple *g;
5835 tree decl;
5837 if (!flag_exceptions)
5838 return body;
5840 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5841 decl = lang_hooks.eh_protect_cleanup_actions ();
5842 else
5843 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5845 g = gimple_build_eh_must_not_throw (decl);
5846 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5847 GIMPLE_TRY_CATCH);
5849 return gimple_seq_alloc_with_stmt (g);
5853 /* Routines to lower OMP directives into OMP-GIMPLE. */
5855 /* If ctx is a worksharing context inside of a cancellable parallel
5856 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5857 and conditional branch to parallel's cancel_label to handle
5858 cancellation in the implicit barrier. */
5860 static void
5861 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5863 gimple *omp_return = gimple_seq_last_stmt (*body);
5864 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5865 if (gimple_omp_return_nowait_p (omp_return))
5866 return;
5867 if (ctx->outer
5868 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5869 && ctx->outer->cancellable)
5871 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5872 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5873 tree lhs = create_tmp_var (c_bool_type);
5874 gimple_omp_return_set_lhs (omp_return, lhs);
5875 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5876 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5877 fold_convert (c_bool_type,
5878 boolean_false_node),
5879 ctx->outer->cancel_label, fallthru_label);
5880 gimple_seq_add_stmt (body, g);
5881 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5885 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5886 CTX is the enclosing OMP context for the current statement. */
5888 static void
5889 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5891 tree block, control;
5892 gimple_stmt_iterator tgsi;
5893 gomp_sections *stmt;
5894 gimple *t;
5895 gbind *new_stmt, *bind;
5896 gimple_seq ilist, dlist, olist, new_body;
5898 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5900 push_gimplify_context ();
5902 dlist = NULL;
5903 ilist = NULL;
5904 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5905 &ilist, &dlist, ctx, NULL);
5907 new_body = gimple_omp_body (stmt);
5908 gimple_omp_set_body (stmt, NULL);
5909 tgsi = gsi_start (new_body);
5910 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5912 omp_context *sctx;
5913 gimple *sec_start;
5915 sec_start = gsi_stmt (tgsi);
5916 sctx = maybe_lookup_ctx (sec_start);
5917 gcc_assert (sctx);
5919 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5920 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5921 GSI_CONTINUE_LINKING);
5922 gimple_omp_set_body (sec_start, NULL);
5924 if (gsi_one_before_end_p (tgsi))
5926 gimple_seq l = NULL;
5927 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5928 &l, ctx);
5929 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5930 gimple_omp_section_set_last (sec_start);
5933 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5934 GSI_CONTINUE_LINKING);
5937 block = make_node (BLOCK);
5938 bind = gimple_build_bind (NULL, new_body, block);
5940 olist = NULL;
5941 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5943 block = make_node (BLOCK);
5944 new_stmt = gimple_build_bind (NULL, NULL, block);
5945 gsi_replace (gsi_p, new_stmt, true);
5947 pop_gimplify_context (new_stmt);
5948 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5949 BLOCK_VARS (block) = gimple_bind_vars (bind);
5950 if (BLOCK_VARS (block))
5951 TREE_USED (block) = 1;
5953 new_body = NULL;
5954 gimple_seq_add_seq (&new_body, ilist);
5955 gimple_seq_add_stmt (&new_body, stmt);
5956 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5957 gimple_seq_add_stmt (&new_body, bind);
5959 control = create_tmp_var (unsigned_type_node, ".section");
5960 t = gimple_build_omp_continue (control, control);
5961 gimple_omp_sections_set_control (stmt, control);
5962 gimple_seq_add_stmt (&new_body, t);
5964 gimple_seq_add_seq (&new_body, olist);
5965 if (ctx->cancellable)
5966 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5967 gimple_seq_add_seq (&new_body, dlist);
5969 new_body = maybe_catch_exception (new_body);
5971 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5972 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5973 t = gimple_build_omp_return (nowait);
5974 gimple_seq_add_stmt (&new_body, t);
5975 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5977 gimple_bind_set_body (new_stmt, new_body);
5981 /* A subroutine of lower_omp_single. Expand the simple form of
5982 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5984 if (GOMP_single_start ())
5985 BODY;
5986 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5988 FIXME. It may be better to delay expanding the logic of this until
5989 pass_expand_omp. The expanded logic may make the job more difficult
5990 to a synchronization analysis pass. */
5992 static void
5993 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5995 location_t loc = gimple_location (single_stmt);
5996 tree tlabel = create_artificial_label (loc);
5997 tree flabel = create_artificial_label (loc);
5998 gimple *call, *cond;
5999 tree lhs, decl;
6001 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6002 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6003 call = gimple_build_call (decl, 0);
6004 gimple_call_set_lhs (call, lhs);
6005 gimple_seq_add_stmt (pre_p, call);
6007 cond = gimple_build_cond (EQ_EXPR, lhs,
6008 fold_convert_loc (loc, TREE_TYPE (lhs),
6009 boolean_true_node),
6010 tlabel, flabel);
6011 gimple_seq_add_stmt (pre_p, cond);
6012 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6013 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6014 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6018 /* A subroutine of lower_omp_single. Expand the simple form of
6019 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6021 #pragma omp single copyprivate (a, b, c)
6023 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6026 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6028 BODY;
6029 copyout.a = a;
6030 copyout.b = b;
6031 copyout.c = c;
6032 GOMP_single_copy_end (&copyout);
6034 else
6036 a = copyout_p->a;
6037 b = copyout_p->b;
6038 c = copyout_p->c;
6040 GOMP_barrier ();
6043 FIXME. It may be better to delay expanding the logic of this until
6044 pass_expand_omp. The expanded logic may make the job more difficult
6045 to a synchronization analysis pass. */
6047 static void
6048 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6049 omp_context *ctx)
6051 tree ptr_type, t, l0, l1, l2, bfn_decl;
6052 gimple_seq copyin_seq;
6053 location_t loc = gimple_location (single_stmt);
6055 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6057 ptr_type = build_pointer_type (ctx->record_type);
6058 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6060 l0 = create_artificial_label (loc);
6061 l1 = create_artificial_label (loc);
6062 l2 = create_artificial_label (loc);
6064 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6065 t = build_call_expr_loc (loc, bfn_decl, 0);
6066 t = fold_convert_loc (loc, ptr_type, t);
6067 gimplify_assign (ctx->receiver_decl, t, pre_p);
6069 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6070 build_int_cst (ptr_type, 0));
6071 t = build3 (COND_EXPR, void_type_node, t,
6072 build_and_jump (&l0), build_and_jump (&l1));
6073 gimplify_and_add (t, pre_p);
6075 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6077 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6079 copyin_seq = NULL;
6080 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6081 &copyin_seq, ctx);
6083 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6084 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6085 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6086 gimplify_and_add (t, pre_p);
6088 t = build_and_jump (&l2);
6089 gimplify_and_add (t, pre_p);
6091 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6093 gimple_seq_add_seq (pre_p, copyin_seq);
6095 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6099 /* Expand code for an OpenMP single directive. */
6101 static void
6102 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6104 tree block;
6105 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6106 gbind *bind;
6107 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6109 push_gimplify_context ();
6111 block = make_node (BLOCK);
6112 bind = gimple_build_bind (NULL, NULL, block);
6113 gsi_replace (gsi_p, bind, true);
6114 bind_body = NULL;
6115 dlist = NULL;
6116 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6117 &bind_body, &dlist, ctx, NULL);
6118 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6120 gimple_seq_add_stmt (&bind_body, single_stmt);
6122 if (ctx->record_type)
6123 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6124 else
6125 lower_omp_single_simple (single_stmt, &bind_body);
6127 gimple_omp_set_body (single_stmt, NULL);
6129 gimple_seq_add_seq (&bind_body, dlist);
6131 bind_body = maybe_catch_exception (bind_body);
6133 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6134 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6135 gimple *g = gimple_build_omp_return (nowait);
6136 gimple_seq_add_stmt (&bind_body_tail, g);
6137 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6138 if (ctx->record_type)
6140 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6141 tree clobber = build_constructor (ctx->record_type, NULL);
6142 TREE_THIS_VOLATILE (clobber) = 1;
6143 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6144 clobber), GSI_SAME_STMT);
6146 gimple_seq_add_seq (&bind_body, bind_body_tail);
6147 gimple_bind_set_body (bind, bind_body);
6149 pop_gimplify_context (bind);
6151 gimple_bind_append_vars (bind, ctx->block_vars);
6152 BLOCK_VARS (block) = ctx->block_vars;
6153 if (BLOCK_VARS (block))
6154 TREE_USED (block) = 1;
6158 /* Expand code for an OpenMP master directive. */
6160 static void
6161 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6163 tree block, lab = NULL, x, bfn_decl;
6164 gimple *stmt = gsi_stmt (*gsi_p);
6165 gbind *bind;
6166 location_t loc = gimple_location (stmt);
6167 gimple_seq tseq;
6169 push_gimplify_context ();
6171 block = make_node (BLOCK);
6172 bind = gimple_build_bind (NULL, NULL, block);
6173 gsi_replace (gsi_p, bind, true);
6174 gimple_bind_add_stmt (bind, stmt);
6176 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6177 x = build_call_expr_loc (loc, bfn_decl, 0);
6178 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6179 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6180 tseq = NULL;
6181 gimplify_and_add (x, &tseq);
6182 gimple_bind_add_seq (bind, tseq);
6184 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6185 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6186 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6187 gimple_omp_set_body (stmt, NULL);
6189 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6191 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6193 pop_gimplify_context (bind);
6195 gimple_bind_append_vars (bind, ctx->block_vars);
6196 BLOCK_VARS (block) = ctx->block_vars;
6200 /* Expand code for an OpenMP taskgroup directive. */
6202 static void
6203 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6205 gimple *stmt = gsi_stmt (*gsi_p);
6206 gcall *x;
6207 gbind *bind;
6208 tree block = make_node (BLOCK);
6210 bind = gimple_build_bind (NULL, NULL, block);
6211 gsi_replace (gsi_p, bind, true);
6212 gimple_bind_add_stmt (bind, stmt);
6214 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6216 gimple_bind_add_stmt (bind, x);
6218 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6219 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6220 gimple_omp_set_body (stmt, NULL);
6222 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6224 gimple_bind_append_vars (bind, ctx->block_vars);
6225 BLOCK_VARS (block) = ctx->block_vars;
6229 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6231 static void
6232 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6233 omp_context *ctx)
6235 struct omp_for_data fd;
6236 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6237 return;
6239 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6240 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6241 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6242 if (!fd.ordered)
6243 return;
6245 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6246 tree c = gimple_omp_ordered_clauses (ord_stmt);
6247 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6248 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6250 /* Merge depend clauses from multiple adjacent
6251 #pragma omp ordered depend(sink:...) constructs
6252 into one #pragma omp ordered depend(sink:...), so that
6253 we can optimize them together. */
6254 gimple_stmt_iterator gsi = *gsi_p;
6255 gsi_next (&gsi);
6256 while (!gsi_end_p (gsi))
6258 gimple *stmt = gsi_stmt (gsi);
6259 if (is_gimple_debug (stmt)
6260 || gimple_code (stmt) == GIMPLE_NOP)
6262 gsi_next (&gsi);
6263 continue;
6265 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6266 break;
6267 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6268 c = gimple_omp_ordered_clauses (ord_stmt2);
6269 if (c == NULL_TREE
6270 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6271 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6272 break;
6273 while (*list_p)
6274 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6275 *list_p = c;
6276 gsi_remove (&gsi, true);
6280 /* Canonicalize sink dependence clauses into one folded clause if
6281 possible.
6283 The basic algorithm is to create a sink vector whose first
6284 element is the GCD of all the first elements, and whose remaining
6285 elements are the minimum of the subsequent columns.
6287 We ignore dependence vectors whose first element is zero because
6288 such dependencies are known to be executed by the same thread.
6290 We take into account the direction of the loop, so a minimum
6291 becomes a maximum if the loop is iterating forwards. We also
6292 ignore sink clauses where the loop direction is unknown, or where
6293 the offsets are clearly invalid because they are not a multiple
6294 of the loop increment.
6296 For example:
6298 #pragma omp for ordered(2)
6299 for (i=0; i < N; ++i)
6300 for (j=0; j < M; ++j)
6302 #pragma omp ordered \
6303 depend(sink:i-8,j-2) \
6304 depend(sink:i,j-1) \ // Completely ignored because i+0.
6305 depend(sink:i-4,j-3) \
6306 depend(sink:i-6,j-4)
6307 #pragma omp ordered depend(source)
6310 Folded clause is:
6312 depend(sink:-gcd(8,4,6),-min(2,3,4))
6313 -or-
6314 depend(sink:-2,-2)
6317 /* FIXME: Computing GCD's where the first element is zero is
6318 non-trivial in the presence of collapsed loops. Do this later. */
6319 if (fd.collapse > 1)
6320 return;
6322 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6323 memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6324 tree folded_dep = NULL_TREE;
6325 /* TRUE if the first dimension's offset is negative. */
6326 bool neg_offset_p = false;
6328 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6329 unsigned int i;
6330 while ((c = *list_p) != NULL)
6332 bool remove = false;
6334 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6335 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6336 goto next_ordered_clause;
6338 tree vec;
6339 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6340 vec && TREE_CODE (vec) == TREE_LIST;
6341 vec = TREE_CHAIN (vec), ++i)
6343 gcc_assert (i < len);
6345 /* omp_extract_for_data has canonicalized the condition. */
6346 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6347 || fd.loops[i].cond_code == GT_EXPR);
6348 bool forward = fd.loops[i].cond_code == LT_EXPR;
6349 bool maybe_lexically_later = true;
6351 /* While the committee makes up its mind, bail if we have any
6352 non-constant steps. */
6353 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6354 goto lower_omp_ordered_ret;
6356 tree itype = TREE_TYPE (TREE_VALUE (vec));
6357 if (POINTER_TYPE_P (itype))
6358 itype = sizetype;
6359 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6360 TYPE_PRECISION (itype),
6361 TYPE_SIGN (itype));
6363 /* Ignore invalid offsets that are not multiples of the step. */
6364 if (!wi::multiple_of_p
6365 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6366 UNSIGNED))
6368 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6369 "ignoring sink clause with offset that is not "
6370 "a multiple of the loop step");
6371 remove = true;
6372 goto next_ordered_clause;
6375 /* Calculate the first dimension. The first dimension of
6376 the folded dependency vector is the GCD of the first
6377 elements, while ignoring any first elements whose offset
6378 is 0. */
6379 if (i == 0)
6381 /* Ignore dependence vectors whose first dimension is 0. */
6382 if (offset == 0)
6384 remove = true;
6385 goto next_ordered_clause;
6387 else
6389 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6391 error_at (OMP_CLAUSE_LOCATION (c),
6392 "first offset must be in opposite direction "
6393 "of loop iterations");
6394 goto lower_omp_ordered_ret;
6396 if (forward)
6397 offset = -offset;
6398 neg_offset_p = forward;
6399 /* Initialize the first time around. */
6400 if (folded_dep == NULL_TREE)
6402 folded_dep = c;
6403 folded_deps[0] = offset;
6405 else
6406 folded_deps[0] = wi::gcd (folded_deps[0],
6407 offset, UNSIGNED);
6410 /* Calculate minimum for the remaining dimensions. */
6411 else
6413 folded_deps[len + i - 1] = offset;
6414 if (folded_dep == c)
6415 folded_deps[i] = offset;
6416 else if (maybe_lexically_later
6417 && !wi::eq_p (folded_deps[i], offset))
6419 if (forward ^ wi::gts_p (folded_deps[i], offset))
6421 unsigned int j;
6422 folded_dep = c;
6423 for (j = 1; j <= i; j++)
6424 folded_deps[j] = folded_deps[len + j - 1];
6426 else
6427 maybe_lexically_later = false;
6431 gcc_assert (i == len);
6433 remove = true;
6435 next_ordered_clause:
6436 if (remove)
6437 *list_p = OMP_CLAUSE_CHAIN (c);
6438 else
6439 list_p = &OMP_CLAUSE_CHAIN (c);
6442 if (folded_dep)
6444 if (neg_offset_p)
6445 folded_deps[0] = -folded_deps[0];
6447 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6448 if (POINTER_TYPE_P (itype))
6449 itype = sizetype;
6451 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6452 = wide_int_to_tree (itype, folded_deps[0]);
6453 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6454 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6457 lower_omp_ordered_ret:
6459 /* Ordered without clauses is #pragma omp threads, while we want
6460 a nop instead if we remove all clauses. */
6461 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6462 gsi_replace (gsi_p, gimple_build_nop (), true);
6466 /* Expand code for an OpenMP ordered directive. */
6468 static void
6469 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6471 tree block;
6472 gimple *stmt = gsi_stmt (*gsi_p), *g;
6473 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6474 gcall *x;
6475 gbind *bind;
6476 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6477 OMP_CLAUSE_SIMD);
6478 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6479 loop. */
6480 bool maybe_simt
6481 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6482 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6483 OMP_CLAUSE_THREADS);
6485 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6486 OMP_CLAUSE_DEPEND))
6488 /* FIXME: This is needs to be moved to the expansion to verify various
6489 conditions only testable on cfg with dominators computed, and also
6490 all the depend clauses to be merged still might need to be available
6491 for the runtime checks. */
6492 if (0)
6493 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6494 return;
6497 push_gimplify_context ();
6499 block = make_node (BLOCK);
6500 bind = gimple_build_bind (NULL, NULL, block);
6501 gsi_replace (gsi_p, bind, true);
6502 gimple_bind_add_stmt (bind, stmt);
6504 if (simd)
6506 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6507 build_int_cst (NULL_TREE, threads));
6508 cfun->has_simduid_loops = true;
6510 else
6511 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6513 gimple_bind_add_stmt (bind, x);
6515 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6516 if (maybe_simt)
6518 counter = create_tmp_var (integer_type_node);
6519 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6520 gimple_call_set_lhs (g, counter);
6521 gimple_bind_add_stmt (bind, g);
6523 body = create_artificial_label (UNKNOWN_LOCATION);
6524 test = create_artificial_label (UNKNOWN_LOCATION);
6525 gimple_bind_add_stmt (bind, gimple_build_label (body));
6527 tree simt_pred = create_tmp_var (integer_type_node);
6528 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6529 gimple_call_set_lhs (g, simt_pred);
6530 gimple_bind_add_stmt (bind, g);
6532 tree t = create_artificial_label (UNKNOWN_LOCATION);
6533 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6534 gimple_bind_add_stmt (bind, g);
6536 gimple_bind_add_stmt (bind, gimple_build_label (t));
6538 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6539 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6540 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6541 gimple_omp_set_body (stmt, NULL);
6543 if (maybe_simt)
6545 gimple_bind_add_stmt (bind, gimple_build_label (test));
6546 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6547 gimple_bind_add_stmt (bind, g);
6549 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6550 tree nonneg = create_tmp_var (integer_type_node);
6551 gimple_seq tseq = NULL;
6552 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6553 gimple_bind_add_seq (bind, tseq);
6555 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6556 gimple_call_set_lhs (g, nonneg);
6557 gimple_bind_add_stmt (bind, g);
6559 tree end = create_artificial_label (UNKNOWN_LOCATION);
6560 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6561 gimple_bind_add_stmt (bind, g);
6563 gimple_bind_add_stmt (bind, gimple_build_label (end));
6565 if (simd)
6566 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6567 build_int_cst (NULL_TREE, threads));
6568 else
6569 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6571 gimple_bind_add_stmt (bind, x);
6573 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6575 pop_gimplify_context (bind);
6577 gimple_bind_append_vars (bind, ctx->block_vars);
6578 BLOCK_VARS (block) = gimple_bind_vars (bind);
6582 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6583 substitution of a couple of function calls. But in the NAMED case,
6584 requires that languages coordinate a symbol name. It is therefore
6585 best put here in common code. */
6587 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6589 static void
6590 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6592 tree block;
6593 tree name, lock, unlock;
6594 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6595 gbind *bind;
6596 location_t loc = gimple_location (stmt);
6597 gimple_seq tbody;
6599 name = gimple_omp_critical_name (stmt);
6600 if (name)
6602 tree decl;
6604 if (!critical_name_mutexes)
6605 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6607 tree *n = critical_name_mutexes->get (name);
6608 if (n == NULL)
6610 char *new_str;
6612 decl = create_tmp_var_raw (ptr_type_node);
6614 new_str = ACONCAT ((".gomp_critical_user_",
6615 IDENTIFIER_POINTER (name), NULL));
6616 DECL_NAME (decl) = get_identifier (new_str);
6617 TREE_PUBLIC (decl) = 1;
6618 TREE_STATIC (decl) = 1;
6619 DECL_COMMON (decl) = 1;
6620 DECL_ARTIFICIAL (decl) = 1;
6621 DECL_IGNORED_P (decl) = 1;
6623 varpool_node::finalize_decl (decl);
6625 critical_name_mutexes->put (name, decl);
6627 else
6628 decl = *n;
6630 /* If '#pragma omp critical' is inside offloaded region or
6631 inside function marked as offloadable, the symbol must be
6632 marked as offloadable too. */
6633 omp_context *octx;
6634 if (cgraph_node::get (current_function_decl)->offloadable)
6635 varpool_node::get_create (decl)->offloadable = 1;
6636 else
6637 for (octx = ctx->outer; octx; octx = octx->outer)
6638 if (is_gimple_omp_offloaded (octx->stmt))
6640 varpool_node::get_create (decl)->offloadable = 1;
6641 break;
6644 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6645 lock = build_call_expr_loc (loc, lock, 1,
6646 build_fold_addr_expr_loc (loc, decl));
6648 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6649 unlock = build_call_expr_loc (loc, unlock, 1,
6650 build_fold_addr_expr_loc (loc, decl));
6652 else
6654 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6655 lock = build_call_expr_loc (loc, lock, 0);
6657 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6658 unlock = build_call_expr_loc (loc, unlock, 0);
6661 push_gimplify_context ();
6663 block = make_node (BLOCK);
6664 bind = gimple_build_bind (NULL, NULL, block);
6665 gsi_replace (gsi_p, bind, true);
6666 gimple_bind_add_stmt (bind, stmt);
6668 tbody = gimple_bind_body (bind);
6669 gimplify_and_add (lock, &tbody);
6670 gimple_bind_set_body (bind, tbody);
6672 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6673 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6674 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6675 gimple_omp_set_body (stmt, NULL);
6677 tbody = gimple_bind_body (bind);
6678 gimplify_and_add (unlock, &tbody);
6679 gimple_bind_set_body (bind, tbody);
6681 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6683 pop_gimplify_context (bind);
6684 gimple_bind_append_vars (bind, ctx->block_vars);
6685 BLOCK_VARS (block) = gimple_bind_vars (bind);
6688 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6689 for a lastprivate clause. Given a loop control predicate of (V
6690 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6691 is appended to *DLIST, iterator initialization is appended to
6692 *BODY_P. */
6694 static void
6695 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6696 gimple_seq *dlist, struct omp_context *ctx)
6698 tree clauses, cond, vinit;
6699 enum tree_code cond_code;
6700 gimple_seq stmts;
6702 cond_code = fd->loop.cond_code;
6703 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6705 /* When possible, use a strict equality expression. This can let VRP
6706 type optimizations deduce the value and remove a copy. */
6707 if (tree_fits_shwi_p (fd->loop.step))
6709 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6710 if (step == 1 || step == -1)
6711 cond_code = EQ_EXPR;
6714 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6715 || gimple_omp_for_grid_phony (fd->for_stmt))
6716 cond = omp_grid_lastprivate_predicate (fd);
6717 else
6719 tree n2 = fd->loop.n2;
6720 if (fd->collapse > 1
6721 && TREE_CODE (n2) != INTEGER_CST
6722 && gimple_omp_for_combined_into_p (fd->for_stmt))
6724 struct omp_context *taskreg_ctx = NULL;
6725 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6727 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6728 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6729 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6731 if (gimple_omp_for_combined_into_p (gfor))
6733 gcc_assert (ctx->outer->outer
6734 && is_parallel_ctx (ctx->outer->outer));
6735 taskreg_ctx = ctx->outer->outer;
6737 else
6739 struct omp_for_data outer_fd;
6740 omp_extract_for_data (gfor, &outer_fd, NULL);
6741 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6744 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6745 taskreg_ctx = ctx->outer->outer;
6747 else if (is_taskreg_ctx (ctx->outer))
6748 taskreg_ctx = ctx->outer;
6749 if (taskreg_ctx)
6751 int i;
6752 tree taskreg_clauses
6753 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6754 tree innerc = omp_find_clause (taskreg_clauses,
6755 OMP_CLAUSE__LOOPTEMP_);
6756 gcc_assert (innerc);
6757 for (i = 0; i < fd->collapse; i++)
6759 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6760 OMP_CLAUSE__LOOPTEMP_);
6761 gcc_assert (innerc);
6763 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6764 OMP_CLAUSE__LOOPTEMP_);
6765 if (innerc)
6766 n2 = fold_convert (TREE_TYPE (n2),
6767 lookup_decl (OMP_CLAUSE_DECL (innerc),
6768 taskreg_ctx));
6771 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6774 clauses = gimple_omp_for_clauses (fd->for_stmt);
6775 stmts = NULL;
6776 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6777 if (!gimple_seq_empty_p (stmts))
6779 gimple_seq_add_seq (&stmts, *dlist);
6780 *dlist = stmts;
6782 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6783 vinit = fd->loop.n1;
6784 if (cond_code == EQ_EXPR
6785 && tree_fits_shwi_p (fd->loop.n2)
6786 && ! integer_zerop (fd->loop.n2))
6787 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6788 else
6789 vinit = unshare_expr (vinit);
6791 /* Initialize the iterator variable, so that threads that don't execute
6792 any iterations don't execute the lastprivate clauses by accident. */
6793 gimplify_assign (fd->loop.v, vinit, body_p);
6798 /* Lower code for an OMP loop directive. */
6800 static void
6801 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6803 tree *rhs_p, block;
6804 struct omp_for_data fd, *fdp = NULL;
6805 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6806 gbind *new_stmt;
6807 gimple_seq omp_for_body, body, dlist;
6808 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6809 size_t i;
6811 push_gimplify_context ();
6813 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6815 block = make_node (BLOCK);
6816 new_stmt = gimple_build_bind (NULL, NULL, block);
6817 /* Replace at gsi right away, so that 'stmt' is no member
6818 of a sequence anymore as we're going to add to a different
6819 one below. */
6820 gsi_replace (gsi_p, new_stmt, true);
6822 /* Move declaration of temporaries in the loop body before we make
6823 it go away. */
6824 omp_for_body = gimple_omp_body (stmt);
6825 if (!gimple_seq_empty_p (omp_for_body)
6826 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6828 gbind *inner_bind
6829 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6830 tree vars = gimple_bind_vars (inner_bind);
6831 gimple_bind_append_vars (new_stmt, vars);
6832 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6833 keep them on the inner_bind and it's block. */
6834 gimple_bind_set_vars (inner_bind, NULL_TREE);
6835 if (gimple_bind_block (inner_bind))
6836 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6839 if (gimple_omp_for_combined_into_p (stmt))
6841 omp_extract_for_data (stmt, &fd, NULL);
6842 fdp = &fd;
6844 /* We need two temporaries with fd.loop.v type (istart/iend)
6845 and then (fd.collapse - 1) temporaries with the same
6846 type for count2 ... countN-1 vars if not constant. */
6847 size_t count = 2;
6848 tree type = fd.iter_type;
6849 if (fd.collapse > 1
6850 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6851 count += fd.collapse - 1;
6852 bool taskreg_for
6853 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6854 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6855 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6856 tree simtc = NULL;
6857 tree clauses = *pc;
6858 if (taskreg_for)
6859 outerc
6860 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6861 OMP_CLAUSE__LOOPTEMP_);
6862 if (ctx->simt_stmt)
6863 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6864 OMP_CLAUSE__LOOPTEMP_);
6865 for (i = 0; i < count; i++)
6867 tree temp;
6868 if (taskreg_for)
6870 gcc_assert (outerc);
6871 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6872 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6873 OMP_CLAUSE__LOOPTEMP_);
6875 else
6877 /* If there are 2 adjacent SIMD stmts, one with _simt_
6878 clause, another without, make sure they have the same
6879 decls in _looptemp_ clauses, because the outer stmt
6880 they are combined into will look up just one inner_stmt. */
6881 if (ctx->simt_stmt)
6882 temp = OMP_CLAUSE_DECL (simtc);
6883 else
6884 temp = create_tmp_var (type);
6885 insert_decl_map (&ctx->outer->cb, temp, temp);
6887 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6888 OMP_CLAUSE_DECL (*pc) = temp;
6889 pc = &OMP_CLAUSE_CHAIN (*pc);
6890 if (ctx->simt_stmt)
6891 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6892 OMP_CLAUSE__LOOPTEMP_);
6894 *pc = clauses;
6897 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6898 dlist = NULL;
6899 body = NULL;
6900 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6901 fdp);
6902 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6904 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6906 /* Lower the header expressions. At this point, we can assume that
6907 the header is of the form:
6909 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6911 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6912 using the .omp_data_s mapping, if needed. */
6913 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6915 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6916 if (!is_gimple_min_invariant (*rhs_p))
6917 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6919 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6920 if (!is_gimple_min_invariant (*rhs_p))
6921 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6923 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6924 if (!is_gimple_min_invariant (*rhs_p))
6925 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6928 /* Once lowered, extract the bounds and clauses. */
6929 omp_extract_for_data (stmt, &fd, NULL);
6931 if (is_gimple_omp_oacc (ctx->stmt)
6932 && !ctx_in_oacc_kernels_region (ctx))
6933 lower_oacc_head_tail (gimple_location (stmt),
6934 gimple_omp_for_clauses (stmt),
6935 &oacc_head, &oacc_tail, ctx);
6937 /* Add OpenACC partitioning and reduction markers just before the loop. */
6938 if (oacc_head)
6939 gimple_seq_add_seq (&body, oacc_head);
6941 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6943 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6944 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6945 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6946 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6948 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6949 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6950 OMP_CLAUSE_LINEAR_STEP (c)
6951 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6952 ctx);
6955 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6956 && gimple_omp_for_grid_phony (stmt));
6957 if (!phony_loop)
6958 gimple_seq_add_stmt (&body, stmt);
6959 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6961 if (!phony_loop)
6962 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6963 fd.loop.v));
6965 /* After the loop, add exit clauses. */
6966 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6968 if (ctx->cancellable)
6969 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6971 gimple_seq_add_seq (&body, dlist);
6973 body = maybe_catch_exception (body);
6975 if (!phony_loop)
6977 /* Region exit marker goes at the end of the loop body. */
6978 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6979 maybe_add_implicit_barrier_cancel (ctx, &body);
6982 /* Add OpenACC joining and reduction markers just after the loop. */
6983 if (oacc_tail)
6984 gimple_seq_add_seq (&body, oacc_tail);
6986 pop_gimplify_context (new_stmt);
6988 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6989 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6990 if (BLOCK_VARS (block))
6991 TREE_USED (block) = 1;
6993 gimple_bind_set_body (new_stmt, body);
6994 gimple_omp_set_body (stmt, NULL);
6995 gimple_omp_for_set_pre_body (stmt, NULL);
6998 /* Callback for walk_stmts. Check if the current statement only contains
6999 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
7001 static tree
7002 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7003 bool *handled_ops_p,
7004 struct walk_stmt_info *wi)
7006 int *info = (int *) wi->info;
7007 gimple *stmt = gsi_stmt (*gsi_p);
7009 *handled_ops_p = true;
7010 switch (gimple_code (stmt))
7012 WALK_SUBSTMTS;
7014 case GIMPLE_OMP_FOR:
7015 case GIMPLE_OMP_SECTIONS:
7016 *info = *info == 0 ? 1 : -1;
7017 break;
7018 default:
7019 *info = -1;
7020 break;
7022 return NULL;
7025 struct omp_taskcopy_context
7027 /* This field must be at the beginning, as we do "inheritance": Some
7028 callback functions for tree-inline.c (e.g., omp_copy_decl)
7029 receive a copy_body_data pointer that is up-casted to an
7030 omp_context pointer. */
7031 copy_body_data cb;
7032 omp_context *ctx;
7035 static tree
7036 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7038 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7040 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7041 return create_tmp_var (TREE_TYPE (var));
7043 return var;
7046 static tree
7047 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7049 tree name, new_fields = NULL, type, f;
7051 type = lang_hooks.types.make_type (RECORD_TYPE);
7052 name = DECL_NAME (TYPE_NAME (orig_type));
7053 name = build_decl (gimple_location (tcctx->ctx->stmt),
7054 TYPE_DECL, name, type);
7055 TYPE_NAME (type) = name;
7057 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7059 tree new_f = copy_node (f);
7060 DECL_CONTEXT (new_f) = type;
7061 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7062 TREE_CHAIN (new_f) = new_fields;
7063 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7064 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7065 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7066 &tcctx->cb, NULL);
7067 new_fields = new_f;
7068 tcctx->cb.decl_map->put (f, new_f);
7070 TYPE_FIELDS (type) = nreverse (new_fields);
7071 layout_type (type);
7072 return type;
7075 /* Create task copyfn. */
7077 static void
7078 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7080 struct function *child_cfun;
7081 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7082 tree record_type, srecord_type, bind, list;
7083 bool record_needs_remap = false, srecord_needs_remap = false;
7084 splay_tree_node n;
7085 struct omp_taskcopy_context tcctx;
7086 location_t loc = gimple_location (task_stmt);
7088 child_fn = gimple_omp_task_copy_fn (task_stmt);
7089 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7090 gcc_assert (child_cfun->cfg == NULL);
7091 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7093 /* Reset DECL_CONTEXT on function arguments. */
7094 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7095 DECL_CONTEXT (t) = child_fn;
7097 /* Populate the function. */
7098 push_gimplify_context ();
7099 push_cfun (child_cfun);
7101 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7102 TREE_SIDE_EFFECTS (bind) = 1;
7103 list = NULL;
7104 DECL_SAVED_TREE (child_fn) = bind;
7105 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7107 /* Remap src and dst argument types if needed. */
7108 record_type = ctx->record_type;
7109 srecord_type = ctx->srecord_type;
7110 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7111 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7113 record_needs_remap = true;
7114 break;
7116 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7117 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7119 srecord_needs_remap = true;
7120 break;
7123 if (record_needs_remap || srecord_needs_remap)
7125 memset (&tcctx, '\0', sizeof (tcctx));
7126 tcctx.cb.src_fn = ctx->cb.src_fn;
7127 tcctx.cb.dst_fn = child_fn;
7128 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7129 gcc_checking_assert (tcctx.cb.src_node);
7130 tcctx.cb.dst_node = tcctx.cb.src_node;
7131 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7132 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7133 tcctx.cb.eh_lp_nr = 0;
7134 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7135 tcctx.cb.decl_map = new hash_map<tree, tree>;
7136 tcctx.ctx = ctx;
7138 if (record_needs_remap)
7139 record_type = task_copyfn_remap_type (&tcctx, record_type);
7140 if (srecord_needs_remap)
7141 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7143 else
7144 tcctx.cb.decl_map = NULL;
7146 arg = DECL_ARGUMENTS (child_fn);
7147 TREE_TYPE (arg) = build_pointer_type (record_type);
7148 sarg = DECL_CHAIN (arg);
7149 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7151 /* First pass: initialize temporaries used in record_type and srecord_type
7152 sizes and field offsets. */
7153 if (tcctx.cb.decl_map)
7154 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7155 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7157 tree *p;
7159 decl = OMP_CLAUSE_DECL (c);
7160 p = tcctx.cb.decl_map->get (decl);
7161 if (p == NULL)
7162 continue;
7163 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7164 sf = (tree) n->value;
7165 sf = *tcctx.cb.decl_map->get (sf);
7166 src = build_simple_mem_ref_loc (loc, sarg);
7167 src = omp_build_component_ref (src, sf);
7168 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7169 append_to_statement_list (t, &list);
7172 /* Second pass: copy shared var pointers and copy construct non-VLA
7173 firstprivate vars. */
7174 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7175 switch (OMP_CLAUSE_CODE (c))
7177 splay_tree_key key;
7178 case OMP_CLAUSE_SHARED:
7179 decl = OMP_CLAUSE_DECL (c);
7180 key = (splay_tree_key) decl;
7181 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7182 key = (splay_tree_key) &DECL_UID (decl);
7183 n = splay_tree_lookup (ctx->field_map, key);
7184 if (n == NULL)
7185 break;
7186 f = (tree) n->value;
7187 if (tcctx.cb.decl_map)
7188 f = *tcctx.cb.decl_map->get (f);
7189 n = splay_tree_lookup (ctx->sfield_map, key);
7190 sf = (tree) n->value;
7191 if (tcctx.cb.decl_map)
7192 sf = *tcctx.cb.decl_map->get (sf);
7193 src = build_simple_mem_ref_loc (loc, sarg);
7194 src = omp_build_component_ref (src, sf);
7195 dst = build_simple_mem_ref_loc (loc, arg);
7196 dst = omp_build_component_ref (dst, f);
7197 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7198 append_to_statement_list (t, &list);
7199 break;
7200 case OMP_CLAUSE_FIRSTPRIVATE:
7201 decl = OMP_CLAUSE_DECL (c);
7202 if (is_variable_sized (decl))
7203 break;
7204 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7205 if (n == NULL)
7206 break;
7207 f = (tree) n->value;
7208 if (tcctx.cb.decl_map)
7209 f = *tcctx.cb.decl_map->get (f);
7210 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7211 if (n != NULL)
7213 sf = (tree) n->value;
7214 if (tcctx.cb.decl_map)
7215 sf = *tcctx.cb.decl_map->get (sf);
7216 src = build_simple_mem_ref_loc (loc, sarg);
7217 src = omp_build_component_ref (src, sf);
7218 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7219 src = build_simple_mem_ref_loc (loc, src);
7221 else
7222 src = decl;
7223 dst = build_simple_mem_ref_loc (loc, arg);
7224 dst = omp_build_component_ref (dst, f);
7225 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7226 append_to_statement_list (t, &list);
7227 break;
7228 case OMP_CLAUSE_PRIVATE:
7229 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7230 break;
7231 decl = OMP_CLAUSE_DECL (c);
7232 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7233 f = (tree) n->value;
7234 if (tcctx.cb.decl_map)
7235 f = *tcctx.cb.decl_map->get (f);
7236 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7237 if (n != NULL)
7239 sf = (tree) n->value;
7240 if (tcctx.cb.decl_map)
7241 sf = *tcctx.cb.decl_map->get (sf);
7242 src = build_simple_mem_ref_loc (loc, sarg);
7243 src = omp_build_component_ref (src, sf);
7244 if (use_pointer_for_field (decl, NULL))
7245 src = build_simple_mem_ref_loc (loc, src);
7247 else
7248 src = decl;
7249 dst = build_simple_mem_ref_loc (loc, arg);
7250 dst = omp_build_component_ref (dst, f);
7251 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7252 append_to_statement_list (t, &list);
7253 break;
7254 default:
7255 break;
7258 /* Last pass: handle VLA firstprivates. */
7259 if (tcctx.cb.decl_map)
7260 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7261 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7263 tree ind, ptr, df;
7265 decl = OMP_CLAUSE_DECL (c);
7266 if (!is_variable_sized (decl))
7267 continue;
7268 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7269 if (n == NULL)
7270 continue;
7271 f = (tree) n->value;
7272 f = *tcctx.cb.decl_map->get (f);
7273 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7274 ind = DECL_VALUE_EXPR (decl);
7275 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7276 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7277 n = splay_tree_lookup (ctx->sfield_map,
7278 (splay_tree_key) TREE_OPERAND (ind, 0));
7279 sf = (tree) n->value;
7280 sf = *tcctx.cb.decl_map->get (sf);
7281 src = build_simple_mem_ref_loc (loc, sarg);
7282 src = omp_build_component_ref (src, sf);
7283 src = build_simple_mem_ref_loc (loc, src);
7284 dst = build_simple_mem_ref_loc (loc, arg);
7285 dst = omp_build_component_ref (dst, f);
7286 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7287 append_to_statement_list (t, &list);
7288 n = splay_tree_lookup (ctx->field_map,
7289 (splay_tree_key) TREE_OPERAND (ind, 0));
7290 df = (tree) n->value;
7291 df = *tcctx.cb.decl_map->get (df);
7292 ptr = build_simple_mem_ref_loc (loc, arg);
7293 ptr = omp_build_component_ref (ptr, df);
7294 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7295 build_fold_addr_expr_loc (loc, dst));
7296 append_to_statement_list (t, &list);
7299 t = build1 (RETURN_EXPR, void_type_node, NULL);
7300 append_to_statement_list (t, &list);
7302 if (tcctx.cb.decl_map)
7303 delete tcctx.cb.decl_map;
7304 pop_gimplify_context (NULL);
7305 BIND_EXPR_BODY (bind) = list;
7306 pop_cfun ();
7309 static void
7310 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7312 tree c, clauses;
7313 gimple *g;
7314 size_t n_in = 0, n_out = 0, idx = 2, i;
7316 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7317 gcc_assert (clauses);
7318 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7319 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7320 switch (OMP_CLAUSE_DEPEND_KIND (c))
7322 case OMP_CLAUSE_DEPEND_IN:
7323 n_in++;
7324 break;
7325 case OMP_CLAUSE_DEPEND_OUT:
7326 case OMP_CLAUSE_DEPEND_INOUT:
7327 n_out++;
7328 break;
7329 case OMP_CLAUSE_DEPEND_SOURCE:
7330 case OMP_CLAUSE_DEPEND_SINK:
7331 /* FALLTHRU */
7332 default:
7333 gcc_unreachable ();
7335 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7336 tree array = create_tmp_var (type);
7337 TREE_ADDRESSABLE (array) = 1;
7338 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7339 NULL_TREE);
7340 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7341 gimple_seq_add_stmt (iseq, g);
7342 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7343 NULL_TREE);
7344 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7345 gimple_seq_add_stmt (iseq, g);
7346 for (i = 0; i < 2; i++)
7348 if ((i ? n_in : n_out) == 0)
7349 continue;
7350 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7352 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7354 tree t = OMP_CLAUSE_DECL (c);
7355 t = fold_convert (ptr_type_node, t);
7356 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7357 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7358 NULL_TREE, NULL_TREE);
7359 g = gimple_build_assign (r, t);
7360 gimple_seq_add_stmt (iseq, g);
7363 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7364 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7365 OMP_CLAUSE_CHAIN (c) = *pclauses;
7366 *pclauses = c;
7367 tree clobber = build_constructor (type, NULL);
7368 TREE_THIS_VOLATILE (clobber) = 1;
7369 g = gimple_build_assign (array, clobber);
7370 gimple_seq_add_stmt (oseq, g);
7373 /* Lower the OpenMP parallel or task directive in the current statement
7374 in GSI_P. CTX holds context information for the directive. */
7376 static void
7377 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7379 tree clauses;
7380 tree child_fn, t;
7381 gimple *stmt = gsi_stmt (*gsi_p);
7382 gbind *par_bind, *bind, *dep_bind = NULL;
7383 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7384 location_t loc = gimple_location (stmt);
7386 clauses = gimple_omp_taskreg_clauses (stmt);
7387 par_bind
7388 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7389 par_body = gimple_bind_body (par_bind);
7390 child_fn = ctx->cb.dst_fn;
7391 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7392 && !gimple_omp_parallel_combined_p (stmt))
7394 struct walk_stmt_info wi;
7395 int ws_num = 0;
7397 memset (&wi, 0, sizeof (wi));
7398 wi.info = &ws_num;
7399 wi.val_only = true;
7400 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7401 if (ws_num == 1)
7402 gimple_omp_parallel_set_combined_p (stmt, true);
7404 gimple_seq dep_ilist = NULL;
7405 gimple_seq dep_olist = NULL;
7406 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7407 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7409 push_gimplify_context ();
7410 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7411 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7412 &dep_ilist, &dep_olist);
7415 if (ctx->srecord_type)
7416 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7418 push_gimplify_context ();
7420 par_olist = NULL;
7421 par_ilist = NULL;
7422 par_rlist = NULL;
7423 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7424 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7425 if (phony_construct && ctx->record_type)
7427 gcc_checking_assert (!ctx->receiver_decl);
7428 ctx->receiver_decl = create_tmp_var
7429 (build_reference_type (ctx->record_type), ".omp_rec");
7431 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7432 lower_omp (&par_body, ctx);
7433 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7434 lower_reduction_clauses (clauses, &par_rlist, ctx);
7436 /* Declare all the variables created by mapping and the variables
7437 declared in the scope of the parallel body. */
7438 record_vars_into (ctx->block_vars, child_fn);
7439 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7441 if (ctx->record_type)
7443 ctx->sender_decl
7444 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7445 : ctx->record_type, ".omp_data_o");
7446 DECL_NAMELESS (ctx->sender_decl) = 1;
7447 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7448 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7451 olist = NULL;
7452 ilist = NULL;
7453 lower_send_clauses (clauses, &ilist, &olist, ctx);
7454 lower_send_shared_vars (&ilist, &olist, ctx);
7456 if (ctx->record_type)
7458 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7459 TREE_THIS_VOLATILE (clobber) = 1;
7460 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7461 clobber));
7464 /* Once all the expansions are done, sequence all the different
7465 fragments inside gimple_omp_body. */
7467 new_body = NULL;
7469 if (ctx->record_type)
7471 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7472 /* fixup_child_record_type might have changed receiver_decl's type. */
7473 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7474 gimple_seq_add_stmt (&new_body,
7475 gimple_build_assign (ctx->receiver_decl, t));
7478 gimple_seq_add_seq (&new_body, par_ilist);
7479 gimple_seq_add_seq (&new_body, par_body);
7480 gimple_seq_add_seq (&new_body, par_rlist);
7481 if (ctx->cancellable)
7482 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7483 gimple_seq_add_seq (&new_body, par_olist);
7484 new_body = maybe_catch_exception (new_body);
7485 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7486 gimple_seq_add_stmt (&new_body,
7487 gimple_build_omp_continue (integer_zero_node,
7488 integer_zero_node));
7489 if (!phony_construct)
7491 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7492 gimple_omp_set_body (stmt, new_body);
7495 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7496 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7497 gimple_bind_add_seq (bind, ilist);
7498 if (!phony_construct)
7499 gimple_bind_add_stmt (bind, stmt);
7500 else
7501 gimple_bind_add_seq (bind, new_body);
7502 gimple_bind_add_seq (bind, olist);
7504 pop_gimplify_context (NULL);
7506 if (dep_bind)
7508 gimple_bind_add_seq (dep_bind, dep_ilist);
7509 gimple_bind_add_stmt (dep_bind, bind);
7510 gimple_bind_add_seq (dep_bind, dep_olist);
7511 pop_gimplify_context (dep_bind);
7515 /* Lower the GIMPLE_OMP_TARGET in the current statement
7516 in GSI_P. CTX holds context information for the directive. */
7518 static void
7519 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7521 tree clauses;
7522 tree child_fn, t, c;
7523 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7524 gbind *tgt_bind, *bind, *dep_bind = NULL;
7525 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7526 location_t loc = gimple_location (stmt);
7527 bool offloaded, data_region;
7528 unsigned int map_cnt = 0;
7530 offloaded = is_gimple_omp_offloaded (stmt);
7531 switch (gimple_omp_target_kind (stmt))
7533 case GF_OMP_TARGET_KIND_REGION:
7534 case GF_OMP_TARGET_KIND_UPDATE:
7535 case GF_OMP_TARGET_KIND_ENTER_DATA:
7536 case GF_OMP_TARGET_KIND_EXIT_DATA:
7537 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7538 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7539 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7540 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7541 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7542 data_region = false;
7543 break;
7544 case GF_OMP_TARGET_KIND_DATA:
7545 case GF_OMP_TARGET_KIND_OACC_DATA:
7546 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7547 data_region = true;
7548 break;
7549 default:
7550 gcc_unreachable ();
7553 clauses = gimple_omp_target_clauses (stmt);
7555 gimple_seq dep_ilist = NULL;
7556 gimple_seq dep_olist = NULL;
7557 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7559 push_gimplify_context ();
7560 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7561 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7562 &dep_ilist, &dep_olist);
7565 tgt_bind = NULL;
7566 tgt_body = NULL;
7567 if (offloaded)
7569 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7570 tgt_body = gimple_bind_body (tgt_bind);
7572 else if (data_region)
7573 tgt_body = gimple_omp_body (stmt);
7574 child_fn = ctx->cb.dst_fn;
7576 push_gimplify_context ();
7577 fplist = NULL;
7579 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7580 switch (OMP_CLAUSE_CODE (c))
7582 tree var, x;
7584 default:
7585 break;
7586 case OMP_CLAUSE_MAP:
7587 #if CHECKING_P
7588 /* First check what we're prepared to handle in the following. */
7589 switch (OMP_CLAUSE_MAP_KIND (c))
7591 case GOMP_MAP_ALLOC:
7592 case GOMP_MAP_TO:
7593 case GOMP_MAP_FROM:
7594 case GOMP_MAP_TOFROM:
7595 case GOMP_MAP_POINTER:
7596 case GOMP_MAP_TO_PSET:
7597 case GOMP_MAP_DELETE:
7598 case GOMP_MAP_RELEASE:
7599 case GOMP_MAP_ALWAYS_TO:
7600 case GOMP_MAP_ALWAYS_FROM:
7601 case GOMP_MAP_ALWAYS_TOFROM:
7602 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7603 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7604 case GOMP_MAP_STRUCT:
7605 case GOMP_MAP_ALWAYS_POINTER:
7606 break;
7607 case GOMP_MAP_FORCE_ALLOC:
7608 case GOMP_MAP_FORCE_TO:
7609 case GOMP_MAP_FORCE_FROM:
7610 case GOMP_MAP_FORCE_TOFROM:
7611 case GOMP_MAP_FORCE_PRESENT:
7612 case GOMP_MAP_FORCE_DEVICEPTR:
7613 case GOMP_MAP_DEVICE_RESIDENT:
7614 case GOMP_MAP_LINK:
7615 gcc_assert (is_gimple_omp_oacc (stmt));
7616 break;
7617 default:
7618 gcc_unreachable ();
7620 #endif
7621 /* FALLTHRU */
7622 case OMP_CLAUSE_TO:
7623 case OMP_CLAUSE_FROM:
7624 oacc_firstprivate:
7625 var = OMP_CLAUSE_DECL (c);
7626 if (!DECL_P (var))
7628 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7629 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7630 && (OMP_CLAUSE_MAP_KIND (c)
7631 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7632 map_cnt++;
7633 continue;
7636 if (DECL_SIZE (var)
7637 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7639 tree var2 = DECL_VALUE_EXPR (var);
7640 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7641 var2 = TREE_OPERAND (var2, 0);
7642 gcc_assert (DECL_P (var2));
7643 var = var2;
7646 if (offloaded
7647 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7648 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7649 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7651 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7653 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7654 && varpool_node::get_create (var)->offloadable)
7655 continue;
7657 tree type = build_pointer_type (TREE_TYPE (var));
7658 tree new_var = lookup_decl (var, ctx);
7659 x = create_tmp_var_raw (type, get_name (new_var));
7660 gimple_add_tmp_var (x);
7661 x = build_simple_mem_ref (x);
7662 SET_DECL_VALUE_EXPR (new_var, x);
7663 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7665 continue;
7668 if (!maybe_lookup_field (var, ctx))
7669 continue;
7671 /* Don't remap oacc parallel reduction variables, because the
7672 intermediate result must be local to each gang. */
7673 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7674 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7676 x = build_receiver_ref (var, true, ctx);
7677 tree new_var = lookup_decl (var, ctx);
7679 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7680 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7681 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7682 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7683 x = build_simple_mem_ref (x);
7684 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7686 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7687 if (omp_is_reference (new_var))
7689 /* Create a local object to hold the instance
7690 value. */
7691 tree type = TREE_TYPE (TREE_TYPE (new_var));
7692 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7693 tree inst = create_tmp_var (type, id);
7694 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7695 x = build_fold_addr_expr (inst);
7697 gimplify_assign (new_var, x, &fplist);
7699 else if (DECL_P (new_var))
7701 SET_DECL_VALUE_EXPR (new_var, x);
7702 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7704 else
7705 gcc_unreachable ();
7707 map_cnt++;
7708 break;
7710 case OMP_CLAUSE_FIRSTPRIVATE:
7711 if (is_oacc_parallel (ctx))
7712 goto oacc_firstprivate;
7713 map_cnt++;
7714 var = OMP_CLAUSE_DECL (c);
7715 if (!omp_is_reference (var)
7716 && !is_gimple_reg_type (TREE_TYPE (var)))
7718 tree new_var = lookup_decl (var, ctx);
7719 if (is_variable_sized (var))
7721 tree pvar = DECL_VALUE_EXPR (var);
7722 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7723 pvar = TREE_OPERAND (pvar, 0);
7724 gcc_assert (DECL_P (pvar));
7725 tree new_pvar = lookup_decl (pvar, ctx);
7726 x = build_fold_indirect_ref (new_pvar);
7727 TREE_THIS_NOTRAP (x) = 1;
7729 else
7730 x = build_receiver_ref (var, true, ctx);
7731 SET_DECL_VALUE_EXPR (new_var, x);
7732 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7734 break;
7736 case OMP_CLAUSE_PRIVATE:
7737 if (is_gimple_omp_oacc (ctx->stmt))
7738 break;
7739 var = OMP_CLAUSE_DECL (c);
7740 if (is_variable_sized (var))
7742 tree new_var = lookup_decl (var, ctx);
7743 tree pvar = DECL_VALUE_EXPR (var);
7744 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7745 pvar = TREE_OPERAND (pvar, 0);
7746 gcc_assert (DECL_P (pvar));
7747 tree new_pvar = lookup_decl (pvar, ctx);
7748 x = build_fold_indirect_ref (new_pvar);
7749 TREE_THIS_NOTRAP (x) = 1;
7750 SET_DECL_VALUE_EXPR (new_var, x);
7751 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7753 break;
7755 case OMP_CLAUSE_USE_DEVICE_PTR:
7756 case OMP_CLAUSE_IS_DEVICE_PTR:
7757 var = OMP_CLAUSE_DECL (c);
7758 map_cnt++;
7759 if (is_variable_sized (var))
7761 tree new_var = lookup_decl (var, ctx);
7762 tree pvar = DECL_VALUE_EXPR (var);
7763 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7764 pvar = TREE_OPERAND (pvar, 0);
7765 gcc_assert (DECL_P (pvar));
7766 tree new_pvar = lookup_decl (pvar, ctx);
7767 x = build_fold_indirect_ref (new_pvar);
7768 TREE_THIS_NOTRAP (x) = 1;
7769 SET_DECL_VALUE_EXPR (new_var, x);
7770 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7772 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7774 tree new_var = lookup_decl (var, ctx);
7775 tree type = build_pointer_type (TREE_TYPE (var));
7776 x = create_tmp_var_raw (type, get_name (new_var));
7777 gimple_add_tmp_var (x);
7778 x = build_simple_mem_ref (x);
7779 SET_DECL_VALUE_EXPR (new_var, x);
7780 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7782 else
7784 tree new_var = lookup_decl (var, ctx);
7785 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7786 gimple_add_tmp_var (x);
7787 SET_DECL_VALUE_EXPR (new_var, x);
7788 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7790 break;
7793 if (offloaded)
7795 target_nesting_level++;
7796 lower_omp (&tgt_body, ctx);
7797 target_nesting_level--;
7799 else if (data_region)
7800 lower_omp (&tgt_body, ctx);
7802 if (offloaded)
7804 /* Declare all the variables created by mapping and the variables
7805 declared in the scope of the target body. */
7806 record_vars_into (ctx->block_vars, child_fn);
7807 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7810 olist = NULL;
7811 ilist = NULL;
7812 if (ctx->record_type)
7814 ctx->sender_decl
7815 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7816 DECL_NAMELESS (ctx->sender_decl) = 1;
7817 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7818 t = make_tree_vec (3);
7819 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7820 TREE_VEC_ELT (t, 1)
7821 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7822 ".omp_data_sizes");
7823 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7824 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7825 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7826 tree tkind_type = short_unsigned_type_node;
7827 int talign_shift = 8;
7828 TREE_VEC_ELT (t, 2)
7829 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7830 ".omp_data_kinds");
7831 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7832 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7833 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7834 gimple_omp_target_set_data_arg (stmt, t);
7836 vec<constructor_elt, va_gc> *vsize;
7837 vec<constructor_elt, va_gc> *vkind;
7838 vec_alloc (vsize, map_cnt);
7839 vec_alloc (vkind, map_cnt);
7840 unsigned int map_idx = 0;
7842 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7843 switch (OMP_CLAUSE_CODE (c))
7845 tree ovar, nc, s, purpose, var, x, type;
7846 unsigned int talign;
7848 default:
7849 break;
7851 case OMP_CLAUSE_MAP:
7852 case OMP_CLAUSE_TO:
7853 case OMP_CLAUSE_FROM:
7854 oacc_firstprivate_map:
7855 nc = c;
7856 ovar = OMP_CLAUSE_DECL (c);
7857 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7858 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7859 || (OMP_CLAUSE_MAP_KIND (c)
7860 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7861 break;
7862 if (!DECL_P (ovar))
7864 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7865 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7867 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7868 == get_base_address (ovar));
7869 nc = OMP_CLAUSE_CHAIN (c);
7870 ovar = OMP_CLAUSE_DECL (nc);
7872 else
7874 tree x = build_sender_ref (ovar, ctx);
7875 tree v
7876 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7877 gimplify_assign (x, v, &ilist);
7878 nc = NULL_TREE;
7881 else
7883 if (DECL_SIZE (ovar)
7884 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7886 tree ovar2 = DECL_VALUE_EXPR (ovar);
7887 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7888 ovar2 = TREE_OPERAND (ovar2, 0);
7889 gcc_assert (DECL_P (ovar2));
7890 ovar = ovar2;
7892 if (!maybe_lookup_field (ovar, ctx))
7893 continue;
7896 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7897 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7898 talign = DECL_ALIGN_UNIT (ovar);
7899 if (nc)
7901 var = lookup_decl_in_outer_ctx (ovar, ctx);
7902 x = build_sender_ref (ovar, ctx);
7904 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7905 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7906 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7907 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7909 gcc_assert (offloaded);
7910 tree avar
7911 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7912 mark_addressable (avar);
7913 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7914 talign = DECL_ALIGN_UNIT (avar);
7915 avar = build_fold_addr_expr (avar);
7916 gimplify_assign (x, avar, &ilist);
7918 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7920 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7921 if (!omp_is_reference (var))
7923 if (is_gimple_reg (var)
7924 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7925 TREE_NO_WARNING (var) = 1;
7926 var = build_fold_addr_expr (var);
7928 else
7929 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7930 gimplify_assign (x, var, &ilist);
7932 else if (is_gimple_reg (var))
7934 gcc_assert (offloaded);
7935 tree avar = create_tmp_var (TREE_TYPE (var));
7936 mark_addressable (avar);
7937 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7938 if (GOMP_MAP_COPY_TO_P (map_kind)
7939 || map_kind == GOMP_MAP_POINTER
7940 || map_kind == GOMP_MAP_TO_PSET
7941 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7943 /* If we need to initialize a temporary
7944 with VAR because it is not addressable, and
7945 the variable hasn't been initialized yet, then
7946 we'll get a warning for the store to avar.
7947 Don't warn in that case, the mapping might
7948 be implicit. */
7949 TREE_NO_WARNING (var) = 1;
7950 gimplify_assign (avar, var, &ilist);
7952 avar = build_fold_addr_expr (avar);
7953 gimplify_assign (x, avar, &ilist);
7954 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7955 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7956 && !TYPE_READONLY (TREE_TYPE (var)))
7958 x = unshare_expr (x);
7959 x = build_simple_mem_ref (x);
7960 gimplify_assign (var, x, &olist);
7963 else
7965 var = build_fold_addr_expr (var);
7966 gimplify_assign (x, var, &ilist);
7969 s = NULL_TREE;
7970 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7972 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7973 s = TREE_TYPE (ovar);
7974 if (TREE_CODE (s) == REFERENCE_TYPE)
7975 s = TREE_TYPE (s);
7976 s = TYPE_SIZE_UNIT (s);
7978 else
7979 s = OMP_CLAUSE_SIZE (c);
7980 if (s == NULL_TREE)
7981 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7982 s = fold_convert (size_type_node, s);
7983 purpose = size_int (map_idx++);
7984 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7985 if (TREE_CODE (s) != INTEGER_CST)
7986 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7988 unsigned HOST_WIDE_INT tkind, tkind_zero;
7989 switch (OMP_CLAUSE_CODE (c))
7991 case OMP_CLAUSE_MAP:
7992 tkind = OMP_CLAUSE_MAP_KIND (c);
7993 tkind_zero = tkind;
7994 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7995 switch (tkind)
7997 case GOMP_MAP_ALLOC:
7998 case GOMP_MAP_TO:
7999 case GOMP_MAP_FROM:
8000 case GOMP_MAP_TOFROM:
8001 case GOMP_MAP_ALWAYS_TO:
8002 case GOMP_MAP_ALWAYS_FROM:
8003 case GOMP_MAP_ALWAYS_TOFROM:
8004 case GOMP_MAP_RELEASE:
8005 case GOMP_MAP_FORCE_TO:
8006 case GOMP_MAP_FORCE_FROM:
8007 case GOMP_MAP_FORCE_TOFROM:
8008 case GOMP_MAP_FORCE_PRESENT:
8009 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8010 break;
8011 case GOMP_MAP_DELETE:
8012 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
8013 default:
8014 break;
8016 if (tkind_zero != tkind)
8018 if (integer_zerop (s))
8019 tkind = tkind_zero;
8020 else if (integer_nonzerop (s))
8021 tkind_zero = tkind;
8023 break;
8024 case OMP_CLAUSE_FIRSTPRIVATE:
8025 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8026 tkind = GOMP_MAP_TO;
8027 tkind_zero = tkind;
8028 break;
8029 case OMP_CLAUSE_TO:
8030 tkind = GOMP_MAP_TO;
8031 tkind_zero = tkind;
8032 break;
8033 case OMP_CLAUSE_FROM:
8034 tkind = GOMP_MAP_FROM;
8035 tkind_zero = tkind;
8036 break;
8037 default:
8038 gcc_unreachable ();
8040 gcc_checking_assert (tkind
8041 < (HOST_WIDE_INT_C (1U) << talign_shift));
8042 gcc_checking_assert (tkind_zero
8043 < (HOST_WIDE_INT_C (1U) << talign_shift));
8044 talign = ceil_log2 (talign);
8045 tkind |= talign << talign_shift;
8046 tkind_zero |= talign << talign_shift;
8047 gcc_checking_assert (tkind
8048 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8049 gcc_checking_assert (tkind_zero
8050 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8051 if (tkind == tkind_zero)
8052 x = build_int_cstu (tkind_type, tkind);
8053 else
8055 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8056 x = build3 (COND_EXPR, tkind_type,
8057 fold_build2 (EQ_EXPR, boolean_type_node,
8058 unshare_expr (s), size_zero_node),
8059 build_int_cstu (tkind_type, tkind_zero),
8060 build_int_cstu (tkind_type, tkind));
8062 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8063 if (nc && nc != c)
8064 c = nc;
8065 break;
8067 case OMP_CLAUSE_FIRSTPRIVATE:
8068 if (is_oacc_parallel (ctx))
8069 goto oacc_firstprivate_map;
8070 ovar = OMP_CLAUSE_DECL (c);
8071 if (omp_is_reference (ovar))
8072 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8073 else
8074 talign = DECL_ALIGN_UNIT (ovar);
8075 var = lookup_decl_in_outer_ctx (ovar, ctx);
8076 x = build_sender_ref (ovar, ctx);
8077 tkind = GOMP_MAP_FIRSTPRIVATE;
8078 type = TREE_TYPE (ovar);
8079 if (omp_is_reference (ovar))
8080 type = TREE_TYPE (type);
8081 if ((INTEGRAL_TYPE_P (type)
8082 && TYPE_PRECISION (type) <= POINTER_SIZE)
8083 || TREE_CODE (type) == POINTER_TYPE)
8085 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8086 tree t = var;
8087 if (omp_is_reference (var))
8088 t = build_simple_mem_ref (var);
8089 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8090 TREE_NO_WARNING (var) = 1;
8091 if (TREE_CODE (type) != POINTER_TYPE)
8092 t = fold_convert (pointer_sized_int_node, t);
8093 t = fold_convert (TREE_TYPE (x), t);
8094 gimplify_assign (x, t, &ilist);
8096 else if (omp_is_reference (var))
8097 gimplify_assign (x, var, &ilist);
8098 else if (is_gimple_reg (var))
8100 tree avar = create_tmp_var (TREE_TYPE (var));
8101 mark_addressable (avar);
8102 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8103 TREE_NO_WARNING (var) = 1;
8104 gimplify_assign (avar, var, &ilist);
8105 avar = build_fold_addr_expr (avar);
8106 gimplify_assign (x, avar, &ilist);
8108 else
8110 var = build_fold_addr_expr (var);
8111 gimplify_assign (x, var, &ilist);
8113 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8114 s = size_int (0);
8115 else if (omp_is_reference (ovar))
8116 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8117 else
8118 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8119 s = fold_convert (size_type_node, s);
8120 purpose = size_int (map_idx++);
8121 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8122 if (TREE_CODE (s) != INTEGER_CST)
8123 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8125 gcc_checking_assert (tkind
8126 < (HOST_WIDE_INT_C (1U) << talign_shift));
8127 talign = ceil_log2 (talign);
8128 tkind |= talign << talign_shift;
8129 gcc_checking_assert (tkind
8130 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8131 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8132 build_int_cstu (tkind_type, tkind));
8133 break;
8135 case OMP_CLAUSE_USE_DEVICE_PTR:
8136 case OMP_CLAUSE_IS_DEVICE_PTR:
8137 ovar = OMP_CLAUSE_DECL (c);
8138 var = lookup_decl_in_outer_ctx (ovar, ctx);
8139 x = build_sender_ref (ovar, ctx);
8140 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8141 tkind = GOMP_MAP_USE_DEVICE_PTR;
8142 else
8143 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8144 type = TREE_TYPE (ovar);
8145 if (TREE_CODE (type) == ARRAY_TYPE)
8146 var = build_fold_addr_expr (var);
8147 else
8149 if (omp_is_reference (ovar))
8151 type = TREE_TYPE (type);
8152 if (TREE_CODE (type) != ARRAY_TYPE)
8153 var = build_simple_mem_ref (var);
8154 var = fold_convert (TREE_TYPE (x), var);
8157 gimplify_assign (x, var, &ilist);
8158 s = size_int (0);
8159 purpose = size_int (map_idx++);
8160 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8161 gcc_checking_assert (tkind
8162 < (HOST_WIDE_INT_C (1U) << talign_shift));
8163 gcc_checking_assert (tkind
8164 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8165 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8166 build_int_cstu (tkind_type, tkind));
8167 break;
8170 gcc_assert (map_idx == map_cnt);
8172 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8173 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8174 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8175 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8176 for (int i = 1; i <= 2; i++)
8177 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8179 gimple_seq initlist = NULL;
8180 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8181 TREE_VEC_ELT (t, i)),
8182 &initlist, true, NULL_TREE);
8183 gimple_seq_add_seq (&ilist, initlist);
8185 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8186 NULL);
8187 TREE_THIS_VOLATILE (clobber) = 1;
8188 gimple_seq_add_stmt (&olist,
8189 gimple_build_assign (TREE_VEC_ELT (t, i),
8190 clobber));
8193 tree clobber = build_constructor (ctx->record_type, NULL);
8194 TREE_THIS_VOLATILE (clobber) = 1;
8195 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8196 clobber));
8199 /* Once all the expansions are done, sequence all the different
8200 fragments inside gimple_omp_body. */
8202 new_body = NULL;
8204 if (offloaded
8205 && ctx->record_type)
8207 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8208 /* fixup_child_record_type might have changed receiver_decl's type. */
8209 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8210 gimple_seq_add_stmt (&new_body,
8211 gimple_build_assign (ctx->receiver_decl, t));
8213 gimple_seq_add_seq (&new_body, fplist);
8215 if (offloaded || data_region)
8217 tree prev = NULL_TREE;
8218 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8219 switch (OMP_CLAUSE_CODE (c))
8221 tree var, x;
8222 default:
8223 break;
8224 case OMP_CLAUSE_FIRSTPRIVATE:
8225 if (is_gimple_omp_oacc (ctx->stmt))
8226 break;
8227 var = OMP_CLAUSE_DECL (c);
8228 if (omp_is_reference (var)
8229 || is_gimple_reg_type (TREE_TYPE (var)))
8231 tree new_var = lookup_decl (var, ctx);
8232 tree type;
8233 type = TREE_TYPE (var);
8234 if (omp_is_reference (var))
8235 type = TREE_TYPE (type);
8236 if ((INTEGRAL_TYPE_P (type)
8237 && TYPE_PRECISION (type) <= POINTER_SIZE)
8238 || TREE_CODE (type) == POINTER_TYPE)
8240 x = build_receiver_ref (var, false, ctx);
8241 if (TREE_CODE (type) != POINTER_TYPE)
8242 x = fold_convert (pointer_sized_int_node, x);
8243 x = fold_convert (type, x);
8244 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8245 fb_rvalue);
8246 if (omp_is_reference (var))
8248 tree v = create_tmp_var_raw (type, get_name (var));
8249 gimple_add_tmp_var (v);
8250 TREE_ADDRESSABLE (v) = 1;
8251 gimple_seq_add_stmt (&new_body,
8252 gimple_build_assign (v, x));
8253 x = build_fold_addr_expr (v);
8255 gimple_seq_add_stmt (&new_body,
8256 gimple_build_assign (new_var, x));
8258 else
8260 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8261 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8262 fb_rvalue);
8263 gimple_seq_add_stmt (&new_body,
8264 gimple_build_assign (new_var, x));
8267 else if (is_variable_sized (var))
8269 tree pvar = DECL_VALUE_EXPR (var);
8270 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8271 pvar = TREE_OPERAND (pvar, 0);
8272 gcc_assert (DECL_P (pvar));
8273 tree new_var = lookup_decl (pvar, ctx);
8274 x = build_receiver_ref (var, false, ctx);
8275 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8276 gimple_seq_add_stmt (&new_body,
8277 gimple_build_assign (new_var, x));
8279 break;
8280 case OMP_CLAUSE_PRIVATE:
8281 if (is_gimple_omp_oacc (ctx->stmt))
8282 break;
8283 var = OMP_CLAUSE_DECL (c);
8284 if (omp_is_reference (var))
8286 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8287 tree new_var = lookup_decl (var, ctx);
8288 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8289 if (TREE_CONSTANT (x))
8291 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8292 get_name (var));
8293 gimple_add_tmp_var (x);
8294 TREE_ADDRESSABLE (x) = 1;
8295 x = build_fold_addr_expr_loc (clause_loc, x);
8297 else
8298 break;
8300 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8301 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8302 gimple_seq_add_stmt (&new_body,
8303 gimple_build_assign (new_var, x));
8305 break;
8306 case OMP_CLAUSE_USE_DEVICE_PTR:
8307 case OMP_CLAUSE_IS_DEVICE_PTR:
8308 var = OMP_CLAUSE_DECL (c);
8309 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8310 x = build_sender_ref (var, ctx);
8311 else
8312 x = build_receiver_ref (var, false, ctx);
8313 if (is_variable_sized (var))
8315 tree pvar = DECL_VALUE_EXPR (var);
8316 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8317 pvar = TREE_OPERAND (pvar, 0);
8318 gcc_assert (DECL_P (pvar));
8319 tree new_var = lookup_decl (pvar, ctx);
8320 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8321 gimple_seq_add_stmt (&new_body,
8322 gimple_build_assign (new_var, x));
8324 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8326 tree new_var = lookup_decl (var, ctx);
8327 new_var = DECL_VALUE_EXPR (new_var);
8328 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8329 new_var = TREE_OPERAND (new_var, 0);
8330 gcc_assert (DECL_P (new_var));
8331 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8332 gimple_seq_add_stmt (&new_body,
8333 gimple_build_assign (new_var, x));
8335 else
8337 tree type = TREE_TYPE (var);
8338 tree new_var = lookup_decl (var, ctx);
8339 if (omp_is_reference (var))
8341 type = TREE_TYPE (type);
8342 if (TREE_CODE (type) != ARRAY_TYPE)
8344 tree v = create_tmp_var_raw (type, get_name (var));
8345 gimple_add_tmp_var (v);
8346 TREE_ADDRESSABLE (v) = 1;
8347 x = fold_convert (type, x);
8348 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8349 fb_rvalue);
8350 gimple_seq_add_stmt (&new_body,
8351 gimple_build_assign (v, x));
8352 x = build_fold_addr_expr (v);
8355 new_var = DECL_VALUE_EXPR (new_var);
8356 x = fold_convert (TREE_TYPE (new_var), x);
8357 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8358 gimple_seq_add_stmt (&new_body,
8359 gimple_build_assign (new_var, x));
8361 break;
8363 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8364 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8365 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8366 or references to VLAs. */
8367 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8368 switch (OMP_CLAUSE_CODE (c))
8370 tree var;
8371 default:
8372 break;
8373 case OMP_CLAUSE_MAP:
8374 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8375 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8377 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8378 HOST_WIDE_INT offset = 0;
8379 gcc_assert (prev);
8380 var = OMP_CLAUSE_DECL (c);
8381 if (DECL_P (var)
8382 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8383 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8384 ctx))
8385 && varpool_node::get_create (var)->offloadable)
8386 break;
8387 if (TREE_CODE (var) == INDIRECT_REF
8388 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8389 var = TREE_OPERAND (var, 0);
8390 if (TREE_CODE (var) == COMPONENT_REF)
8392 var = get_addr_base_and_unit_offset (var, &offset);
8393 gcc_assert (var != NULL_TREE && DECL_P (var));
8395 else if (DECL_SIZE (var)
8396 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8398 tree var2 = DECL_VALUE_EXPR (var);
8399 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8400 var2 = TREE_OPERAND (var2, 0);
8401 gcc_assert (DECL_P (var2));
8402 var = var2;
8404 tree new_var = lookup_decl (var, ctx), x;
8405 tree type = TREE_TYPE (new_var);
8406 bool is_ref;
8407 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8408 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8409 == COMPONENT_REF))
8411 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8412 is_ref = true;
8413 new_var = build2 (MEM_REF, type,
8414 build_fold_addr_expr (new_var),
8415 build_int_cst (build_pointer_type (type),
8416 offset));
8418 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8420 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8421 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8422 new_var = build2 (MEM_REF, type,
8423 build_fold_addr_expr (new_var),
8424 build_int_cst (build_pointer_type (type),
8425 offset));
8427 else
8428 is_ref = omp_is_reference (var);
8429 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8430 is_ref = false;
8431 bool ref_to_array = false;
8432 if (is_ref)
8434 type = TREE_TYPE (type);
8435 if (TREE_CODE (type) == ARRAY_TYPE)
8437 type = build_pointer_type (type);
8438 ref_to_array = true;
8441 else if (TREE_CODE (type) == ARRAY_TYPE)
8443 tree decl2 = DECL_VALUE_EXPR (new_var);
8444 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8445 decl2 = TREE_OPERAND (decl2, 0);
8446 gcc_assert (DECL_P (decl2));
8447 new_var = decl2;
8448 type = TREE_TYPE (new_var);
8450 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8451 x = fold_convert_loc (clause_loc, type, x);
8452 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8454 tree bias = OMP_CLAUSE_SIZE (c);
8455 if (DECL_P (bias))
8456 bias = lookup_decl (bias, ctx);
8457 bias = fold_convert_loc (clause_loc, sizetype, bias);
8458 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8459 bias);
8460 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8461 TREE_TYPE (x), x, bias);
8463 if (ref_to_array)
8464 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8465 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8466 if (is_ref && !ref_to_array)
8468 tree t = create_tmp_var_raw (type, get_name (var));
8469 gimple_add_tmp_var (t);
8470 TREE_ADDRESSABLE (t) = 1;
8471 gimple_seq_add_stmt (&new_body,
8472 gimple_build_assign (t, x));
8473 x = build_fold_addr_expr_loc (clause_loc, t);
8475 gimple_seq_add_stmt (&new_body,
8476 gimple_build_assign (new_var, x));
8477 prev = NULL_TREE;
8479 else if (OMP_CLAUSE_CHAIN (c)
8480 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8481 == OMP_CLAUSE_MAP
8482 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8483 == GOMP_MAP_FIRSTPRIVATE_POINTER
8484 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8485 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8486 prev = c;
8487 break;
8488 case OMP_CLAUSE_PRIVATE:
8489 var = OMP_CLAUSE_DECL (c);
8490 if (is_variable_sized (var))
8492 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8493 tree new_var = lookup_decl (var, ctx);
8494 tree pvar = DECL_VALUE_EXPR (var);
8495 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8496 pvar = TREE_OPERAND (pvar, 0);
8497 gcc_assert (DECL_P (pvar));
8498 tree new_pvar = lookup_decl (pvar, ctx);
8499 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8500 tree al = size_int (DECL_ALIGN (var));
8501 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8502 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8503 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8504 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8505 gimple_seq_add_stmt (&new_body,
8506 gimple_build_assign (new_pvar, x));
8508 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8510 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8511 tree new_var = lookup_decl (var, ctx);
8512 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8513 if (TREE_CONSTANT (x))
8514 break;
8515 else
8517 tree atmp
8518 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8519 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8520 tree al = size_int (TYPE_ALIGN (rtype));
8521 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8524 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8525 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8526 gimple_seq_add_stmt (&new_body,
8527 gimple_build_assign (new_var, x));
8529 break;
8532 gimple_seq fork_seq = NULL;
8533 gimple_seq join_seq = NULL;
8535 if (is_oacc_parallel (ctx))
8537 /* If there are reductions on the offloaded region itself, treat
8538 them as a dummy GANG loop. */
8539 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8541 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8542 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8545 gimple_seq_add_seq (&new_body, fork_seq);
8546 gimple_seq_add_seq (&new_body, tgt_body);
8547 gimple_seq_add_seq (&new_body, join_seq);
8549 if (offloaded)
8550 new_body = maybe_catch_exception (new_body);
8552 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8553 gimple_omp_set_body (stmt, new_body);
8556 bind = gimple_build_bind (NULL, NULL,
8557 tgt_bind ? gimple_bind_block (tgt_bind)
8558 : NULL_TREE);
8559 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8560 gimple_bind_add_seq (bind, ilist);
8561 gimple_bind_add_stmt (bind, stmt);
8562 gimple_bind_add_seq (bind, olist);
8564 pop_gimplify_context (NULL);
8566 if (dep_bind)
8568 gimple_bind_add_seq (dep_bind, dep_ilist);
8569 gimple_bind_add_stmt (dep_bind, bind);
8570 gimple_bind_add_seq (dep_bind, dep_olist);
8571 pop_gimplify_context (dep_bind);
8575 /* Expand code for an OpenMP teams directive. */
8577 static void
8578 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8580 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8581 push_gimplify_context ();
8583 tree block = make_node (BLOCK);
8584 gbind *bind = gimple_build_bind (NULL, NULL, block);
8585 gsi_replace (gsi_p, bind, true);
8586 gimple_seq bind_body = NULL;
8587 gimple_seq dlist = NULL;
8588 gimple_seq olist = NULL;
8590 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8591 OMP_CLAUSE_NUM_TEAMS);
8592 if (num_teams == NULL_TREE)
8593 num_teams = build_int_cst (unsigned_type_node, 0);
8594 else
8596 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8597 num_teams = fold_convert (unsigned_type_node, num_teams);
8598 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8600 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8601 OMP_CLAUSE_THREAD_LIMIT);
8602 if (thread_limit == NULL_TREE)
8603 thread_limit = build_int_cst (unsigned_type_node, 0);
8604 else
8606 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8607 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8608 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8609 fb_rvalue);
8612 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8613 &bind_body, &dlist, ctx, NULL);
8614 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8615 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8616 if (!gimple_omp_teams_grid_phony (teams_stmt))
8618 gimple_seq_add_stmt (&bind_body, teams_stmt);
8619 location_t loc = gimple_location (teams_stmt);
8620 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8621 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8622 gimple_set_location (call, loc);
8623 gimple_seq_add_stmt (&bind_body, call);
8626 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8627 gimple_omp_set_body (teams_stmt, NULL);
8628 gimple_seq_add_seq (&bind_body, olist);
8629 gimple_seq_add_seq (&bind_body, dlist);
8630 if (!gimple_omp_teams_grid_phony (teams_stmt))
8631 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8632 gimple_bind_set_body (bind, bind_body);
8634 pop_gimplify_context (bind);
8636 gimple_bind_append_vars (bind, ctx->block_vars);
8637 BLOCK_VARS (block) = ctx->block_vars;
8638 if (BLOCK_VARS (block))
8639 TREE_USED (block) = 1;
8642 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8644 static void
8645 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8647 gimple *stmt = gsi_stmt (*gsi_p);
8648 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8649 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8650 gimple_build_omp_return (false));
8654 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8655 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8656 of OMP context, but with task_shared_vars set. */
8658 static tree
8659 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8660 void *data)
8662 tree t = *tp;
8664 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8665 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8666 return t;
8668 if (task_shared_vars
8669 && DECL_P (t)
8670 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8671 return t;
8673 /* If a global variable has been privatized, TREE_CONSTANT on
8674 ADDR_EXPR might be wrong. */
8675 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8676 recompute_tree_invariant_for_addr_expr (t);
8678 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8679 return NULL_TREE;
8682 /* Data to be communicated between lower_omp_regimplify_operands and
8683 lower_omp_regimplify_operands_p. */
8685 struct lower_omp_regimplify_operands_data
8687 omp_context *ctx;
8688 vec<tree> *decls;
8691 /* Helper function for lower_omp_regimplify_operands. Find
8692 omp_member_access_dummy_var vars and adjust temporarily their
8693 DECL_VALUE_EXPRs if needed. */
8695 static tree
8696 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8697 void *data)
8699 tree t = omp_member_access_dummy_var (*tp);
8700 if (t)
8702 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8703 lower_omp_regimplify_operands_data *ldata
8704 = (lower_omp_regimplify_operands_data *) wi->info;
8705 tree o = maybe_lookup_decl (t, ldata->ctx);
8706 if (o != t)
8708 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8709 ldata->decls->safe_push (*tp);
8710 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8711 SET_DECL_VALUE_EXPR (*tp, v);
8714 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8715 return NULL_TREE;
8718 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8719 of omp_member_access_dummy_var vars during regimplification. */
8721 static void
8722 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8723 gimple_stmt_iterator *gsi_p)
8725 auto_vec<tree, 10> decls;
8726 if (ctx)
8728 struct walk_stmt_info wi;
8729 memset (&wi, '\0', sizeof (wi));
8730 struct lower_omp_regimplify_operands_data data;
8731 data.ctx = ctx;
8732 data.decls = &decls;
8733 wi.info = &data;
8734 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8736 gimple_regimplify_operands (stmt, gsi_p);
8737 while (!decls.is_empty ())
8739 tree t = decls.pop ();
8740 tree v = decls.pop ();
8741 SET_DECL_VALUE_EXPR (t, v);
8745 static void
8746 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8748 gimple *stmt = gsi_stmt (*gsi_p);
8749 struct walk_stmt_info wi;
8750 gcall *call_stmt;
8752 if (gimple_has_location (stmt))
8753 input_location = gimple_location (stmt);
8755 if (task_shared_vars)
8756 memset (&wi, '\0', sizeof (wi));
8758 /* If we have issued syntax errors, avoid doing any heavy lifting.
8759 Just replace the OMP directives with a NOP to avoid
8760 confusing RTL expansion. */
8761 if (seen_error () && is_gimple_omp (stmt))
8763 gsi_replace (gsi_p, gimple_build_nop (), true);
8764 return;
8767 switch (gimple_code (stmt))
8769 case GIMPLE_COND:
8771 gcond *cond_stmt = as_a <gcond *> (stmt);
8772 if ((ctx || task_shared_vars)
8773 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8774 lower_omp_regimplify_p,
8775 ctx ? NULL : &wi, NULL)
8776 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8777 lower_omp_regimplify_p,
8778 ctx ? NULL : &wi, NULL)))
8779 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8781 break;
8782 case GIMPLE_CATCH:
8783 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8784 break;
8785 case GIMPLE_EH_FILTER:
8786 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8787 break;
8788 case GIMPLE_TRY:
8789 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8790 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8791 break;
8792 case GIMPLE_TRANSACTION:
8793 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8794 ctx);
8795 break;
8796 case GIMPLE_BIND:
8797 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8798 break;
8799 case GIMPLE_OMP_PARALLEL:
8800 case GIMPLE_OMP_TASK:
8801 ctx = maybe_lookup_ctx (stmt);
8802 gcc_assert (ctx);
8803 if (ctx->cancellable)
8804 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8805 lower_omp_taskreg (gsi_p, ctx);
8806 break;
8807 case GIMPLE_OMP_FOR:
8808 ctx = maybe_lookup_ctx (stmt);
8809 gcc_assert (ctx);
8810 if (ctx->cancellable)
8811 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8812 lower_omp_for (gsi_p, ctx);
8813 break;
8814 case GIMPLE_OMP_SECTIONS:
8815 ctx = maybe_lookup_ctx (stmt);
8816 gcc_assert (ctx);
8817 if (ctx->cancellable)
8818 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8819 lower_omp_sections (gsi_p, ctx);
8820 break;
8821 case GIMPLE_OMP_SINGLE:
8822 ctx = maybe_lookup_ctx (stmt);
8823 gcc_assert (ctx);
8824 lower_omp_single (gsi_p, ctx);
8825 break;
8826 case GIMPLE_OMP_MASTER:
8827 ctx = maybe_lookup_ctx (stmt);
8828 gcc_assert (ctx);
8829 lower_omp_master (gsi_p, ctx);
8830 break;
8831 case GIMPLE_OMP_TASKGROUP:
8832 ctx = maybe_lookup_ctx (stmt);
8833 gcc_assert (ctx);
8834 lower_omp_taskgroup (gsi_p, ctx);
8835 break;
8836 case GIMPLE_OMP_ORDERED:
8837 ctx = maybe_lookup_ctx (stmt);
8838 gcc_assert (ctx);
8839 lower_omp_ordered (gsi_p, ctx);
8840 break;
8841 case GIMPLE_OMP_CRITICAL:
8842 ctx = maybe_lookup_ctx (stmt);
8843 gcc_assert (ctx);
8844 lower_omp_critical (gsi_p, ctx);
8845 break;
8846 case GIMPLE_OMP_ATOMIC_LOAD:
8847 if ((ctx || task_shared_vars)
8848 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8849 as_a <gomp_atomic_load *> (stmt)),
8850 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8851 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8852 break;
8853 case GIMPLE_OMP_TARGET:
8854 ctx = maybe_lookup_ctx (stmt);
8855 gcc_assert (ctx);
8856 lower_omp_target (gsi_p, ctx);
8857 break;
8858 case GIMPLE_OMP_TEAMS:
8859 ctx = maybe_lookup_ctx (stmt);
8860 gcc_assert (ctx);
8861 lower_omp_teams (gsi_p, ctx);
8862 break;
8863 case GIMPLE_OMP_GRID_BODY:
8864 ctx = maybe_lookup_ctx (stmt);
8865 gcc_assert (ctx);
8866 lower_omp_grid_body (gsi_p, ctx);
8867 break;
8868 case GIMPLE_CALL:
8869 tree fndecl;
8870 call_stmt = as_a <gcall *> (stmt);
8871 fndecl = gimple_call_fndecl (call_stmt);
8872 if (fndecl
8873 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8874 switch (DECL_FUNCTION_CODE (fndecl))
8876 case BUILT_IN_GOMP_BARRIER:
8877 if (ctx == NULL)
8878 break;
8879 /* FALLTHRU */
8880 case BUILT_IN_GOMP_CANCEL:
8881 case BUILT_IN_GOMP_CANCELLATION_POINT:
8882 omp_context *cctx;
8883 cctx = ctx;
8884 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8885 cctx = cctx->outer;
8886 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8887 if (!cctx->cancellable)
8889 if (DECL_FUNCTION_CODE (fndecl)
8890 == BUILT_IN_GOMP_CANCELLATION_POINT)
8892 stmt = gimple_build_nop ();
8893 gsi_replace (gsi_p, stmt, false);
8895 break;
8897 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8899 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8900 gimple_call_set_fndecl (call_stmt, fndecl);
8901 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8903 tree lhs;
8904 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8905 gimple_call_set_lhs (call_stmt, lhs);
8906 tree fallthru_label;
8907 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8908 gimple *g;
8909 g = gimple_build_label (fallthru_label);
8910 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8911 g = gimple_build_cond (NE_EXPR, lhs,
8912 fold_convert (TREE_TYPE (lhs),
8913 boolean_false_node),
8914 cctx->cancel_label, fallthru_label);
8915 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8916 break;
8917 default:
8918 break;
8920 /* FALLTHRU */
8921 default:
8922 if ((ctx || task_shared_vars)
8923 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8924 ctx ? NULL : &wi))
8926 /* Just remove clobbers, this should happen only if we have
8927 "privatized" local addressable variables in SIMD regions,
8928 the clobber isn't needed in that case and gimplifying address
8929 of the ARRAY_REF into a pointer and creating MEM_REF based
8930 clobber would create worse code than we get with the clobber
8931 dropped. */
8932 if (gimple_clobber_p (stmt))
8934 gsi_replace (gsi_p, gimple_build_nop (), true);
8935 break;
8937 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8939 break;
8943 static void
8944 lower_omp (gimple_seq *body, omp_context *ctx)
8946 location_t saved_location = input_location;
8947 gimple_stmt_iterator gsi;
8948 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8949 lower_omp_1 (&gsi, ctx);
8950 /* During gimplification, we haven't folded statments inside offloading
8951 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8952 if (target_nesting_level || taskreg_nesting_level)
8953 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8954 fold_stmt (&gsi);
8955 input_location = saved_location;
8958 /* Main entry point. */
8960 static unsigned int
8961 execute_lower_omp (void)
8963 gimple_seq body;
8964 int i;
8965 omp_context *ctx;
8967 /* This pass always runs, to provide PROP_gimple_lomp.
8968 But often, there is nothing to do. */
8969 if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8970 && flag_openmp_simd == 0)
8971 return 0;
8973 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8974 delete_omp_context);
8976 body = gimple_body (current_function_decl);
8978 if (hsa_gen_requested_p ())
8979 omp_grid_gridify_all_targets (&body);
8981 scan_omp (&body, NULL);
8982 gcc_assert (taskreg_nesting_level == 0);
8983 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8984 finish_taskreg_scan (ctx);
8985 taskreg_contexts.release ();
8987 if (all_contexts->root)
8989 if (task_shared_vars)
8990 push_gimplify_context ();
8991 lower_omp (&body, NULL);
8992 if (task_shared_vars)
8993 pop_gimplify_context (NULL);
8996 if (all_contexts)
8998 splay_tree_delete (all_contexts);
8999 all_contexts = NULL;
9001 BITMAP_FREE (task_shared_vars);
9002 return 0;
9005 namespace {
9007 const pass_data pass_data_lower_omp =
9009 GIMPLE_PASS, /* type */
9010 "omplower", /* name */
9011 OPTGROUP_OMP, /* optinfo_flags */
9012 TV_NONE, /* tv_id */
9013 PROP_gimple_any, /* properties_required */
9014 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9015 0, /* properties_destroyed */
9016 0, /* todo_flags_start */
9017 0, /* todo_flags_finish */
9020 class pass_lower_omp : public gimple_opt_pass
9022 public:
9023 pass_lower_omp (gcc::context *ctxt)
9024 : gimple_opt_pass (pass_data_lower_omp, ctxt)
9027 /* opt_pass methods: */
9028 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9030 }; // class pass_lower_omp
9032 } // anon namespace
9034 gimple_opt_pass *
9035 make_pass_lower_omp (gcc::context *ctxt)
9037 return new pass_lower_omp (ctxt);
9040 /* The following is a utility to diagnose structured block violations.
9041 It is not part of the "omplower" pass, as that's invoked too late. It
9042 should be invoked by the respective front ends after gimplification. */
9044 static splay_tree all_labels;
9046 /* Check for mismatched contexts and generate an error if needed. Return
9047 true if an error is detected. */
9049 static bool
9050 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9051 gimple *branch_ctx, gimple *label_ctx)
9053 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9054 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9056 if (label_ctx == branch_ctx)
9057 return false;
9059 const char* kind = NULL;
9061 if (flag_cilkplus)
9063 if ((branch_ctx
9064 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
9065 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
9066 || (label_ctx
9067 && gimple_code (label_ctx) == GIMPLE_OMP_FOR
9068 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
9069 kind = "Cilk Plus";
9071 if (flag_openacc)
9073 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9074 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9076 gcc_checking_assert (kind == NULL);
9077 kind = "OpenACC";
9080 if (kind == NULL)
9082 gcc_checking_assert (flag_openmp);
9083 kind = "OpenMP";
9086 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9087 so we could traverse it and issue a correct "exit" or "enter" error
9088 message upon a structured block violation.
9090 We built the context by building a list with tree_cons'ing, but there is
9091 no easy counterpart in gimple tuples. It seems like far too much work
9092 for issuing exit/enter error messages. If someone really misses the
9093 distinct error message... patches welcome. */
9095 #if 0
9096 /* Try to avoid confusing the user by producing and error message
9097 with correct "exit" or "enter" verbiage. We prefer "exit"
9098 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9099 if (branch_ctx == NULL)
9100 exit_p = false;
9101 else
9103 while (label_ctx)
9105 if (TREE_VALUE (label_ctx) == branch_ctx)
9107 exit_p = false;
9108 break;
9110 label_ctx = TREE_CHAIN (label_ctx);
9114 if (exit_p)
9115 error ("invalid exit from %s structured block", kind);
9116 else
9117 error ("invalid entry to %s structured block", kind);
9118 #endif
9120 /* If it's obvious we have an invalid entry, be specific about the error. */
9121 if (branch_ctx == NULL)
9122 error ("invalid entry to %s structured block", kind);
9123 else
9125 /* Otherwise, be vague and lazy, but efficient. */
9126 error ("invalid branch to/from %s structured block", kind);
9129 gsi_replace (gsi_p, gimple_build_nop (), false);
9130 return true;
9133 /* Pass 1: Create a minimal tree of structured blocks, and record
9134 where each label is found. */
9136 static tree
9137 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9138 struct walk_stmt_info *wi)
9140 gimple *context = (gimple *) wi->info;
9141 gimple *inner_context;
9142 gimple *stmt = gsi_stmt (*gsi_p);
9144 *handled_ops_p = true;
9146 switch (gimple_code (stmt))
9148 WALK_SUBSTMTS;
9150 case GIMPLE_OMP_PARALLEL:
9151 case GIMPLE_OMP_TASK:
9152 case GIMPLE_OMP_SECTIONS:
9153 case GIMPLE_OMP_SINGLE:
9154 case GIMPLE_OMP_SECTION:
9155 case GIMPLE_OMP_MASTER:
9156 case GIMPLE_OMP_ORDERED:
9157 case GIMPLE_OMP_CRITICAL:
9158 case GIMPLE_OMP_TARGET:
9159 case GIMPLE_OMP_TEAMS:
9160 case GIMPLE_OMP_TASKGROUP:
9161 /* The minimal context here is just the current OMP construct. */
9162 inner_context = stmt;
9163 wi->info = inner_context;
9164 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9165 wi->info = context;
9166 break;
9168 case GIMPLE_OMP_FOR:
9169 inner_context = stmt;
9170 wi->info = inner_context;
9171 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9172 walk them. */
9173 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9174 diagnose_sb_1, NULL, wi);
9175 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9176 wi->info = context;
9177 break;
9179 case GIMPLE_LABEL:
9180 splay_tree_insert (all_labels,
9181 (splay_tree_key) gimple_label_label (
9182 as_a <glabel *> (stmt)),
9183 (splay_tree_value) context);
9184 break;
9186 default:
9187 break;
9190 return NULL_TREE;
9193 /* Pass 2: Check each branch and see if its context differs from that of
9194 the destination label's context. */
9196 static tree
9197 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9198 struct walk_stmt_info *wi)
9200 gimple *context = (gimple *) wi->info;
9201 splay_tree_node n;
9202 gimple *stmt = gsi_stmt (*gsi_p);
9204 *handled_ops_p = true;
9206 switch (gimple_code (stmt))
9208 WALK_SUBSTMTS;
9210 case GIMPLE_OMP_PARALLEL:
9211 case GIMPLE_OMP_TASK:
9212 case GIMPLE_OMP_SECTIONS:
9213 case GIMPLE_OMP_SINGLE:
9214 case GIMPLE_OMP_SECTION:
9215 case GIMPLE_OMP_MASTER:
9216 case GIMPLE_OMP_ORDERED:
9217 case GIMPLE_OMP_CRITICAL:
9218 case GIMPLE_OMP_TARGET:
9219 case GIMPLE_OMP_TEAMS:
9220 case GIMPLE_OMP_TASKGROUP:
9221 wi->info = stmt;
9222 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9223 wi->info = context;
9224 break;
9226 case GIMPLE_OMP_FOR:
9227 wi->info = stmt;
9228 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9229 walk them. */
9230 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9231 diagnose_sb_2, NULL, wi);
9232 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9233 wi->info = context;
9234 break;
9236 case GIMPLE_COND:
9238 gcond *cond_stmt = as_a <gcond *> (stmt);
9239 tree lab = gimple_cond_true_label (cond_stmt);
9240 if (lab)
9242 n = splay_tree_lookup (all_labels,
9243 (splay_tree_key) lab);
9244 diagnose_sb_0 (gsi_p, context,
9245 n ? (gimple *) n->value : NULL);
9247 lab = gimple_cond_false_label (cond_stmt);
9248 if (lab)
9250 n = splay_tree_lookup (all_labels,
9251 (splay_tree_key) lab);
9252 diagnose_sb_0 (gsi_p, context,
9253 n ? (gimple *) n->value : NULL);
9256 break;
9258 case GIMPLE_GOTO:
9260 tree lab = gimple_goto_dest (stmt);
9261 if (TREE_CODE (lab) != LABEL_DECL)
9262 break;
9264 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9265 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9267 break;
9269 case GIMPLE_SWITCH:
9271 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9272 unsigned int i;
9273 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9275 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9276 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9277 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9278 break;
9281 break;
9283 case GIMPLE_RETURN:
9284 diagnose_sb_0 (gsi_p, context, NULL);
9285 break;
9287 default:
9288 break;
9291 return NULL_TREE;
9294 static unsigned int
9295 diagnose_omp_structured_block_errors (void)
9297 struct walk_stmt_info wi;
9298 gimple_seq body = gimple_body (current_function_decl);
9300 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9302 memset (&wi, 0, sizeof (wi));
9303 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9305 memset (&wi, 0, sizeof (wi));
9306 wi.want_locations = true;
9307 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9309 gimple_set_body (current_function_decl, body);
9311 splay_tree_delete (all_labels);
9312 all_labels = NULL;
9314 return 0;
9317 namespace {
9319 const pass_data pass_data_diagnose_omp_blocks =
9321 GIMPLE_PASS, /* type */
9322 "*diagnose_omp_blocks", /* name */
9323 OPTGROUP_OMP, /* optinfo_flags */
9324 TV_NONE, /* tv_id */
9325 PROP_gimple_any, /* properties_required */
9326 0, /* properties_provided */
9327 0, /* properties_destroyed */
9328 0, /* todo_flags_start */
9329 0, /* todo_flags_finish */
9332 class pass_diagnose_omp_blocks : public gimple_opt_pass
9334 public:
9335 pass_diagnose_omp_blocks (gcc::context *ctxt)
9336 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9339 /* opt_pass methods: */
9340 virtual bool gate (function *)
9342 return flag_cilkplus || flag_openacc || flag_openmp;
9344 virtual unsigned int execute (function *)
9346 return diagnose_omp_structured_block_errors ();
9349 }; // class pass_diagnose_omp_blocks
9351 } // anon namespace
9353 gimple_opt_pass *
9354 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9356 return new pass_diagnose_omp_blocks (ctxt);
9360 #include "gt-omp-low.h"