/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
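
/* For illustration (a rough sketch only; the real GIMPLE differs in
   detail), a directive such as

       #pragma omp parallel shared(x)
       x++;

   where X's address may be visible elsewhere (see use_pointer_for_field)
   ends up, after expansion, as approximately

       struct .omp_data_s { int *x; } .omp_data_o;
       .omp_data_o.x = &x;
       __builtin_GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

       static void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       {
	 (*.omp_data_i->x)++;
       }

   The .omp_data_s record, the sender decl .omp_data_o and the receiver
   parameter .omp_data_i correspond to record_type, sender_decl and
   receiver_decl in the omp_context below.  */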
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
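
/* The up-cast "inheritance" noted on CB above means any callback that
   receives the copy_body_data can recover the enclosing context; a
   minimal sketch (the callback name is illustrative only):

     static tree
     example_copy_cb (tree var, copy_body_data *cb)
     {
       omp_context *ctx = (omp_context *) cb;  // CB is the first member
       return maybe_lookup_decl (var, ctx);
     }

   omp_copy_decl below relies on exactly this layout.  */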
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
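
/* WALK_SUBSTMTS is meant to be expanded inside the switch of a
   walk_gimple_seq callback so that container statements are descended
   into rather than reported as handled, along these lines:

     switch (gimple_code (stmt))
       {
       WALK_SUBSTMTS;

       case GIMPLE_OMP_FOR:
	 ...
       default:
	 break;
       }

   omp_find_combined_for below uses it this way.  */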
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; the plain form asserts that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
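
/* Note that FIELD_MAP entries are keyed in two different ways: most are
   keyed by the VAR_DECL itself, but install_var_field below can instead
   key an entry by &DECL_UID (var) (its MASK bit 8), which is e.g. how
   build_outer_var_ref finds the lastprivate field on a taskloop.  A
   sketch of the two lookups:

     splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
     splay_tree_lookup (ctx->field_map, (splay_tree_key) &DECL_UID (var));

   Both keying schemes coexist in the same splay tree.  */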
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
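
/* In summary, the logic above amounts to roughly this decision table
   (informal and not exhaustive):

     aggregate or _Atomic DECL			-> pass by pointer
     static, external or value-expr DECL	-> pass by pointer
     address-taken DECL				-> pass by pointer
     read-only or by-reference parm/result	-> copy-in only
     shared again by an enclosing taskreg,
     or SHARED_CTX is a task			-> pass by pointer, possibly
						   marking the outer decl
						   addressable
     otherwise					-> copy-in/copy-out by value  */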
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is only addressable because the task
     needs to take its address.  But we don't need to take the address
     of privatized copies of that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */
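
/* The MASK bits select where the field goes, as used throughout this
   file: bit 0 (1) installs into record_type/field_map, bit 1 (2) into
   srecord_type/sfield_map, bit 2 (4) gives the field an extra level of
   pointer indirection (mask 7 is used for GOMP_MAP_POINTER mappings of
   arrays), and bit 3 (8) keys the maps by &DECL_UID (VAR) instead of by
   VAR (mask 11 is used for shared firstprivate on tasks).  The common
   case, mask 3, creates the field in both records when both exist.  */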
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
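
/* As a sketch of what the next function does: for

       #pragma omp parallel shared(a) firstprivate(b)

   the first pass below installs fields for A and B into ctx->record_type
   (install_var_field with mask 3, possibly as pointers depending on
   use_pointer_for_field) and creates replacement decls local to the
   child function (install_var_local); the second pass then fixes up the
   remapped decls' types and value-exprs once all fields exist.  */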
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  If
   IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
/* Returns the type of the induction variable for the child function for
   _Cilk_for and the types for _high and _low variables based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls this
     function.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  if (cilk_for_count)
    DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
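
/* The decl built above has, roughly, one of these shapes (FOO stands for
   the enclosing function; the receiver parameter starts out as void *
   and is given its precise type later by fixup_child_record_type):

       static void foo._omp_fn.N (void *.omp_data_i);
       static void foo._omp_cpyfn.N (void *.omp_data_o, void *.omp_data_i);

   the latter for TASK_COPY, where the destination .omp_data_o comes
   first.  */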
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
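
/* For instance, per the counting logic above, a combined

       #pragma omp parallel for collapse(2) lastprivate(x)

   whose inner loop bound is not constant gets 2 (istart/iend)
   + 1 (count2) + 1 (total iteration count for lastprivate)
   = 4 _looptemp_ clauses.  */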
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1919 /* Helper function for finish_taskreg_scan, called through walk_tree.
1920 If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
1921 tree, replace it in the expression. */
1923 static tree
1924 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1926 if (VAR_P (*tp))
1928 omp_context *ctx = (omp_context *) data;
1929 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1930 if (t != *tp)
1932 if (DECL_HAS_VALUE_EXPR_P (t))
1933 t = unshare_expr (DECL_VALUE_EXPR (t));
1934 *tp = t;
1936 *walk_subtrees = 0;
1938 else if (IS_TYPE_OR_DECL_P (*tp))
1939 *walk_subtrees = 0;
1940 return NULL_TREE;
1943 /* If any decls have been made addressable during scan_omp,
1944 adjust their fields if needed, and lay out the record types
1945 of parallel/task constructs. */
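/* For example (an illustrative sketch): if X in

       int x = 0;
       #pragma omp parallel shared (x)
       foo (&x);

   was made addressable only while scanning the region, the field for X
   in .omp_data_s may have to be retyped from a plain int to an int *,
   which is what the use_pointer_for_field check below detects.  */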
1947 static void
1948 finish_taskreg_scan (omp_context *ctx)
1950 if (ctx->record_type == NULL_TREE)
1951 return;
1953 /* If any task_shared_vars were needed, verify for all
1954 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1955 statements whether use_pointer_for_field has changed
1956 because of that. If it did, update the field types now. */
1957 if (task_shared_vars)
1959 tree c;
1961 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1962 c; c = OMP_CLAUSE_CHAIN (c))
1963 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1964 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1966 tree decl = OMP_CLAUSE_DECL (c);
1968 /* Global variables don't need to be copied,
1969 the receiver side will use them directly. */
1970 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1971 continue;
1972 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1973 || !use_pointer_for_field (decl, ctx))
1974 continue;
1975 tree field = lookup_field (decl, ctx);
1976 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1977 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1978 continue;
1979 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1980 TREE_THIS_VOLATILE (field) = 0;
1981 DECL_USER_ALIGN (field) = 0;
1982 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1983 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1984 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1985 if (ctx->srecord_type)
1987 tree sfield = lookup_sfield (decl, ctx);
1988 TREE_TYPE (sfield) = TREE_TYPE (field);
1989 TREE_THIS_VOLATILE (sfield) = 0;
1990 DECL_USER_ALIGN (sfield) = 0;
1991 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1992 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1993 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1998 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2000 layout_type (ctx->record_type);
2001 fixup_child_record_type (ctx);
2003 else
2005 location_t loc = gimple_location (ctx->stmt);
2006 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2007 /* Move VLA fields to the end. */
2008 p = &TYPE_FIELDS (ctx->record_type);
2009 while (*p)
2010 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2011 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2013 *q = *p;
2014 *p = TREE_CHAIN (*p);
2015 TREE_CHAIN (*q) = NULL_TREE;
2016 q = &TREE_CHAIN (*q);
2018 else
2019 p = &DECL_CHAIN (*p);
2020 *p = vla_fields;
2021 if (gimple_omp_task_taskloop_p (ctx->stmt))
2023 /* Move the fields corresponding to the first and second _looptemp_
2024 clauses first. These are filled by GOMP_taskloop
2025 and thus need to be in specific positions. */
2026 tree c1 = gimple_omp_task_clauses (ctx->stmt);
2027 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2028 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2029 OMP_CLAUSE__LOOPTEMP_);
2030 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2031 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2032 p = &TYPE_FIELDS (ctx->record_type);
2033 while (*p)
2034 if (*p == f1 || *p == f2)
2035 *p = DECL_CHAIN (*p);
2036 else
2037 p = &DECL_CHAIN (*p);
2038 DECL_CHAIN (f1) = f2;
2039 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2040 TYPE_FIELDS (ctx->record_type) = f1;
2041 if (ctx->srecord_type)
2043 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2044 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2045 p = &TYPE_FIELDS (ctx->srecord_type);
2046 while (*p)
2047 if (*p == f1 || *p == f2)
2048 *p = DECL_CHAIN (*p);
2049 else
2050 p = &DECL_CHAIN (*p);
2051 DECL_CHAIN (f1) = f2;
2052 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2053 TYPE_FIELDS (ctx->srecord_type) = f1;
2056 layout_type (ctx->record_type);
2057 fixup_child_record_type (ctx);
2058 if (ctx->srecord_type)
2059 layout_type (ctx->srecord_type);
2060 tree t = fold_convert_loc (loc, long_integer_type_node,
2061 TYPE_SIZE_UNIT (ctx->record_type));
2062 if (TREE_CODE (t) != INTEGER_CST)
2064 t = unshare_expr (t);
2065 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2067 gimple_omp_task_set_arg_size (ctx->stmt, t);
2068 t = build_int_cst (long_integer_type_node,
2069 TYPE_ALIGN_UNIT (ctx->record_type));
2070 gimple_omp_task_set_arg_align (ctx->stmt, t);
2074 /* Find the enclosing offload context. */
2076 static omp_context *
2077 enclosing_target_ctx (omp_context *ctx)
2079 for (; ctx; ctx = ctx->outer)
2080 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2081 break;
2083 return ctx;
2086 /* Return true if ctx is part of an oacc kernels region. */
2088 static bool
2089 ctx_in_oacc_kernels_region (omp_context *ctx)
2091 for (;ctx != NULL; ctx = ctx->outer)
2093 gimple *stmt = ctx->stmt;
2094 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2095 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2096 return true;
2099 return false;
2102 /* Check the parallelism clauses inside a kernels region.
2103 Until kernels handling moves to use the same loop indirection
2104 scheme as parallel, we need to do this checking early. */
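/* For example (an illustrative sketch), inside an OpenACC kernels
   region the checks below diagnose:

       #pragma acc loop gang
       for (...)
	 {
	   #pragma acc loop gang   // error: inner loop uses the same
	   for (...)               // OpenACC parallelism as its parent
	     ...
	 }

   as well as "seq" or "auto" combined with gang/worker/vector on the
   same loop.  */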
2106 static unsigned
2107 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2109 bool checking = true;
2110 unsigned outer_mask = 0;
2111 unsigned this_mask = 0;
2112 bool has_seq = false, has_auto = false;
2114 if (ctx->outer)
2115 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2116 if (!stmt)
2118 checking = false;
2119 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2120 return outer_mask;
2121 stmt = as_a <gomp_for *> (ctx->stmt);
2124 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2126 switch (OMP_CLAUSE_CODE (c))
2128 case OMP_CLAUSE_GANG:
2129 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2130 break;
2131 case OMP_CLAUSE_WORKER:
2132 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2133 break;
2134 case OMP_CLAUSE_VECTOR:
2135 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2136 break;
2137 case OMP_CLAUSE_SEQ:
2138 has_seq = true;
2139 break;
2140 case OMP_CLAUSE_AUTO:
2141 has_auto = true;
2142 break;
2143 default:
2144 break;
2148 if (checking)
2150 if (has_seq && (this_mask || has_auto))
2151 error_at (gimple_location (stmt), "%<seq%> overrides other"
2152 " OpenACC loop specifiers");
2153 else if (has_auto && this_mask)
2154 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2155 " OpenACC loop specifiers");
2157 if (this_mask & outer_mask)
2158 error_at (gimple_location (stmt), "inner loop uses same"
2159 " OpenACC parallelism as containing loop");
2162 return outer_mask | this_mask;
2165 /* Scan a GIMPLE_OMP_FOR. */
2167 static omp_context *
2168 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2170 omp_context *ctx;
2171 size_t i;
2172 tree clauses = gimple_omp_for_clauses (stmt);
2174 ctx = new_omp_context (stmt, outer_ctx);
2176 if (is_gimple_omp_oacc (stmt))
2178 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2180 if (!tgt || is_oacc_parallel (tgt))
2181 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2183 char const *check = NULL;
2185 switch (OMP_CLAUSE_CODE (c))
2187 case OMP_CLAUSE_GANG:
2188 check = "gang";
2189 break;
2191 case OMP_CLAUSE_WORKER:
2192 check = "worker";
2193 break;
2195 case OMP_CLAUSE_VECTOR:
2196 check = "vector";
2197 break;
2199 default:
2200 break;
2203 if (check && OMP_CLAUSE_OPERAND (c, 0))
2204 error_at (gimple_location (stmt),
2205 "argument not permitted on %qs clause in"
2206 " OpenACC %<parallel%>", check);
2209 if (tgt && is_oacc_kernels (tgt))
2211 /* Strip out reductions, as they are not handled yet. */
2212 tree *prev_ptr = &clauses;
2214 while (tree probe = *prev_ptr)
2216 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2218 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2219 *prev_ptr = *next_ptr;
2220 else
2221 prev_ptr = next_ptr;
2224 gimple_omp_for_set_clauses (stmt, clauses);
2225 check_oacc_kernel_gwv (stmt, ctx);
2229 scan_sharing_clauses (clauses, ctx);
2231 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2232 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2234 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2235 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2236 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2237 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2239 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2240 return ctx;
2243 /* Duplicate #pragma omp simd, creating one copy for SIMT and another for SIMD. */
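/* A sketch of the structure built below (names illustrative):

       D.cond = IFN_GOMP_USE_SIMT ();
       if (D.cond != 0) goto lab1; else goto lab2;
     lab1:
       #pragma omp simd _simt_ ...   // copy with locals remapped
       goto lab3;
     lab2:
       #pragma omp simd ...          // the original statement
     lab3:
       ;
*/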
2245 static void
2246 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2247 omp_context *outer_ctx)
2249 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2250 gsi_replace (gsi, bind, false);
2251 gimple_seq seq = NULL;
2252 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2253 tree cond = create_tmp_var_raw (integer_type_node);
2254 DECL_CONTEXT (cond) = current_function_decl;
2255 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2256 gimple_bind_set_vars (bind, cond);
2257 gimple_call_set_lhs (g, cond);
2258 gimple_seq_add_stmt (&seq, g);
2259 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2260 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2261 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2262 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2263 gimple_seq_add_stmt (&seq, g);
2264 g = gimple_build_label (lab1);
2265 gimple_seq_add_stmt (&seq, g);
2266 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2267 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2268 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2269 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2270 gimple_omp_for_set_clauses (new_stmt, clause);
2271 gimple_seq_add_stmt (&seq, new_stmt);
2272 g = gimple_build_goto (lab3);
2273 gimple_seq_add_stmt (&seq, g);
2274 g = gimple_build_label (lab2);
2275 gimple_seq_add_stmt (&seq, g);
2276 gimple_seq_add_stmt (&seq, stmt);
2277 g = gimple_build_label (lab3);
2278 gimple_seq_add_stmt (&seq, g);
2279 gimple_bind_set_body (bind, seq);
2280 update_stmt (bind);
2281 scan_omp_for (new_stmt, outer_ctx);
2282 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2285 /* Scan an OpenMP sections directive. */
2287 static void
2288 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2290 omp_context *ctx;
2292 ctx = new_omp_context (stmt, outer_ctx);
2293 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2294 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2297 /* Scan an OpenMP single directive. */
2299 static void
2300 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2302 omp_context *ctx;
2303 tree name;
2305 ctx = new_omp_context (stmt, outer_ctx);
2306 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2307 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2308 name = create_tmp_var_name (".omp_copy_s");
2309 name = build_decl (gimple_location (stmt),
2310 TYPE_DECL, name, ctx->record_type);
2311 TYPE_NAME (ctx->record_type) = name;
2313 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2314 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2316 if (TYPE_FIELDS (ctx->record_type) == NULL)
2317 ctx->record_type = NULL;
2318 else
2319 layout_type (ctx->record_type);
2322 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2323 used in the corresponding offloaded function are restrict. */
2325 static bool
2326 omp_target_base_pointers_restrict_p (tree clauses)
2328 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2329 used by OpenACC. */
2330 if (flag_openacc == 0)
2331 return false;
2333 /* I. Basic example:
2335 void foo (void)
2337 unsigned int a[2], b[2];
2339 #pragma acc kernels \
2340 copyout (a) \
2341 copyout (b)
2343 a[0] = 0;
2344 b[0] = 1;
2348 After gimplification, we have:
2350 #pragma omp target oacc_kernels \
2351 map(force_from:a [len: 8]) \
2352 map(force_from:b [len: 8])
2354 a[0] = 0;
2355 b[0] = 1;
2358 Because both mappings have the force prefix, we know that they will be
2359 allocated when calling the corresponding offloaded function, which means we
2360 can mark the base pointers for a and b in the offloaded function as
2361 restrict. */
2363 tree c;
2364 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2366 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2367 return false;
2369 switch (OMP_CLAUSE_MAP_KIND (c))
2371 case GOMP_MAP_FORCE_ALLOC:
2372 case GOMP_MAP_FORCE_TO:
2373 case GOMP_MAP_FORCE_FROM:
2374 case GOMP_MAP_FORCE_TOFROM:
2375 break;
2376 default:
2377 return false;
2381 return true;
2384 /* Scan a GIMPLE_OMP_TARGET. */
2386 static void
2387 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2389 omp_context *ctx;
2390 tree name;
2391 bool offloaded = is_gimple_omp_offloaded (stmt);
2392 tree clauses = gimple_omp_target_clauses (stmt);
2394 ctx = new_omp_context (stmt, outer_ctx);
2395 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2396 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2397 name = create_tmp_var_name (".omp_data_t");
2398 name = build_decl (gimple_location (stmt),
2399 TYPE_DECL, name, ctx->record_type);
2400 DECL_ARTIFICIAL (name) = 1;
2401 DECL_NAMELESS (name) = 1;
2402 TYPE_NAME (ctx->record_type) = name;
2403 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2405 bool base_pointers_restrict = false;
2406 if (offloaded)
2408 create_omp_child_function (ctx, false);
2409 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2411 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2412 if (base_pointers_restrict
2413 && dump_file && (dump_flags & TDF_DETAILS))
2414 fprintf (dump_file,
2415 "Base pointers in offloaded function are restrict\n");
2418 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2419 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2421 if (TYPE_FIELDS (ctx->record_type) == NULL)
2422 ctx->record_type = ctx->receiver_decl = NULL;
2423 else
2425 TYPE_FIELDS (ctx->record_type)
2426 = nreverse (TYPE_FIELDS (ctx->record_type));
2427 if (flag_checking)
2429 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2430 for (tree field = TYPE_FIELDS (ctx->record_type);
2431 field;
2432 field = DECL_CHAIN (field))
2433 gcc_assert (DECL_ALIGN (field) == align);
2435 layout_type (ctx->record_type);
2436 if (offloaded)
2437 fixup_child_record_type (ctx);
2441 /* Scan an OpenMP teams directive. */
2443 static void
2444 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2446 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2447 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2448 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2451 /* Check nesting restrictions. */
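/* For example (an illustrative sketch), the checks below reject

       #pragma omp for
       for (...)
	 {
	   #pragma omp for   // error: work-sharing region closely nested
	   for (...)         // inside another work-sharing region
	     ...
	 }

   as well as misplaced barriers, orphaned cancellation points, and
   OpenACC/OpenMP mixtures.  */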
2452 static bool
2453 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2455 tree c;
2457 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2458 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2459 the original copy of its contents. */
2460 return true;
2462 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2463 inside an OpenACC CTX. */
2464 if (!(is_gimple_omp (stmt)
2465 && is_gimple_omp_oacc (stmt))
2466 /* Except for atomic codes that we share with OpenMP. */
2467 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2468 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2470 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2472 error_at (gimple_location (stmt),
2473 "non-OpenACC construct inside of OpenACC routine");
2474 return false;
2476 else
2477 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2478 if (is_gimple_omp (octx->stmt)
2479 && is_gimple_omp_oacc (octx->stmt))
2481 error_at (gimple_location (stmt),
2482 "non-OpenACC construct inside of OpenACC region");
2483 return false;
2487 if (ctx != NULL)
2489 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2490 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2492 c = NULL_TREE;
2493 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2495 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2496 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2498 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2499 && (ctx->outer == NULL
2500 || !gimple_omp_for_combined_into_p (ctx->stmt)
2501 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2502 || (gimple_omp_for_kind (ctx->outer->stmt)
2503 != GF_OMP_FOR_KIND_FOR)
2504 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2506 error_at (gimple_location (stmt),
2507 "%<ordered simd threads%> must be closely "
2508 "nested inside of %<for simd%> region");
2509 return false;
2511 return true;
2514 error_at (gimple_location (stmt),
2515 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2516 " may not be nested inside %<simd%> region");
2517 return false;
2519 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2521 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2522 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2523 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2524 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2526 error_at (gimple_location (stmt),
2527 "only %<distribute%> or %<parallel%> regions are "
2528 "allowed to be strictly nested inside %<teams%> "
2529 "region");
2530 return false;
2534 switch (gimple_code (stmt))
2536 case GIMPLE_OMP_FOR:
2537 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2538 return true;
2539 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2541 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2543 error_at (gimple_location (stmt),
2544 "%<distribute%> region must be strictly nested "
2545 "inside %<teams%> construct");
2546 return false;
2548 return true;
2550 /* We split taskloop into a task with a nested taskloop in it. */
2551 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2552 return true;
2553 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2555 bool ok = false;
2557 if (ctx)
2558 switch (gimple_code (ctx->stmt))
2560 case GIMPLE_OMP_FOR:
2561 ok = (gimple_omp_for_kind (ctx->stmt)
2562 == GF_OMP_FOR_KIND_OACC_LOOP);
2563 break;
2565 case GIMPLE_OMP_TARGET:
2566 switch (gimple_omp_target_kind (ctx->stmt))
2568 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2569 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2570 ok = true;
2571 break;
2573 default:
2574 break;
2577 default:
2578 break;
2580 else if (oacc_get_fn_attrib (current_function_decl))
2581 ok = true;
2582 if (!ok)
2584 error_at (gimple_location (stmt),
2585 "OpenACC loop directive must be associated with"
2586 " an OpenACC compute region");
2587 return false;
2590 /* FALLTHRU */
2591 case GIMPLE_CALL:
2592 if (is_gimple_call (stmt)
2593 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2594 == BUILT_IN_GOMP_CANCEL
2595 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2596 == BUILT_IN_GOMP_CANCELLATION_POINT))
2598 const char *bad = NULL;
2599 const char *kind = NULL;
2600 const char *construct
2601 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2602 == BUILT_IN_GOMP_CANCEL)
2603 ? "#pragma omp cancel"
2604 : "#pragma omp cancellation point";
2605 if (ctx == NULL)
2607 error_at (gimple_location (stmt), "orphaned %qs construct",
2608 construct);
2609 return false;
2611 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2612 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2613 : 0)
2615 case 1:
2616 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2617 bad = "#pragma omp parallel";
2618 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2619 == BUILT_IN_GOMP_CANCEL
2620 && !integer_zerop (gimple_call_arg (stmt, 1)))
2621 ctx->cancellable = true;
2622 kind = "parallel";
2623 break;
2624 case 2:
2625 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2626 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2627 bad = "#pragma omp for";
2628 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2629 == BUILT_IN_GOMP_CANCEL
2630 && !integer_zerop (gimple_call_arg (stmt, 1)))
2632 ctx->cancellable = true;
2633 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2634 OMP_CLAUSE_NOWAIT))
2635 warning_at (gimple_location (stmt), 0,
2636 "%<#pragma omp cancel for%> inside "
2637 "%<nowait%> for construct");
2638 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2639 OMP_CLAUSE_ORDERED))
2640 warning_at (gimple_location (stmt), 0,
2641 "%<#pragma omp cancel for%> inside "
2642 "%<ordered%> for construct");
2644 kind = "for";
2645 break;
2646 case 4:
2647 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2648 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2649 bad = "#pragma omp sections";
2650 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2651 == BUILT_IN_GOMP_CANCEL
2652 && !integer_zerop (gimple_call_arg (stmt, 1)))
2654 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2656 ctx->cancellable = true;
2657 if (omp_find_clause (gimple_omp_sections_clauses
2658 (ctx->stmt),
2659 OMP_CLAUSE_NOWAIT))
2660 warning_at (gimple_location (stmt), 0,
2661 "%<#pragma omp cancel sections%> inside "
2662 "%<nowait%> sections construct");
2664 else
2666 gcc_assert (ctx->outer
2667 && gimple_code (ctx->outer->stmt)
2668 == GIMPLE_OMP_SECTIONS);
2669 ctx->outer->cancellable = true;
2670 if (omp_find_clause (gimple_omp_sections_clauses
2671 (ctx->outer->stmt),
2672 OMP_CLAUSE_NOWAIT))
2673 warning_at (gimple_location (stmt), 0,
2674 "%<#pragma omp cancel sections%> inside "
2675 "%<nowait%> sections construct");
2678 kind = "sections";
2679 break;
2680 case 8:
2681 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2682 bad = "#pragma omp task";
2683 else
2685 for (omp_context *octx = ctx->outer;
2686 octx; octx = octx->outer)
2688 switch (gimple_code (octx->stmt))
2690 case GIMPLE_OMP_TASKGROUP:
2691 break;
2692 case GIMPLE_OMP_TARGET:
2693 if (gimple_omp_target_kind (octx->stmt)
2694 != GF_OMP_TARGET_KIND_REGION)
2695 continue;
2696 /* FALLTHRU */
2697 case GIMPLE_OMP_PARALLEL:
2698 case GIMPLE_OMP_TEAMS:
2699 error_at (gimple_location (stmt),
2700 "%<%s taskgroup%> construct not closely "
2701 "nested inside of %<taskgroup%> region",
2702 construct);
2703 return false;
2704 default:
2705 continue;
2707 break;
2709 ctx->cancellable = true;
2711 kind = "taskgroup";
2712 break;
2713 default:
2714 error_at (gimple_location (stmt), "invalid arguments");
2715 return false;
2717 if (bad)
2719 error_at (gimple_location (stmt),
2720 "%<%s %s%> construct not closely nested inside of %qs",
2721 construct, kind, bad);
2722 return false;
2725 /* FALLTHRU */
2726 case GIMPLE_OMP_SECTIONS:
2727 case GIMPLE_OMP_SINGLE:
2728 for (; ctx != NULL; ctx = ctx->outer)
2729 switch (gimple_code (ctx->stmt))
2731 case GIMPLE_OMP_FOR:
2732 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2733 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2734 break;
2735 /* FALLTHRU */
2736 case GIMPLE_OMP_SECTIONS:
2737 case GIMPLE_OMP_SINGLE:
2738 case GIMPLE_OMP_ORDERED:
2739 case GIMPLE_OMP_MASTER:
2740 case GIMPLE_OMP_TASK:
2741 case GIMPLE_OMP_CRITICAL:
2742 if (is_gimple_call (stmt))
2744 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2745 != BUILT_IN_GOMP_BARRIER)
2746 return true;
2747 error_at (gimple_location (stmt),
2748 "barrier region may not be closely nested inside "
2749 "of work-sharing, %<critical%>, %<ordered%>, "
2750 "%<master%>, explicit %<task%> or %<taskloop%> "
2751 "region");
2752 return false;
2754 error_at (gimple_location (stmt),
2755 "work-sharing region may not be closely nested inside "
2756 "of work-sharing, %<critical%>, %<ordered%>, "
2757 "%<master%>, explicit %<task%> or %<taskloop%> region");
2758 return false;
2759 case GIMPLE_OMP_PARALLEL:
2760 case GIMPLE_OMP_TEAMS:
2761 return true;
2762 case GIMPLE_OMP_TARGET:
2763 if (gimple_omp_target_kind (ctx->stmt)
2764 == GF_OMP_TARGET_KIND_REGION)
2765 return true;
2766 break;
2767 default:
2768 break;
2770 break;
2771 case GIMPLE_OMP_MASTER:
2772 for (; ctx != NULL; ctx = ctx->outer)
2773 switch (gimple_code (ctx->stmt))
2775 case GIMPLE_OMP_FOR:
2776 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2777 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2778 break;
2779 /* FALLTHRU */
2780 case GIMPLE_OMP_SECTIONS:
2781 case GIMPLE_OMP_SINGLE:
2782 case GIMPLE_OMP_TASK:
2783 error_at (gimple_location (stmt),
2784 "%<master%> region may not be closely nested inside "
2785 "of work-sharing, explicit %<task%> or %<taskloop%> "
2786 "region");
2787 return false;
2788 case GIMPLE_OMP_PARALLEL:
2789 case GIMPLE_OMP_TEAMS:
2790 return true;
2791 case GIMPLE_OMP_TARGET:
2792 if (gimple_omp_target_kind (ctx->stmt)
2793 == GF_OMP_TARGET_KIND_REGION)
2794 return true;
2795 break;
2796 default:
2797 break;
2799 break;
2800 case GIMPLE_OMP_TASK:
2801 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2802 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2803 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2804 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2806 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2807 error_at (OMP_CLAUSE_LOCATION (c),
2808 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2809 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2810 return false;
2812 break;
2813 case GIMPLE_OMP_ORDERED:
2814 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2815 c; c = OMP_CLAUSE_CHAIN (c))
2817 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2819 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2820 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2821 continue;
2823 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2824 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2825 || kind == OMP_CLAUSE_DEPEND_SINK)
2827 tree oclause;
2828 /* Look for a containing ordered(N) loop. */
2829 if (ctx == NULL
2830 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2831 || (oclause
2832 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2833 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2835 error_at (OMP_CLAUSE_LOCATION (c),
2836 "%<ordered%> construct with %<depend%> clause "
2837 "must be closely nested inside an %<ordered%> "
2838 "loop");
2839 return false;
2841 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2843 error_at (OMP_CLAUSE_LOCATION (c),
2844 "%<ordered%> construct with %<depend%> clause "
2845 "must be closely nested inside a loop with "
2846 "%<ordered%> clause with a parameter");
2847 return false;
2850 else
2852 error_at (OMP_CLAUSE_LOCATION (c),
2853 "invalid depend kind in omp %<ordered%> %<depend%>");
2854 return false;
2857 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2858 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2860 /* An ordered simd must be closely nested inside a simd region,
2861 and a simd region must not contain constructs other than
2862 ordered simd, therefore an ordered simd may be either orphaned,
2863 or ctx->stmt must be simd. The latter case was already
2864 handled earlier. */
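/* The accepted shape is roughly (illustrative):

       #pragma omp simd
       for (...)
	 {
	   #pragma omp ordered simd
	   { ... }
	 }

   either directly as above (handled earlier) or with the ordered simd
   orphaned in a function called from such a loop.  */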
2865 if (ctx != NULL)
2867 error_at (gimple_location (stmt),
2868 "%<ordered%> %<simd%> must be closely nested inside "
2869 "%<simd%> region");
2870 return false;
2873 for (; ctx != NULL; ctx = ctx->outer)
2874 switch (gimple_code (ctx->stmt))
2876 case GIMPLE_OMP_CRITICAL:
2877 case GIMPLE_OMP_TASK:
2878 case GIMPLE_OMP_ORDERED:
2879 ordered_in_taskloop:
2880 error_at (gimple_location (stmt),
2881 "%<ordered%> region may not be closely nested inside "
2882 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2883 "%<taskloop%> region");
2884 return false;
2885 case GIMPLE_OMP_FOR:
2886 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2887 goto ordered_in_taskloop;
2888 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2889 OMP_CLAUSE_ORDERED) == NULL)
2891 error_at (gimple_location (stmt),
2892 "%<ordered%> region must be closely nested inside "
2893 "a loop region with an %<ordered%> clause");
2894 return false;
2896 return true;
2897 case GIMPLE_OMP_TARGET:
2898 if (gimple_omp_target_kind (ctx->stmt)
2899 != GF_OMP_TARGET_KIND_REGION)
2900 break;
2901 /* FALLTHRU */
2902 case GIMPLE_OMP_PARALLEL:
2903 case GIMPLE_OMP_TEAMS:
2904 error_at (gimple_location (stmt),
2905 "%<ordered%> region must be closely nested inside "
2906 "a loop region with an %<ordered%> clause");
2907 return false;
2908 default:
2909 break;
2911 break;
2912 case GIMPLE_OMP_CRITICAL:
2914 tree this_stmt_name
2915 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2916 for (; ctx != NULL; ctx = ctx->outer)
2917 if (gomp_critical *other_crit
2918 = dyn_cast <gomp_critical *> (ctx->stmt))
2919 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2921 error_at (gimple_location (stmt),
2922 "%<critical%> region may not be nested inside "
2923 "a %<critical%> region with the same name");
2924 return false;
2927 break;
2928 case GIMPLE_OMP_TEAMS:
2929 if (ctx == NULL
2930 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2931 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2933 error_at (gimple_location (stmt),
2934 "%<teams%> construct not closely nested inside of "
2935 "%<target%> construct");
2936 return false;
2938 break;
2939 case GIMPLE_OMP_TARGET:
2940 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2941 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2942 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2943 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2945 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2946 error_at (OMP_CLAUSE_LOCATION (c),
2947 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2948 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2949 return false;
2951 if (is_gimple_omp_offloaded (stmt)
2952 && oacc_get_fn_attrib (cfun->decl) != NULL)
2954 error_at (gimple_location (stmt),
2955 "OpenACC region inside of OpenACC routine, nested "
2956 "parallelism not supported yet");
2957 return false;
2959 for (; ctx != NULL; ctx = ctx->outer)
2961 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2963 if (is_gimple_omp (stmt)
2964 && is_gimple_omp_oacc (stmt)
2965 && is_gimple_omp (ctx->stmt))
2967 error_at (gimple_location (stmt),
2968 "OpenACC construct inside of non-OpenACC region");
2969 return false;
2971 continue;
2974 const char *stmt_name, *ctx_stmt_name;
2975 switch (gimple_omp_target_kind (stmt))
2977 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2978 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2979 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2980 case GF_OMP_TARGET_KIND_ENTER_DATA:
2981 stmt_name = "target enter data"; break;
2982 case GF_OMP_TARGET_KIND_EXIT_DATA:
2983 stmt_name = "target exit data"; break;
2984 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2985 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2986 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2987 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2988 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2989 stmt_name = "enter/exit data"; break;
2990 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2991 break;
2992 default: gcc_unreachable ();
2994 switch (gimple_omp_target_kind (ctx->stmt))
2996 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2997 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2998 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2999 ctx_stmt_name = "parallel"; break;
3000 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3001 ctx_stmt_name = "kernels"; break;
3002 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3003 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3004 ctx_stmt_name = "host_data"; break;
3005 default: gcc_unreachable ();
3008 /* OpenACC/OpenMP mismatch? */
3009 if (is_gimple_omp_oacc (stmt)
3010 != is_gimple_omp_oacc (ctx->stmt))
3012 error_at (gimple_location (stmt),
3013 "%s %qs construct inside of %s %qs region",
3014 (is_gimple_omp_oacc (stmt)
3015 ? "OpenACC" : "OpenMP"), stmt_name,
3016 (is_gimple_omp_oacc (ctx->stmt)
3017 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3018 return false;
3020 if (is_gimple_omp_offloaded (ctx->stmt))
3022 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3023 if (is_gimple_omp_oacc (ctx->stmt))
3025 error_at (gimple_location (stmt),
3026 "%qs construct inside of %qs region",
3027 stmt_name, ctx_stmt_name);
3028 return false;
3030 else
3032 warning_at (gimple_location (stmt), 0,
3033 "%qs construct inside of %qs region",
3034 stmt_name, ctx_stmt_name);
3038 break;
3039 default:
3040 break;
3042 return true;
3046 /* Helper function for scan_omp.
3048 Callback for walk_tree or for operands in walk_gimple_stmt, used to
3049 scan for OMP directives in TP. */
3051 static tree
3052 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3054 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3055 omp_context *ctx = (omp_context *) wi->info;
3056 tree t = *tp;
3058 switch (TREE_CODE (t))
3060 case VAR_DECL:
3061 case PARM_DECL:
3062 case LABEL_DECL:
3063 case RESULT_DECL:
3064 if (ctx)
3066 tree repl = remap_decl (t, &ctx->cb);
3067 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3068 *tp = repl;
3070 break;
3072 default:
3073 if (ctx && TYPE_P (t))
3074 *tp = remap_type (t, &ctx->cb);
3075 else if (!DECL_P (t))
3077 *walk_subtrees = 1;
3078 if (ctx)
3080 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3081 if (tem != TREE_TYPE (t))
3083 if (TREE_CODE (t) == INTEGER_CST)
3084 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3085 else
3086 TREE_TYPE (t) = tem;
3090 break;
3093 return NULL_TREE;
3096 /* Return true if FNDECL is a setjmp or a longjmp. */
3098 static bool
3099 setjmp_or_longjmp_p (const_tree fndecl)
3101 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3102 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3103 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3104 return true;
3106 tree declname = DECL_NAME (fndecl);
3107 if (!declname)
3108 return false;
3109 const char *name = IDENTIFIER_POINTER (declname);
3110 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3114 /* Helper function for scan_omp.
3116 Callback for walk_gimple_stmt used to scan for OMP directives in
3117 the current statement in GSI. */
3119 static tree
3120 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3121 struct walk_stmt_info *wi)
3123 gimple *stmt = gsi_stmt (*gsi);
3124 omp_context *ctx = (omp_context *) wi->info;
3126 if (gimple_has_location (stmt))
3127 input_location = gimple_location (stmt);
3129 /* Check the nesting restrictions. */
3130 bool remove = false;
3131 if (is_gimple_omp (stmt))
3132 remove = !check_omp_nesting_restrictions (stmt, ctx);
3133 else if (is_gimple_call (stmt))
3135 tree fndecl = gimple_call_fndecl (stmt);
3136 if (fndecl)
3138 if (setjmp_or_longjmp_p (fndecl)
3139 && ctx
3140 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3141 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3143 remove = true;
3144 error_at (gimple_location (stmt),
3145 "setjmp/longjmp inside simd construct");
3147 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3148 switch (DECL_FUNCTION_CODE (fndecl))
3150 case BUILT_IN_GOMP_BARRIER:
3151 case BUILT_IN_GOMP_CANCEL:
3152 case BUILT_IN_GOMP_CANCELLATION_POINT:
3153 case BUILT_IN_GOMP_TASKYIELD:
3154 case BUILT_IN_GOMP_TASKWAIT:
3155 case BUILT_IN_GOMP_TASKGROUP_START:
3156 case BUILT_IN_GOMP_TASKGROUP_END:
3157 remove = !check_omp_nesting_restrictions (stmt, ctx);
3158 break;
3159 default:
3160 break;
3164 if (remove)
3166 stmt = gimple_build_nop ();
3167 gsi_replace (gsi, stmt, false);
3170 *handled_ops_p = true;
3172 switch (gimple_code (stmt))
3174 case GIMPLE_OMP_PARALLEL:
3175 taskreg_nesting_level++;
3176 scan_omp_parallel (gsi, ctx);
3177 taskreg_nesting_level--;
3178 break;
3180 case GIMPLE_OMP_TASK:
3181 taskreg_nesting_level++;
3182 scan_omp_task (gsi, ctx);
3183 taskreg_nesting_level--;
3184 break;
3186 case GIMPLE_OMP_FOR:
3187 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3188 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3189 && omp_maybe_offloaded_ctx (ctx)
3190 && omp_max_simt_vf ())
3191 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3192 else
3193 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3194 break;
3196 case GIMPLE_OMP_SECTIONS:
3197 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3198 break;
3200 case GIMPLE_OMP_SINGLE:
3201 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3202 break;
3204 case GIMPLE_OMP_SECTION:
3205 case GIMPLE_OMP_MASTER:
3206 case GIMPLE_OMP_TASKGROUP:
3207 case GIMPLE_OMP_ORDERED:
3208 case GIMPLE_OMP_CRITICAL:
3209 case GIMPLE_OMP_GRID_BODY:
3210 ctx = new_omp_context (stmt, ctx);
3211 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3212 break;
3214 case GIMPLE_OMP_TARGET:
3215 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3216 break;
3218 case GIMPLE_OMP_TEAMS:
3219 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3220 break;
3222 case GIMPLE_BIND:
3224 tree var;
3226 *handled_ops_p = false;
3227 if (ctx)
3228 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3229 var ;
3230 var = DECL_CHAIN (var))
3231 insert_decl_map (&ctx->cb, var, var);
3233 break;
3234 default:
3235 *handled_ops_p = false;
3236 break;
3239 return NULL_TREE;
3243 /* Scan all the statements starting at the current statement. CTX
3244 contains context information about the OMP directives and
3245 clauses found during the scan. */
3247 static void
3248 scan_omp (gimple_seq *body_p, omp_context *ctx)
3250 location_t saved_location;
3251 struct walk_stmt_info wi;
3253 memset (&wi, 0, sizeof (wi));
3254 wi.info = ctx;
3255 wi.want_locations = true;
3257 saved_location = input_location;
3258 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3259 input_location = saved_location;
3262 /* Re-gimplification and code generation routines. */
3264 /* If a context was created for STMT when it was scanned, return it. */
3266 static omp_context *
3267 maybe_lookup_ctx (gimple *stmt)
3269 splay_tree_node n;
3270 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3271 return n ? (omp_context *) n->value : NULL;
3275 /* Find the mapping for DECL in CTX or the immediately enclosing
3276 context that has a mapping for DECL.
3278 If CTX is a nested parallel directive, we may have to use the decl
3279 mappings created in CTX's parent context. Suppose that we have the
3280 following parallel nesting (variable UIDs shown for clarity):
3282 iD.1562 = 0;
3283 #omp parallel shared(iD.1562) -> outer parallel
3284 iD.1562 = iD.1562 + 1;
3286 #omp parallel shared (iD.1562) -> inner parallel
3287 iD.1562 = iD.1562 - 1;
3289 Each parallel structure will create a distinct .omp_data_s structure
3290 for copying iD.1562 in/out of the directive:
3292 outer parallel .omp_data_s.1.i -> iD.1562
3293 inner parallel .omp_data_s.2.i -> iD.1562
3295 A shared variable mapping will produce a copy-out operation before
3296 the parallel directive and a copy-in operation after it. So, in
3297 this case we would have:
3299 iD.1562 = 0;
3300 .omp_data_o.1.i = iD.1562;
3301 #omp parallel shared(iD.1562) -> outer parallel
3302 .omp_data_i.1 = &.omp_data_o.1
3303 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3305 .omp_data_o.2.i = iD.1562; -> **
3306 #omp parallel shared(iD.1562) -> inner parallel
3307 .omp_data_i.2 = &.omp_data_o.2
3308 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3311 ** This is a problem. The symbol iD.1562 cannot be referenced
3312 inside the body of the outer parallel region. But since we are
3313 emitting this copy operation while expanding the inner parallel
3314 directive, we need to access the CTX structure of the outer
3315 parallel directive to get the correct mapping:
3317 .omp_data_o.2.i = .omp_data_i.1->i
3319 Since there may be other workshare or parallel directives enclosing
3320 the parallel directive, it may be necessary to walk up the context
3321 parent chain. This is not a problem in general because nested
3322 parallelism happens only rarely. */
3324 static tree
3325 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3327 tree t;
3328 omp_context *up;
3330 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3331 t = maybe_lookup_decl (decl, up);
3333 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3335 return t ? t : decl;
3339 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3340 in outer contexts. */
3342 static tree
3343 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3345 tree t = NULL;
3346 omp_context *up;
3348 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3349 t = maybe_lookup_decl (decl, up);
3351 return t ? t : decl;
3355 /* Construct the initialization value for reduction operation OP. */
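/* The identity values produced below, summarized (informative):

       +, -, |, ^, ||, !=   ->  0
       *, &&, ==            ->  1
       &                    ->  ~0 (all bits set)
       max                  ->  the minimum value of TYPE
       min                  ->  the maximum value of TYPE  */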
3357 tree
3358 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3360 switch (op)
3362 case PLUS_EXPR:
3363 case MINUS_EXPR:
3364 case BIT_IOR_EXPR:
3365 case BIT_XOR_EXPR:
3366 case TRUTH_OR_EXPR:
3367 case TRUTH_ORIF_EXPR:
3368 case TRUTH_XOR_EXPR:
3369 case NE_EXPR:
3370 return build_zero_cst (type);
3372 case MULT_EXPR:
3373 case TRUTH_AND_EXPR:
3374 case TRUTH_ANDIF_EXPR:
3375 case EQ_EXPR:
3376 return fold_convert_loc (loc, type, integer_one_node);
3378 case BIT_AND_EXPR:
3379 return fold_convert_loc (loc, type, integer_minus_one_node);
3381 case MAX_EXPR:
3382 if (SCALAR_FLOAT_TYPE_P (type))
3384 REAL_VALUE_TYPE max, min;
3385 if (HONOR_INFINITIES (type))
3387 real_inf (&max);
3388 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3390 else
3391 real_maxval (&min, 1, TYPE_MODE (type));
3392 return build_real (type, min);
3394 else if (POINTER_TYPE_P (type))
3396 wide_int min
3397 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3398 return wide_int_to_tree (type, min);
3400 else
3402 gcc_assert (INTEGRAL_TYPE_P (type));
3403 return TYPE_MIN_VALUE (type);
3406 case MIN_EXPR:
3407 if (SCALAR_FLOAT_TYPE_P (type))
3409 REAL_VALUE_TYPE max;
3410 if (HONOR_INFINITIES (type))
3411 real_inf (&max);
3412 else
3413 real_maxval (&max, 0, TYPE_MODE (type));
3414 return build_real (type, max);
3416 else if (POINTER_TYPE_P (type))
3418 wide_int max
3419 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3420 return wide_int_to_tree (type, max);
3422 else
3424 gcc_assert (INTEGRAL_TYPE_P (type));
3425 return TYPE_MAX_VALUE (type);
3428 default:
3429 gcc_unreachable ();
3433 /* Construct the initialization value for reduction CLAUSE. */
3435 tree
3436 omp_reduction_init (tree clause, tree type)
3438 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3439 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3442 /* Return the alignment to be assumed for the variable in CLAUSE, which
3443 should be OMP_CLAUSE_ALIGNED. */
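/* For example (an illustrative sketch):

       #pragma omp simd aligned (p : 32)   // an alignment of 32 is returned

   whereas plain "aligned (p)" yields the implementation-defined value
   computed below from the target's preferred SIMD modes; on a target
   whose widest vector mode is 512 bits that would be 64, for
   instance.  */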
3445 static tree
3446 omp_clause_aligned_alignment (tree clause)
3448 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3449 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3451 /* Otherwise return an implementation-defined alignment. */
3452 unsigned int al = 1;
3453 opt_scalar_mode mode_iter;
3454 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3455 if (vs)
3456 vs = 1 << floor_log2 (vs);
3457 static enum mode_class classes[]
3458 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3459 for (int i = 0; i < 4; i += 2)
3460 /* The for loop above dictates that we only walk through scalar classes. */
3461 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3463 scalar_mode mode = mode_iter.require ();
3464 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3465 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3466 continue;
3467 while (vs
3468 && GET_MODE_SIZE (vmode) < vs
3469 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3470 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3472 tree type = lang_hooks.types.type_for_mode (mode, 1);
3473 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3474 continue;
3475 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3476 / GET_MODE_SIZE (mode));
3477 if (TYPE_MODE (type) != vmode)
3478 continue;
3479 if (TYPE_ALIGN_UNIT (type) > al)
3480 al = TYPE_ALIGN_UNIT (type);
3482 return build_int_cst (integer_type_node, al);
3486 /* This structure is part of the interface between lower_rec_simd_input_clauses
3487 and lower_rec_input_clauses. */
3489 struct omplow_simd_context {
3490 tree idx;
3491 tree lane;
3492 vec<tree, va_heap> simt_eargs;
3493 gimple_seq simt_dlist;
3494 int max_vf;
3495 bool is_simt;
3498 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3499 privatization. */
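/* A sketch of the lowering (names illustrative): a privatized scalar X
   in a simd loop becomes an "omp simd array" indexed by lane:

       int D.x[max_vf];          // carries the "omp simd array" attribute
       ... D.x[sctx->idx] ...    // per-iteration access (returned as IVAR)
       ... D.x[sctx->lane] ...   // per-lane access (returned as LVAR)

   so the vectorizer can later assign one element to each SIMD lane.  */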
3501 static bool
3502 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3503 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3505 if (sctx->max_vf == 0)
3507 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3508 if (sctx->max_vf > 1)
3510 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3511 OMP_CLAUSE_SAFELEN);
3512 if (c
3513 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3514 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3515 sctx->max_vf = 1;
3516 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3517 sctx->max_vf) == -1)
3518 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3520 if (sctx->max_vf > 1)
3522 sctx->idx = create_tmp_var (unsigned_type_node);
3523 sctx->lane = create_tmp_var (unsigned_type_node);
3526 if (sctx->max_vf == 1)
3527 return false;
3529 if (sctx->is_simt)
3531 if (is_gimple_reg (new_var))
3533 ivar = lvar = new_var;
3534 return true;
3536 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3537 ivar = lvar = create_tmp_var (type);
3538 TREE_ADDRESSABLE (ivar) = 1;
3539 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3540 NULL, DECL_ATTRIBUTES (ivar));
3541 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3542 tree clobber = build_constructor (type, NULL);
3543 TREE_THIS_VOLATILE (clobber) = 1;
3544 gimple *g = gimple_build_assign (ivar, clobber);
3545 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3547 else
3549 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3550 tree avar = create_tmp_var_raw (atype);
3551 if (TREE_ADDRESSABLE (new_var))
3552 TREE_ADDRESSABLE (avar) = 1;
3553 DECL_ATTRIBUTES (avar)
3554 = tree_cons (get_identifier ("omp simd array"), NULL,
3555 DECL_ATTRIBUTES (avar));
3556 gimple_add_tmp_var (avar);
3557 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3558 NULL_TREE, NULL_TREE);
3559 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3560 NULL_TREE, NULL_TREE);
3562 if (DECL_P (new_var))
3564 SET_DECL_VALUE_EXPR (new_var, lvar);
3565 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3567 return true;
3570 /* Helper function of lower_rec_input_clauses. For a reference
3571 in a simd reduction, add an underlying variable for it to reference. */
3573 static void
3574 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3576 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3577 if (TREE_CONSTANT (z))
3579 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3580 get_name (new_vard));
3581 gimple_add_tmp_var (z);
3582 TREE_ADDRESSABLE (z) = 1;
3583 z = build_fold_addr_expr_loc (loc, z);
3584 gimplify_assign (new_vard, z, ilist);
3588 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3589 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3590 private variables. Initialization statements go in ILIST, while calls
3591 to destructors go in DLIST. */
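/* For example (an illustrative sketch), for
   "#pragma omp parallel firstprivate (x)" the child side receives
   roughly:

       x = .omp_data_i->x;   // appended to ILIST

   and, for a C++ type with a destructor, a matching "x.~T ()" call is
   queued on DLIST so it runs when the region ends.  */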
3593 static void
3594 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3595 omp_context *ctx, struct omp_for_data *fd)
3597 tree c, dtor, copyin_seq, x, ptr;
3598 bool copyin_by_ref = false;
3599 bool lastprivate_firstprivate = false;
3600 bool reduction_omp_orig_ref = false;
3601 int pass;
3602 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3603 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3604 omplow_simd_context sctx = omplow_simd_context ();
3605 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3606 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3607 gimple_seq llist[3] = { };
3609 copyin_seq = NULL;
3610 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3612 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3613 with data sharing clauses referencing variable-sized vars. That
3614 is unnecessarily hard to support and very unlikely to result in
3615 vectorized code anyway. */
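/* For example (an illustrative sketch):

       int a[n];                        // a VLA
       #pragma omp simd private (a)     // forces max_vf = 1, i.e.
       for (...)                        // effectively safelen(1)
	 ...
*/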
3616 if (is_simd)
3617 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3618 switch (OMP_CLAUSE_CODE (c))
3620 case OMP_CLAUSE_LINEAR:
3621 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3622 sctx.max_vf = 1;
3623 /* FALLTHRU */
3624 case OMP_CLAUSE_PRIVATE:
3625 case OMP_CLAUSE_FIRSTPRIVATE:
3626 case OMP_CLAUSE_LASTPRIVATE:
3627 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3628 sctx.max_vf = 1;
3629 break;
3630 case OMP_CLAUSE_REDUCTION:
3631 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3632 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3633 sctx.max_vf = 1;
3634 break;
3635 default:
3636 continue;
3639 /* Add a placeholder for simduid. */
3640 if (sctx.is_simt && sctx.max_vf != 1)
3641 sctx.simt_eargs.safe_push (NULL_TREE);
3643 /* Do all the fixed-sized types in the first pass, and the variable-sized
3644 types in the second pass. This makes sure that the scalar arguments to
3645 the variable-sized types are processed before we use them in the
3646 variable-sized operations. */
3647 for (pass = 0; pass < 2; ++pass)
3649 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3651 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3652 tree var, new_var;
3653 bool by_ref;
3654 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3656 switch (c_kind)
3658 case OMP_CLAUSE_PRIVATE:
3659 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3660 continue;
3661 break;
3662 case OMP_CLAUSE_SHARED:
3663 /* Ignore shared directives in a teams construct. */
3664 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3665 continue;
3666 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3668 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3669 || is_global_var (OMP_CLAUSE_DECL (c)));
3670 continue;
3672 case OMP_CLAUSE_FIRSTPRIVATE:
3673 case OMP_CLAUSE_COPYIN:
3674 break;
3675 case OMP_CLAUSE_LINEAR:
3676 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3677 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3678 lastprivate_firstprivate = true;
3679 break;
3680 case OMP_CLAUSE_REDUCTION:
3681 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3682 reduction_omp_orig_ref = true;
3683 break;
3684 case OMP_CLAUSE__LOOPTEMP_:
3685 /* Handle _looptemp_ clauses only on parallel/task. */
3686 if (fd)
3687 continue;
3688 break;
3689 case OMP_CLAUSE_LASTPRIVATE:
3690 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3692 lastprivate_firstprivate = true;
3693 if (pass != 0 || is_taskloop_ctx (ctx))
3694 continue;
3696 /* Even without a corresponding firstprivate, if the
3697 decl is a Fortran allocatable, it needs an outer var
3698 reference. */
3699 else if (pass == 0
3700 && lang_hooks.decls.omp_private_outer_ref
3701 (OMP_CLAUSE_DECL (c)))
3702 lastprivate_firstprivate = true;
3703 break;
3704 case OMP_CLAUSE_ALIGNED:
3705 if (pass == 0)
3706 continue;
3707 var = OMP_CLAUSE_DECL (c);
3708 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3709 && !is_global_var (var))
3711 new_var = maybe_lookup_decl (var, ctx);
3712 if (new_var == NULL_TREE)
3713 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3714 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3715 tree alarg = omp_clause_aligned_alignment (c);
3716 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3717 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3718 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3719 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3720 gimplify_and_add (x, ilist);
3722 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3723 && is_global_var (var))
3725 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3726 new_var = lookup_decl (var, ctx);
3727 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3728 t = build_fold_addr_expr_loc (clause_loc, t);
3729 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3730 tree alarg = omp_clause_aligned_alignment (c);
3731 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3732 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3733 t = fold_convert_loc (clause_loc, ptype, t);
3734 x = create_tmp_var (ptype);
3735 t = build2 (MODIFY_EXPR, ptype, x, t);
3736 gimplify_and_add (t, ilist);
3737 t = build_simple_mem_ref_loc (clause_loc, x);
3738 SET_DECL_VALUE_EXPR (new_var, t);
3739 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3741 continue;
3742 default:
3743 continue;
3746 new_var = var = OMP_CLAUSE_DECL (c);
3747 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3749 var = TREE_OPERAND (var, 0);
3750 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3751 var = TREE_OPERAND (var, 0);
3752 if (TREE_CODE (var) == INDIRECT_REF
3753 || TREE_CODE (var) == ADDR_EXPR)
3754 var = TREE_OPERAND (var, 0);
3755 if (is_variable_sized (var))
3757 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3758 var = DECL_VALUE_EXPR (var);
3759 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3760 var = TREE_OPERAND (var, 0);
3761 gcc_assert (DECL_P (var));
3763 new_var = var;
3765 if (c_kind != OMP_CLAUSE_COPYIN)
3766 new_var = lookup_decl (var, ctx);
3768 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3770 if (pass != 0)
3771 continue;
3773 /* C/C++ array section reductions. */
3774 else if (c_kind == OMP_CLAUSE_REDUCTION
3775 && var != OMP_CLAUSE_DECL (c))
3777 if (pass == 0)
3778 continue;
3780 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3781 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3782 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3784 tree b = TREE_OPERAND (orig_var, 1);
3785 b = maybe_lookup_decl (b, ctx);
3786 if (b == NULL)
3788 b = TREE_OPERAND (orig_var, 1);
3789 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3791 if (integer_zerop (bias))
3792 bias = b;
3793 else
3795 bias = fold_convert_loc (clause_loc,
3796 TREE_TYPE (b), bias);
3797 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3798 TREE_TYPE (b), b, bias);
3800 orig_var = TREE_OPERAND (orig_var, 0);
3802 if (TREE_CODE (orig_var) == INDIRECT_REF
3803 || TREE_CODE (orig_var) == ADDR_EXPR)
3804 orig_var = TREE_OPERAND (orig_var, 0);
3805 tree d = OMP_CLAUSE_DECL (c);
3806 tree type = TREE_TYPE (d);
3807 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3808 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3809 const char *name = get_name (orig_var);
3810 if (TREE_CONSTANT (v))
3812 x = create_tmp_var_raw (type, name);
3813 gimple_add_tmp_var (x);
3814 TREE_ADDRESSABLE (x) = 1;
3815 x = build_fold_addr_expr_loc (clause_loc, x);
3817 else
3819 tree atmp
3820 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3821 tree t = maybe_lookup_decl (v, ctx);
3822 if (t)
3823 v = t;
3824 else
3825 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3826 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3827 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3828 TREE_TYPE (v), v,
3829 build_int_cst (TREE_TYPE (v), 1));
3830 t = fold_build2_loc (clause_loc, MULT_EXPR,
3831 TREE_TYPE (v), t,
3832 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3833 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3834 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3837 tree ptype = build_pointer_type (TREE_TYPE (type));
3838 x = fold_convert_loc (clause_loc, ptype, x);
3839 tree y = create_tmp_var (ptype, name);
3840 gimplify_assign (y, x, ilist);
3841 x = y;
3842 tree yb = y;
3844 if (!integer_zerop (bias))
3846 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3847 bias);
3848 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3849 x);
3850 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3851 pointer_sized_int_node, yb, bias);
3852 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3853 yb = create_tmp_var (ptype, name);
3854 gimplify_assign (yb, x, ilist);
3855 x = yb;
3858 d = TREE_OPERAND (d, 0);
3859 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3860 d = TREE_OPERAND (d, 0);
3861 if (TREE_CODE (d) == ADDR_EXPR)
3863 if (orig_var != var)
3865 gcc_assert (is_variable_sized (orig_var));
3866 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3868 gimplify_assign (new_var, x, ilist);
3869 tree new_orig_var = lookup_decl (orig_var, ctx);
3870 tree t = build_fold_indirect_ref (new_var);
3871 DECL_IGNORED_P (new_var) = 0;
3872 TREE_THIS_NOTRAP (t);
3873 SET_DECL_VALUE_EXPR (new_orig_var, t);
3874 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3876 else
3878 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3879 build_int_cst (ptype, 0));
3880 SET_DECL_VALUE_EXPR (new_var, x);
3881 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3884 else
3886 gcc_assert (orig_var == var);
3887 if (TREE_CODE (d) == INDIRECT_REF)
3889 x = create_tmp_var (ptype, name);
3890 TREE_ADDRESSABLE (x) = 1;
3891 gimplify_assign (x, yb, ilist);
3892 x = build_fold_addr_expr_loc (clause_loc, x);
3894 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3895 gimplify_assign (new_var, x, ilist);
3897 tree y1 = create_tmp_var (ptype, NULL);
3898 gimplify_assign (y1, y, ilist);
3899 tree i2 = NULL_TREE, y2 = NULL_TREE;
3900 tree body2 = NULL_TREE, end2 = NULL_TREE;
3901 tree y3 = NULL_TREE, y4 = NULL_TREE;
3902 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3904 y2 = create_tmp_var (ptype, NULL);
3905 gimplify_assign (y2, y, ilist);
3906 tree ref = build_outer_var_ref (var, ctx);
3907 /* For references build_outer_var_ref already performs the dereference. */
3908 if (TREE_CODE (d) == INDIRECT_REF)
3909 gcc_assert (omp_is_reference (var));
3910 else if (TREE_CODE (d) == ADDR_EXPR)
3911 ref = build_fold_addr_expr (ref);
3912 else if (omp_is_reference (var))
3913 ref = build_fold_addr_expr (ref);
3914 ref = fold_convert_loc (clause_loc, ptype, ref);
3915 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3916 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3918 y3 = create_tmp_var (ptype, NULL);
3919 gimplify_assign (y3, unshare_expr (ref), ilist);
3921 if (is_simd)
3923 y4 = create_tmp_var (ptype, NULL);
3924 gimplify_assign (y4, ref, dlist);
3927 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3928 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3929 tree body = create_artificial_label (UNKNOWN_LOCATION);
3930 tree end = create_artificial_label (UNKNOWN_LOCATION);
3931 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3932 if (y2)
3934 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3935 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3936 body2 = create_artificial_label (UNKNOWN_LOCATION);
3937 end2 = create_artificial_label (UNKNOWN_LOCATION);
3938 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3940 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3942 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3943 tree decl_placeholder
3944 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3945 SET_DECL_VALUE_EXPR (decl_placeholder,
3946 build_simple_mem_ref (y1));
3947 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3948 SET_DECL_VALUE_EXPR (placeholder,
3949 y3 ? build_simple_mem_ref (y3)
3950 : error_mark_node);
3951 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3952 x = lang_hooks.decls.omp_clause_default_ctor
3953 (c, build_simple_mem_ref (y1),
3954 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3955 if (x)
3956 gimplify_and_add (x, ilist);
3957 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3959 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3960 lower_omp (&tseq, ctx);
3961 gimple_seq_add_seq (ilist, tseq);
3963 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3964 if (is_simd)
3966 SET_DECL_VALUE_EXPR (decl_placeholder,
3967 build_simple_mem_ref (y2));
3968 SET_DECL_VALUE_EXPR (placeholder,
3969 build_simple_mem_ref (y4));
3970 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3971 lower_omp (&tseq, ctx);
3972 gimple_seq_add_seq (dlist, tseq);
3973 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3975 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3976 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3977 x = lang_hooks.decls.omp_clause_dtor
3978 (c, build_simple_mem_ref (y2));
3979 if (x)
3981 gimple_seq tseq = NULL;
3982 dtor = x;
3983 gimplify_stmt (&dtor, &tseq);
3984 gimple_seq_add_seq (dlist, tseq);
3987 else
3989 x = omp_reduction_init (c, TREE_TYPE (type));
3990 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3992 /* reduction(-:var) sums up the partial results, so it
3993 acts identically to reduction(+:var). */
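/* An illustrative sketch (not the exact generated GIMPLE): with
   reduction(-:x), each thread accumulates into a private copy
   initialized to 0, e.g. x_priv -= a[i], and the partial results
   are then combined as x_orig = x_orig + x_priv_1 + ... + x_priv_n,
   exactly as for reduction(+:x).  */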
3994 if (code == MINUS_EXPR)
3995 code = PLUS_EXPR;
3997 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3998 if (is_simd)
4000 x = build2 (code, TREE_TYPE (type),
4001 build_simple_mem_ref (y4),
4002 build_simple_mem_ref (y2));
4003 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4006 gimple *g
4007 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4008 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4009 gimple_seq_add_stmt (ilist, g);
4010 if (y3)
4012 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4013 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4014 gimple_seq_add_stmt (ilist, g);
4016 g = gimple_build_assign (i, PLUS_EXPR, i,
4017 build_int_cst (TREE_TYPE (i), 1));
4018 gimple_seq_add_stmt (ilist, g);
4019 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4020 gimple_seq_add_stmt (ilist, g);
4021 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4022 if (y2)
4024 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4025 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4026 gimple_seq_add_stmt (dlist, g);
4027 if (y4)
4029 g = gimple_build_assign
4030 (y4, POINTER_PLUS_EXPR, y4,
4031 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4032 gimple_seq_add_stmt (dlist, g);
4034 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4035 build_int_cst (TREE_TYPE (i2), 1));
4036 gimple_seq_add_stmt (dlist, g);
4037 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4038 gimple_seq_add_stmt (dlist, g);
4039 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
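/* Schematically, for reduction(+:a[0:n]) the initialization loop
   built above in ILIST is roughly
     for (i = 0; i <= v; i++, y1 += elsz)
       *y1 = <init-value>;
   and for SIMD a matching merge loop is built in DLIST:
     for (i2 = 0; i2 <= v; i2++, y2 += elsz, y4 += elsz)
       *y4 = *y4 OP *y2;
   where elsz stands for TYPE_SIZE_UNIT of the element type and
   the names are illustrative only.  */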
4041 continue;
4043 else if (is_variable_sized (var))
4045 /* For variable sized types, we need to allocate the
4046 actual storage here. Call alloca and store the
4047 result in the pointer decl that we created elsewhere. */
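/* A sketch of what is emitted, assuming a privatized VLA
   'int a[n]' whose DECL_VALUE_EXPR is *a.ptr:
     tmp = __builtin_alloca_with_align (sizeof (int) * n,
					DECL_ALIGN (a));
     a.ptr = (int *) tmp;
   where a.ptr and tmp are illustrative names.  */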
4048 if (pass == 0)
4049 continue;
4051 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4053 gcall *stmt;
4054 tree tmp, atmp;
4056 ptr = DECL_VALUE_EXPR (new_var);
4057 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4058 ptr = TREE_OPERAND (ptr, 0);
4059 gcc_assert (DECL_P (ptr));
4060 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4062 /* void *tmp = __builtin_alloca_with_align (size, align) */
4063 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4064 stmt = gimple_build_call (atmp, 2, x,
4065 size_int (DECL_ALIGN (var)));
4066 tmp = create_tmp_var_raw (ptr_type_node);
4067 gimple_add_tmp_var (tmp);
4068 gimple_call_set_lhs (stmt, tmp);
4070 gimple_seq_add_stmt (ilist, stmt);
4072 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4073 gimplify_assign (ptr, x, ilist);
4076 else if (omp_is_reference (var))
4078 /* For references that are being privatized for Fortran,
4079 allocate new backing storage for the new pointer
4080 variable. This allows us to avoid changing all the
4081 code that expects a pointer to something that expects
4082 a direct variable. */
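/* A sketch, assuming a privatized Fortran dummy argument X that is
   accessed through a reference: rather than rewriting every use of
   *x to a direct variable, we emit
     x.priv = &<new backing storage>;
   and keep dereferencing x.priv as before (x.priv is an
   illustrative name for the privatized pointer).  */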
4083 if (pass == 0)
4084 continue;
4086 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4087 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4089 x = build_receiver_ref (var, false, ctx);
4090 x = build_fold_addr_expr_loc (clause_loc, x);
4092 else if (TREE_CONSTANT (x))
4094 /* For a reduction in a SIMD loop, defer adding the
4095 initialization of the reference, because if we decide
4096 to use a SIMD array for it, the initialization could cause
4097 an expansion ICE. */
4098 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4099 x = NULL_TREE;
4100 else
4102 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4103 get_name (var));
4104 gimple_add_tmp_var (x);
4105 TREE_ADDRESSABLE (x) = 1;
4106 x = build_fold_addr_expr_loc (clause_loc, x);
4109 else
4111 tree atmp
4112 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4113 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4114 tree al = size_int (TYPE_ALIGN (rtype));
4115 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4118 if (x)
4120 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4121 gimplify_assign (new_var, x, ilist);
4124 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4126 else if (c_kind == OMP_CLAUSE_REDUCTION
4127 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4129 if (pass == 0)
4130 continue;
4132 else if (pass != 0)
4133 continue;
4135 switch (OMP_CLAUSE_CODE (c))
4137 case OMP_CLAUSE_SHARED:
4138 /* Ignore shared directives in a teams construct. */
4139 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4140 continue;
4141 /* Shared global vars are just accessed directly. */
4142 if (is_global_var (new_var))
4143 break;
4144 /* For taskloop firstprivate/lastprivate, represented
4145 as firstprivate and shared clauses on the task, new_var
4146 is the firstprivate var. */
4147 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4148 break;
4149 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4150 needs to be delayed until after fixup_child_record_type so
4151 that we get the correct type during the dereference. */
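/* Schematically, uses of a shared(x) variable in the child
   function are redirected through the receiver record:
     x  ->  .omp_data_i->x	(!by_ref)
     x  ->  *.omp_data_i->x	(by_ref)
   where .omp_data_i is the receiver decl and the field names
   are illustrative.  */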
4152 by_ref = use_pointer_for_field (var, ctx);
4153 x = build_receiver_ref (var, by_ref, ctx);
4154 SET_DECL_VALUE_EXPR (new_var, x);
4155 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4157 /* ??? If VAR is not passed by reference, and the variable
4158 hasn't been initialized yet, then we'll get a warning for
4159 the store into the omp_data_s structure. Ideally, we'd be
4160 able to notice this and not store anything at all, but
4161 we're generating code too early. Suppress the warning. */
4162 if (!by_ref)
4163 TREE_NO_WARNING (var) = 1;
4164 break;
4166 case OMP_CLAUSE_LASTPRIVATE:
4167 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4168 break;
4169 /* FALLTHRU */
4171 case OMP_CLAUSE_PRIVATE:
4172 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4173 x = build_outer_var_ref (var, ctx);
4174 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4176 if (is_task_ctx (ctx))
4177 x = build_receiver_ref (var, false, ctx);
4178 else
4179 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4181 else
4182 x = NULL;
4183 do_private:
4184 tree nx;
4185 nx = lang_hooks.decls.omp_clause_default_ctor
4186 (c, unshare_expr (new_var), x);
4187 if (is_simd)
4189 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4190 if ((TREE_ADDRESSABLE (new_var) || nx || y
4191 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4192 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4193 ivar, lvar))
4195 if (nx)
4196 x = lang_hooks.decls.omp_clause_default_ctor
4197 (c, unshare_expr (ivar), x);
4198 if (nx && x)
4199 gimplify_and_add (x, &llist[0]);
4200 if (y)
4202 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4203 if (y)
4205 gimple_seq tseq = NULL;
4207 dtor = y;
4208 gimplify_stmt (&dtor, &tseq);
4209 gimple_seq_add_seq (&llist[1], tseq);
4212 break;
4215 if (nx)
4216 gimplify_and_add (nx, ilist);
4217 /* FALLTHRU */
4219 do_dtor:
4220 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4221 if (x)
4223 gimple_seq tseq = NULL;
4225 dtor = x;
4226 gimplify_stmt (&dtor, &tseq);
4227 gimple_seq_add_seq (dlist, tseq);
4229 break;
4231 case OMP_CLAUSE_LINEAR:
4232 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4233 goto do_firstprivate;
4234 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4235 x = NULL;
4236 else
4237 x = build_outer_var_ref (var, ctx);
4238 goto do_private;
4240 case OMP_CLAUSE_FIRSTPRIVATE:
4241 if (is_task_ctx (ctx))
4243 if (omp_is_reference (var) || is_variable_sized (var))
4244 goto do_dtor;
4245 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4246 ctx))
4247 || use_pointer_for_field (var, NULL))
4249 x = build_receiver_ref (var, false, ctx);
4250 SET_DECL_VALUE_EXPR (new_var, x);
4251 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4252 goto do_dtor;
4255 do_firstprivate:
4256 x = build_outer_var_ref (var, ctx);
4257 if (is_simd)
4259 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4260 && gimple_omp_for_combined_into_p (ctx->stmt))
4262 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4263 tree stept = TREE_TYPE (t);
4264 tree ct = omp_find_clause (clauses,
4265 OMP_CLAUSE__LOOPTEMP_);
4266 gcc_assert (ct);
4267 tree l = OMP_CLAUSE_DECL (ct);
4268 tree n1 = fd->loop.n1;
4269 tree step = fd->loop.step;
4270 tree itype = TREE_TYPE (l);
4271 if (POINTER_TYPE_P (itype))
4272 itype = signed_type_for (itype);
4273 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4274 if (TYPE_UNSIGNED (itype)
4275 && fd->loop.cond_code == GT_EXPR)
4276 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4277 fold_build1 (NEGATE_EXPR, itype, l),
4278 fold_build1 (NEGATE_EXPR,
4279 itype, step));
4280 else
4281 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4282 t = fold_build2 (MULT_EXPR, stept,
4283 fold_convert (stept, l), t);
4285 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4287 x = lang_hooks.decls.omp_clause_linear_ctor
4288 (c, new_var, x, t);
4289 gimplify_and_add (x, ilist);
4290 goto do_dtor;
4293 if (POINTER_TYPE_P (TREE_TYPE (x)))
4294 x = fold_build2 (POINTER_PLUS_EXPR,
4295 TREE_TYPE (x), x, t);
4296 else
4297 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
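/* I.e., a sketch of the start value computed here for the
   privatized linear variable in a combined construct:
     x_priv = x_outer + ((l - n1) / step) * linear_step;
   using POINTER_PLUS_EXPR instead of PLUS_EXPR when the
   variable has pointer type.  */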
4300 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4301 || TREE_ADDRESSABLE (new_var))
4302 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4303 ivar, lvar))
4305 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4307 tree iv = create_tmp_var (TREE_TYPE (new_var));
4308 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4309 gimplify_and_add (x, ilist);
4310 gimple_stmt_iterator gsi
4311 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4312 gassign *g
4313 = gimple_build_assign (unshare_expr (lvar), iv);
4314 gsi_insert_before_without_update (&gsi, g,
4315 GSI_SAME_STMT);
4316 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4317 enum tree_code code = PLUS_EXPR;
4318 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4319 code = POINTER_PLUS_EXPR;
4320 g = gimple_build_assign (iv, code, iv, t);
4321 gsi_insert_before_without_update (&gsi, g,
4322 GSI_SAME_STMT);
4323 break;
4325 x = lang_hooks.decls.omp_clause_copy_ctor
4326 (c, unshare_expr (ivar), x);
4327 gimplify_and_add (x, &llist[0]);
4328 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4329 if (x)
4331 gimple_seq tseq = NULL;
4333 dtor = x;
4334 gimplify_stmt (&dtor, &tseq);
4335 gimple_seq_add_seq (&llist[1], tseq);
4337 break;
4340 x = lang_hooks.decls.omp_clause_copy_ctor
4341 (c, unshare_expr (new_var), x);
4342 gimplify_and_add (x, ilist);
4343 goto do_dtor;
4345 case OMP_CLAUSE__LOOPTEMP_:
4346 gcc_assert (is_taskreg_ctx (ctx));
4347 x = build_outer_var_ref (var, ctx);
4348 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4349 gimplify_and_add (x, ilist);
4350 break;
4352 case OMP_CLAUSE_COPYIN:
4353 by_ref = use_pointer_for_field (var, NULL);
4354 x = build_receiver_ref (var, by_ref, ctx);
4355 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4356 append_to_statement_list (x, &copyin_seq);
4357 copyin_by_ref |= by_ref;
4358 break;
4360 case OMP_CLAUSE_REDUCTION:
4361 /* OpenACC reductions are initialized using the
4362 GOACC_REDUCTION internal function. */
4363 if (is_gimple_omp_oacc (ctx->stmt))
4364 break;
4365 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4367 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4368 gimple *tseq;
4369 x = build_outer_var_ref (var, ctx);
4371 if (omp_is_reference (var)
4372 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4373 TREE_TYPE (x)))
4374 x = build_fold_addr_expr_loc (clause_loc, x);
4375 SET_DECL_VALUE_EXPR (placeholder, x);
4376 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4377 tree new_vard = new_var;
4378 if (omp_is_reference (var))
4380 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4381 new_vard = TREE_OPERAND (new_var, 0);
4382 gcc_assert (DECL_P (new_vard));
4384 if (is_simd
4385 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4386 ivar, lvar))
4388 if (new_vard == new_var)
4390 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4391 SET_DECL_VALUE_EXPR (new_var, ivar);
4393 else
4395 SET_DECL_VALUE_EXPR (new_vard,
4396 build_fold_addr_expr (ivar));
4397 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4399 x = lang_hooks.decls.omp_clause_default_ctor
4400 (c, unshare_expr (ivar),
4401 build_outer_var_ref (var, ctx));
4402 if (x)
4403 gimplify_and_add (x, &llist[0]);
4404 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4406 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4407 lower_omp (&tseq, ctx);
4408 gimple_seq_add_seq (&llist[0], tseq);
4410 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4411 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4412 lower_omp (&tseq, ctx);
4413 gimple_seq_add_seq (&llist[1], tseq);
4414 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4415 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4416 if (new_vard == new_var)
4417 SET_DECL_VALUE_EXPR (new_var, lvar);
4418 else
4419 SET_DECL_VALUE_EXPR (new_vard,
4420 build_fold_addr_expr (lvar));
4421 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4422 if (x)
4424 tseq = NULL;
4425 dtor = x;
4426 gimplify_stmt (&dtor, &tseq);
4427 gimple_seq_add_seq (&llist[1], tseq);
4429 break;
4431 /* If this is a reference to constant size reduction var
4432 with placeholder, we haven't emitted the initializer
4433 for it because it is undesirable if SIMD arrays are used.
4434 But if they aren't used, we need to emit the deferred
4435 initialization now. */
4436 else if (omp_is_reference (var) && is_simd)
4437 handle_simd_reference (clause_loc, new_vard, ilist);
4438 x = lang_hooks.decls.omp_clause_default_ctor
4439 (c, unshare_expr (new_var),
4440 build_outer_var_ref (var, ctx));
4441 if (x)
4442 gimplify_and_add (x, ilist);
4443 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4445 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4446 lower_omp (&tseq, ctx);
4447 gimple_seq_add_seq (ilist, tseq);
4449 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4450 if (is_simd)
4452 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4453 lower_omp (&tseq, ctx);
4454 gimple_seq_add_seq (dlist, tseq);
4455 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4457 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4458 goto do_dtor;
4460 else
4462 x = omp_reduction_init (c, TREE_TYPE (new_var));
4463 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4464 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4466 /* reduction(-:var) sums up the partial results, so it
4467 acts identically to reduction(+:var). */
4468 if (code == MINUS_EXPR)
4469 code = PLUS_EXPR;
4471 tree new_vard = new_var;
4472 if (is_simd && omp_is_reference (var))
4474 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4475 new_vard = TREE_OPERAND (new_var, 0);
4476 gcc_assert (DECL_P (new_vard));
4478 if (is_simd
4479 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4480 ivar, lvar))
4482 tree ref = build_outer_var_ref (var, ctx);
4484 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4486 if (sctx.is_simt)
4488 if (!simt_lane)
4489 simt_lane = create_tmp_var (unsigned_type_node);
4490 x = build_call_expr_internal_loc
4491 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4492 TREE_TYPE (ivar), 2, ivar, simt_lane);
4493 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4494 gimplify_assign (ivar, x, &llist[2]);
4496 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4497 ref = build_outer_var_ref (var, ctx);
4498 gimplify_assign (ref, x, &llist[1]);
4500 if (new_vard != new_var)
4502 SET_DECL_VALUE_EXPR (new_vard,
4503 build_fold_addr_expr (lvar));
4504 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4507 else
4509 if (omp_is_reference (var) && is_simd)
4510 handle_simd_reference (clause_loc, new_vard, ilist);
4511 gimplify_assign (new_var, x, ilist);
4512 if (is_simd)
4514 tree ref = build_outer_var_ref (var, ctx);
4516 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4517 ref = build_outer_var_ref (var, ctx);
4518 gimplify_assign (ref, x, dlist);
4522 break;
4524 default:
4525 gcc_unreachable ();
4530 if (sctx.max_vf == 1)
4531 sctx.is_simt = false;
4533 if (sctx.lane || sctx.is_simt)
4535 uid = create_tmp_var (ptr_type_node, "simduid");
4536 /* Suppress uninitialized warnings on simduid; it is always uninitialized,
4537 as we use it only for its DECL_UID, never for its value. */
4538 TREE_NO_WARNING (uid) = 1;
4539 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4540 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4541 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4542 gimple_omp_for_set_clauses (ctx->stmt, c);
4544 /* Emit calls denoting privatized variables and initializing a pointer to
4545 the structure that holds private variables as fields, after the ompdevlow pass. */
4546 if (sctx.is_simt)
4548 sctx.simt_eargs[0] = uid;
4549 gimple *g
4550 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4551 gimple_call_set_lhs (g, uid);
4552 gimple_seq_add_stmt (ilist, g);
4553 sctx.simt_eargs.release ();
4555 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4556 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4557 gimple_call_set_lhs (g, simtrec);
4558 gimple_seq_add_stmt (ilist, g);
4560 if (sctx.lane)
4562 gimple *g
4563 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4564 gimple_call_set_lhs (g, sctx.lane);
4565 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4566 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4567 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4568 build_int_cst (unsigned_type_node, 0));
4569 gimple_seq_add_stmt (ilist, g);
4570 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4571 if (llist[2])
4573 tree simt_vf = create_tmp_var (unsigned_type_node);
4574 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4575 gimple_call_set_lhs (g, simt_vf);
4576 gimple_seq_add_stmt (dlist, g);
4578 tree t = build_int_cst (unsigned_type_node, 1);
4579 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4580 gimple_seq_add_stmt (dlist, g);
4582 t = build_int_cst (unsigned_type_node, 0);
4583 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4584 gimple_seq_add_stmt (dlist, g);
4586 tree body = create_artificial_label (UNKNOWN_LOCATION);
4587 tree header = create_artificial_label (UNKNOWN_LOCATION);
4588 tree end = create_artificial_label (UNKNOWN_LOCATION);
4589 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4590 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4592 gimple_seq_add_seq (dlist, llist[2]);
4594 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4595 gimple_seq_add_stmt (dlist, g);
4597 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4598 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4599 gimple_seq_add_stmt (dlist, g);
4601 gimple_seq_add_stmt (dlist, gimple_build_label (end));
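/* So the SIMT reduction part of DLIST is, schematically:
     simt_vf = GOMP_SIMT_VF ();
     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
       <llist[2]: x = x OP GOMP_SIMT_XCHG_BFLY (x, simt_lane)>;
   i.e. a butterfly reduction in log2 (simt_vf) steps.  */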
4603 for (int i = 0; i < 2; i++)
4604 if (llist[i])
4606 tree vf = create_tmp_var (unsigned_type_node);
4607 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4608 gimple_call_set_lhs (g, vf);
4609 gimple_seq *seq = i == 0 ? ilist : dlist;
4610 gimple_seq_add_stmt (seq, g);
4611 tree t = build_int_cst (unsigned_type_node, 0);
4612 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4613 gimple_seq_add_stmt (seq, g);
4614 tree body = create_artificial_label (UNKNOWN_LOCATION);
4615 tree header = create_artificial_label (UNKNOWN_LOCATION);
4616 tree end = create_artificial_label (UNKNOWN_LOCATION);
4617 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4618 gimple_seq_add_stmt (seq, gimple_build_label (body));
4619 gimple_seq_add_seq (seq, llist[i]);
4620 t = build_int_cst (unsigned_type_node, 1);
4621 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4622 gimple_seq_add_stmt (seq, g);
4623 gimple_seq_add_stmt (seq, gimple_build_label (header));
4624 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4625 gimple_seq_add_stmt (seq, g);
4626 gimple_seq_add_stmt (seq, gimple_build_label (end));
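/* Each non-empty llist[i] body is thus wrapped as, schematically:
     vf = GOMP_SIMD_VF (simduid);
     for (idx = 0; idx < vf; idx++)
       <llist[i] body operating on lane IDX of the SIMD arrays>;
   appended to ILIST for i == 0 (constructors) and to DLIST for
   i == 1 (destructors and copy-out).  */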
4629 if (sctx.is_simt)
4631 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4632 gimple *g
4633 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4634 gimple_seq_add_stmt (dlist, g);
4637 /* The copyin sequence is not to be executed by the main thread, since
4638 that would result in self-copies. Perhaps harmless for scalars,
4639 but certainly not for C++ operator=. */
4640 if (copyin_seq)
4642 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4644 x = build2 (NE_EXPR, boolean_type_node, x,
4645 build_int_cst (TREE_TYPE (x), 0));
4646 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4647 gimplify_and_add (x, ilist);
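/* I.e. the copyin sequence is guarded as, schematically:
     if (omp_get_thread_num () != 0)
       <copyin_seq>;
   so only the non-master threads copy the values in.  */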
4650 /* If any copyin variable is passed by reference, we must ensure the
4651 master thread doesn't modify it before it is copied over in all
4652 threads. Similarly for variables in both firstprivate and
4653 lastprivate clauses we need to ensure the lastprivate copying
4654 happens after firstprivate copying in all threads. And similarly
4655 for UDRs if the initializer expression refers to omp_orig. */
4656 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4658 /* Don't add any barrier for #pragma omp simd or
4659 #pragma omp distribute. */
4660 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4661 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4662 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4665 /* If max_vf is non-zero, then we can use only a vectorization factor
4666 up to the max_vf we chose. So stick it into the safelen clause. */
4667 if (sctx.max_vf)
4669 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4670 OMP_CLAUSE_SAFELEN);
4671 if (c == NULL_TREE
4672 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4673 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4674 sctx.max_vf) == 1))
4676 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4677 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4678 sctx.max_vf);
4679 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4680 gimple_omp_for_set_clauses (ctx->stmt, c);
4686 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4687 both parallel and workshare constructs. PREDICATE may be NULL if it's
4688 always true. */
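/* A sketch of the effect, assuming
     #pragma omp for lastprivate(x)
   : the code appended to STMT_LIST is roughly
     if (<PREDICATE: this thread ran the sequentially last iteration>)
       x_orig = x_priv;
   where x_priv is the privatized copy of x in this context.  */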
4690 static void
4691 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4692 omp_context *ctx)
4694 tree x, c, label = NULL, orig_clauses = clauses;
4695 bool par_clauses = false;
4696 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4698 /* Early exit if there are no lastprivate or linear clauses. */
4699 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4700 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4701 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4702 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4703 break;
4704 if (clauses == NULL)
4706 /* If this was a workshare clause, see if it had been combined
4707 with its parallel. In that case, look for the clauses on the
4708 parallel statement itself. */
4709 if (is_parallel_ctx (ctx))
4710 return;
4712 ctx = ctx->outer;
4713 if (ctx == NULL || !is_parallel_ctx (ctx))
4714 return;
4716 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4717 OMP_CLAUSE_LASTPRIVATE);
4718 if (clauses == NULL)
4719 return;
4720 par_clauses = true;
4723 bool maybe_simt = false;
4724 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4725 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4727 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4728 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4729 if (simduid)
4730 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4733 if (predicate)
4735 gcond *stmt;
4736 tree label_true, arm1, arm2;
4737 enum tree_code pred_code = TREE_CODE (predicate);
4739 label = create_artificial_label (UNKNOWN_LOCATION);
4740 label_true = create_artificial_label (UNKNOWN_LOCATION);
4741 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4743 arm1 = TREE_OPERAND (predicate, 0);
4744 arm2 = TREE_OPERAND (predicate, 1);
4745 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4746 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4748 else
4750 arm1 = predicate;
4751 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4752 arm2 = boolean_false_node;
4753 pred_code = NE_EXPR;
4755 if (maybe_simt)
4757 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4758 c = fold_convert (integer_type_node, c);
4759 simtcond = create_tmp_var (integer_type_node);
4760 gimplify_assign (simtcond, c, stmt_list);
4761 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4762 1, simtcond);
4763 c = create_tmp_var (integer_type_node);
4764 gimple_call_set_lhs (g, c);
4765 gimple_seq_add_stmt (stmt_list, g);
4766 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4767 label_true, label);
4769 else
4770 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4771 gimple_seq_add_stmt (stmt_list, stmt);
4772 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4775 for (c = clauses; c ;)
4777 tree var, new_var;
4778 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4780 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4781 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4782 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4784 var = OMP_CLAUSE_DECL (c);
4785 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4786 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4787 && is_taskloop_ctx (ctx))
4789 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4790 new_var = lookup_decl (var, ctx->outer);
4792 else
4794 new_var = lookup_decl (var, ctx);
4795 /* Avoid uninitialized warnings for lastprivate and
4796 for linear iterators. */
4797 if (predicate
4798 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4799 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4800 TREE_NO_WARNING (new_var) = 1;
4803 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4805 tree val = DECL_VALUE_EXPR (new_var);
4806 if (TREE_CODE (val) == ARRAY_REF
4807 && VAR_P (TREE_OPERAND (val, 0))
4808 && lookup_attribute ("omp simd array",
4809 DECL_ATTRIBUTES (TREE_OPERAND (val,
4810 0))))
4812 if (lastlane == NULL)
4814 lastlane = create_tmp_var (unsigned_type_node);
4815 gcall *g
4816 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4817 2, simduid,
4818 TREE_OPERAND (val, 1));
4819 gimple_call_set_lhs (g, lastlane);
4820 gimple_seq_add_stmt (stmt_list, g);
4822 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4823 TREE_OPERAND (val, 0), lastlane,
4824 NULL_TREE, NULL_TREE);
4827 else if (maybe_simt)
4829 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4830 ? DECL_VALUE_EXPR (new_var)
4831 : new_var);
4832 if (simtlast == NULL)
4834 simtlast = create_tmp_var (unsigned_type_node);
4835 gcall *g = gimple_build_call_internal
4836 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4837 gimple_call_set_lhs (g, simtlast);
4838 gimple_seq_add_stmt (stmt_list, g);
4840 x = build_call_expr_internal_loc
4841 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4842 TREE_TYPE (val), 2, val, simtlast);
4843 new_var = unshare_expr (new_var);
4844 gimplify_assign (new_var, x, stmt_list);
4845 new_var = unshare_expr (new_var);
4848 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4849 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4851 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4852 gimple_seq_add_seq (stmt_list,
4853 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4854 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4856 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4857 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4859 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4860 gimple_seq_add_seq (stmt_list,
4861 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4862 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4865 x = NULL_TREE;
4866 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4867 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4869 gcc_checking_assert (is_taskloop_ctx (ctx));
4870 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4871 ctx->outer->outer);
4872 if (is_global_var (ovar))
4873 x = ovar;
4875 if (!x)
4876 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4877 if (omp_is_reference (var))
4878 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4879 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4880 gimplify_and_add (x, stmt_list);
4882 c = OMP_CLAUSE_CHAIN (c);
4883 if (c == NULL && !par_clauses)
4885 /* If this was a workshare clause, see if it had been combined
4886 with its parallel. In that case, continue looking for the
4887 clauses also on the parallel statement itself. */
4888 if (is_parallel_ctx (ctx))
4889 break;
4891 ctx = ctx->outer;
4892 if (ctx == NULL || !is_parallel_ctx (ctx))
4893 break;
4895 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4896 OMP_CLAUSE_LASTPRIVATE);
4897 par_clauses = true;
4901 if (label)
4902 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4905 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4906 (which might be a placeholder). INNER is true if this is an inner
4907 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4908 join markers. Generate the before-loop forking sequence in
4909 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4910 general form of these sequences is
4912 GOACC_REDUCTION_SETUP
4913 GOACC_FORK
4914 GOACC_REDUCTION_INIT
4916 GOACC_REDUCTION_FINI
4917 GOACC_JOIN
4918 GOACC_REDUCTION_TEARDOWN. */
4920 static void
4921 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4922 gcall *fork, gcall *join, gimple_seq *fork_seq,
4923 gimple_seq *join_seq, omp_context *ctx)
4925 gimple_seq before_fork = NULL;
4926 gimple_seq after_fork = NULL;
4927 gimple_seq before_join = NULL;
4928 gimple_seq after_join = NULL;
4929 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4930 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4931 unsigned offset = 0;
4933 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4934 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4936 tree orig = OMP_CLAUSE_DECL (c);
4937 tree var = maybe_lookup_decl (orig, ctx);
4938 tree ref_to_res = NULL_TREE;
4939 tree incoming, outgoing, v1, v2, v3;
4940 bool is_private = false;
4942 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4943 if (rcode == MINUS_EXPR)
4944 rcode = PLUS_EXPR;
4945 else if (rcode == TRUTH_ANDIF_EXPR)
4946 rcode = BIT_AND_EXPR;
4947 else if (rcode == TRUTH_ORIF_EXPR)
4948 rcode = BIT_IOR_EXPR;
4949 tree op = build_int_cst (unsigned_type_node, rcode);
4951 if (!var)
4952 var = orig;
4954 incoming = outgoing = var;
4956 if (!inner)
4958 /* See if an outer construct also reduces this variable. */
4959 omp_context *outer = ctx;
4961 while (omp_context *probe = outer->outer)
4963 enum gimple_code type = gimple_code (probe->stmt);
4964 tree cls;
4966 switch (type)
4968 case GIMPLE_OMP_FOR:
4969 cls = gimple_omp_for_clauses (probe->stmt);
4970 break;
4972 case GIMPLE_OMP_TARGET:
4973 if (gimple_omp_target_kind (probe->stmt)
4974 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4975 goto do_lookup;
4977 cls = gimple_omp_target_clauses (probe->stmt);
4978 break;
4980 default:
4981 goto do_lookup;
4984 outer = probe;
4985 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4986 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4987 && orig == OMP_CLAUSE_DECL (cls))
4989 incoming = outgoing = lookup_decl (orig, probe);
4990 goto has_outer_reduction;
4992 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4993 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4994 && orig == OMP_CLAUSE_DECL (cls))
4996 is_private = true;
4997 goto do_lookup;
5001 do_lookup:
5002 /* This is the outermost construct with this reduction,
5003 see if there's a mapping for it. */
5004 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5005 && maybe_lookup_field (orig, outer) && !is_private)
5007 ref_to_res = build_receiver_ref (orig, false, outer);
5008 if (omp_is_reference (orig))
5009 ref_to_res = build_simple_mem_ref (ref_to_res);
5011 tree type = TREE_TYPE (var);
5012 if (POINTER_TYPE_P (type))
5013 type = TREE_TYPE (type);
5015 outgoing = var;
5016 incoming = omp_reduction_init_op (loc, rcode, type);
5018 else
5020 /* Try to look at enclosing contexts for reduction var,
5021 use original if no mapping found. */
5022 tree t = NULL_TREE;
5023 omp_context *c = ctx->outer;
5024 while (c && !t)
5026 t = maybe_lookup_decl (orig, c);
5027 c = c->outer;
5029 incoming = outgoing = (t ? t : orig);
5032 has_outer_reduction:;
5035 if (!ref_to_res)
5036 ref_to_res = integer_zero_node;
5038 if (omp_is_reference (orig))
5040 tree type = TREE_TYPE (var);
5041 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5043 if (!inner)
5045 tree x = create_tmp_var (TREE_TYPE (type), id);
5046 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5049 v1 = create_tmp_var (type, id);
5050 v2 = create_tmp_var (type, id);
5051 v3 = create_tmp_var (type, id);
5053 gimplify_assign (v1, var, fork_seq);
5054 gimplify_assign (v2, var, fork_seq);
5055 gimplify_assign (v3, var, fork_seq);
5057 var = build_simple_mem_ref (var);
5058 v1 = build_simple_mem_ref (v1);
5059 v2 = build_simple_mem_ref (v2);
5060 v3 = build_simple_mem_ref (v3);
5061 outgoing = build_simple_mem_ref (outgoing);
5063 if (!TREE_CONSTANT (incoming))
5064 incoming = build_simple_mem_ref (incoming);
5066 else
5067 v1 = v2 = v3 = var;
5069 /* Determine position in reduction buffer, which may be used
5070 by the target. The parser has ensured that this is not a
5071 variable-sized type. */
5072 fixed_size_mode mode
5073 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5074 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5075 offset = (offset + align - 1) & ~(align - 1);
5076 tree off = build_int_cst (sizetype, offset);
5077 offset += GET_MODE_SIZE (mode);
5079 if (!init_code)
5081 init_code = build_int_cst (integer_type_node,
5082 IFN_GOACC_REDUCTION_INIT);
5083 fini_code = build_int_cst (integer_type_node,
5084 IFN_GOACC_REDUCTION_FINI);
5085 setup_code = build_int_cst (integer_type_node,
5086 IFN_GOACC_REDUCTION_SETUP);
5087 teardown_code = build_int_cst (integer_type_node,
5088 IFN_GOACC_REDUCTION_TEARDOWN);
5091 tree setup_call
5092 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5093 TREE_TYPE (var), 6, setup_code,
5094 unshare_expr (ref_to_res),
5095 incoming, level, op, off);
5096 tree init_call
5097 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5098 TREE_TYPE (var), 6, init_code,
5099 unshare_expr (ref_to_res),
5100 v1, level, op, off);
5101 tree fini_call
5102 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5103 TREE_TYPE (var), 6, fini_code,
5104 unshare_expr (ref_to_res),
5105 v2, level, op, off);
5106 tree teardown_call
5107 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5108 TREE_TYPE (var), 6, teardown_code,
5109 ref_to_res, v3, level, op, off);
5111 gimplify_assign (v1, setup_call, &before_fork);
5112 gimplify_assign (v2, init_call, &after_fork);
5113 gimplify_assign (v3, fini_call, &before_join);
5114 gimplify_assign (outgoing, teardown_call, &after_join);
5117 /* Now stitch things together. */
5118 gimple_seq_add_seq (fork_seq, before_fork);
5119 if (fork)
5120 gimple_seq_add_stmt (fork_seq, fork);
5121 gimple_seq_add_seq (fork_seq, after_fork);
5123 gimple_seq_add_seq (join_seq, before_join);
5124 if (join)
5125 gimple_seq_add_stmt (join_seq, join);
5126 gimple_seq_add_seq (join_seq, after_join);
5129 /* Generate code to implement the REDUCTION clauses. */
5131 static void
5132 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5134 gimple_seq sub_seq = NULL;
5135 gimple *stmt;
5136 tree x, c;
5137 int count = 0;
5139 /* OpenACC loop reductions are handled elsewhere. */
5140 if (is_gimple_omp_oacc (ctx->stmt))
5141 return;
5143 /* SIMD reductions are handled in lower_rec_input_clauses. */
5144 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5145 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5146 return;
5148 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5149 update in that case, otherwise use a lock. */
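/* Schematically, a single scalar reduction is merged as
     #pragma omp atomic
     x_orig = x_orig OP x_priv;
   while multiple (or UDR / array section) reductions are merged
   between GOMP_atomic_start () and GOMP_atomic_end () calls.  */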
5150 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5153 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5154 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5156 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5157 count = -1;
5158 break;
5160 count++;
5163 if (count == 0)
5164 return;
5166 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5168 tree var, ref, new_var, orig_var;
5169 enum tree_code code;
5170 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5172 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5173 continue;
5175 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5176 orig_var = var = OMP_CLAUSE_DECL (c);
5177 if (TREE_CODE (var) == MEM_REF)
5179 var = TREE_OPERAND (var, 0);
5180 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5181 var = TREE_OPERAND (var, 0);
5182 if (TREE_CODE (var) == ADDR_EXPR)
5183 var = TREE_OPERAND (var, 0);
5184 else
5186 /* If this is a pointer- or reference-based array
5187 section, the var could be private in the outer
5188 context, e.g. on an orphaned loop construct. Pretend this
5189 is the private variable's outer reference. */
5190 ccode = OMP_CLAUSE_PRIVATE;
5191 if (TREE_CODE (var) == INDIRECT_REF)
5192 var = TREE_OPERAND (var, 0);
5194 orig_var = var;
5195 if (is_variable_sized (var))
5197 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5198 var = DECL_VALUE_EXPR (var);
5199 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5200 var = TREE_OPERAND (var, 0);
5201 gcc_assert (DECL_P (var));
5204 new_var = lookup_decl (var, ctx);
5205 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5206 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5207 ref = build_outer_var_ref (var, ctx, ccode);
5208 code = OMP_CLAUSE_REDUCTION_CODE (c);
5210 /* reduction(-:var) sums up the partial results, so it acts
5211 identically to reduction(+:var). */
5212 if (code == MINUS_EXPR)
5213 code = PLUS_EXPR;
5215 if (count == 1)
5217 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5219 addr = save_expr (addr);
5220 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5221 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5222 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5223 gimplify_and_add (x, stmt_seqp);
5224 return;
5226 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5228 tree d = OMP_CLAUSE_DECL (c);
5229 tree type = TREE_TYPE (d);
5230 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5231 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5232 tree ptype = build_pointer_type (TREE_TYPE (type));
5233 tree bias = TREE_OPERAND (d, 1);
5234 d = TREE_OPERAND (d, 0);
5235 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5237 tree b = TREE_OPERAND (d, 1);
5238 b = maybe_lookup_decl (b, ctx);
5239 if (b == NULL)
5241 b = TREE_OPERAND (d, 1);
5242 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5244 if (integer_zerop (bias))
5245 bias = b;
5246 else
5248 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5249 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5250 TREE_TYPE (b), b, bias);
5252 d = TREE_OPERAND (d, 0);
5254 /* For references build_outer_var_ref already performs the dereference,
5255 so only new_var needs one here. */
5256 if (TREE_CODE (d) == INDIRECT_REF)
5258 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5259 gcc_assert (omp_is_reference (var) && var == orig_var);
5261 else if (TREE_CODE (d) == ADDR_EXPR)
5263 if (orig_var == var)
5265 new_var = build_fold_addr_expr (new_var);
5266 ref = build_fold_addr_expr (ref);
5269 else
5271 gcc_assert (orig_var == var);
5272 if (omp_is_reference (var))
5273 ref = build_fold_addr_expr (ref);
5275 if (DECL_P (v))
5277 tree t = maybe_lookup_decl (v, ctx);
5278 if (t)
5279 v = t;
5280 else
5281 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5282 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5284 if (!integer_zerop (bias))
5286 bias = fold_convert_loc (clause_loc, sizetype, bias);
5287 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5288 TREE_TYPE (new_var), new_var,
5289 unshare_expr (bias));
5290 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5291 TREE_TYPE (ref), ref, bias);
5293 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5294 ref = fold_convert_loc (clause_loc, ptype, ref);
5295 tree m = create_tmp_var (ptype, NULL);
5296 gimplify_assign (m, new_var, stmt_seqp);
5297 new_var = m;
5298 m = create_tmp_var (ptype, NULL);
5299 gimplify_assign (m, ref, stmt_seqp);
5300 ref = m;
5301 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5302 tree body = create_artificial_label (UNKNOWN_LOCATION);
5303 tree end = create_artificial_label (UNKNOWN_LOCATION);
5304 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5305 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5306 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5307 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5309 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5310 tree decl_placeholder
5311 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5312 SET_DECL_VALUE_EXPR (placeholder, out);
5313 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5314 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5315 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5316 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5317 gimple_seq_add_seq (&sub_seq,
5318 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5319 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5320 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5321 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5323 else
5325 x = build2 (code, TREE_TYPE (out), out, priv);
5326 out = unshare_expr (out);
5327 gimplify_assign (out, x, &sub_seq);
5329 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5330 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5331 gimple_seq_add_stmt (&sub_seq, g);
5332 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5333 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5334 gimple_seq_add_stmt (&sub_seq, g);
5335 g = gimple_build_assign (i, PLUS_EXPR, i,
5336 build_int_cst (TREE_TYPE (i), 1));
5337 gimple_seq_add_stmt (&sub_seq, g);
5338 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5339 gimple_seq_add_stmt (&sub_seq, g);
5340 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5342 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5344 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5346 if (omp_is_reference (var)
5347 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5348 TREE_TYPE (ref)))
5349 ref = build_fold_addr_expr_loc (clause_loc, ref);
5350 SET_DECL_VALUE_EXPR (placeholder, ref);
5351 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5352 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5353 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5354 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5355 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5357 else
5359 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5360 ref = build_outer_var_ref (var, ctx);
5361 gimplify_assign (ref, x, &sub_seq);
5365 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5367 gimple_seq_add_stmt (stmt_seqp, stmt);
5369 gimple_seq_add_seq (stmt_seqp, sub_seq);
5371 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5373 gimple_seq_add_stmt (stmt_seqp, stmt);
5377 /* Generate code to implement the COPYPRIVATE clauses. */
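/* Schematically, for
     #pragma omp single copyprivate(x)
   the thread that executed the single region stores the value (or,
   if by_ref, the address) of X into the broadcast record in SLIST:
     .omp_copy_o.x = x;	       or   = &x;
   and every thread then copies it back out in RLIST:
     x = .omp_copy_i.x;	       or   = *.omp_copy_i.x;
   where the record and field names are illustrative.  */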
5379 static void
5380 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5381 omp_context *ctx)
5383 tree c;
5385 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5387 tree var, new_var, ref, x;
5388 bool by_ref;
5389 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5391 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5392 continue;
5394 var = OMP_CLAUSE_DECL (c);
5395 by_ref = use_pointer_for_field (var, NULL);
5397 ref = build_sender_ref (var, ctx);
5398 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5399 if (by_ref)
5401 x = build_fold_addr_expr_loc (clause_loc, new_var);
5402 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5404 gimplify_assign (ref, x, slist);
5406 ref = build_receiver_ref (var, false, ctx);
5407 if (by_ref)
5409 ref = fold_convert_loc (clause_loc,
5410 build_pointer_type (TREE_TYPE (new_var)),
5411 ref);
5412 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5414 if (omp_is_reference (var))
5416 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5417 ref = build_simple_mem_ref_loc (clause_loc, ref);
5418 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5420 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5421 gimplify_and_add (x, rlist);
5426 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5427 and REDUCTION from the sender (aka parent) side. */
5429 static void
5430 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5431 omp_context *ctx)
5433 tree c, t;
5434 int ignored_looptemp = 0;
5435 bool is_taskloop = false;
5437 /* For taskloop, ignore the first two _looptemp_ clauses; those are initialized
5438 by GOMP_taskloop. */
5439 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5441 ignored_looptemp = 2;
5442 is_taskloop = true;
5445 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5447 tree val, ref, x, var;
5448 bool by_ref, do_in = false, do_out = false;
5449 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5451 switch (OMP_CLAUSE_CODE (c))
5453 case OMP_CLAUSE_PRIVATE:
5454 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5455 break;
5456 continue;
5457 case OMP_CLAUSE_FIRSTPRIVATE:
5458 case OMP_CLAUSE_COPYIN:
5459 case OMP_CLAUSE_LASTPRIVATE:
5460 case OMP_CLAUSE_REDUCTION:
5461 break;
5462 case OMP_CLAUSE_SHARED:
5463 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5464 break;
5465 continue;
5466 case OMP_CLAUSE__LOOPTEMP_:
5467 if (ignored_looptemp)
5469 ignored_looptemp--;
5470 continue;
5472 break;
5473 default:
5474 continue;
5477 val = OMP_CLAUSE_DECL (c);
5478 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5479 && TREE_CODE (val) == MEM_REF)
5481 val = TREE_OPERAND (val, 0);
5482 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5483 val = TREE_OPERAND (val, 0);
5484 if (TREE_CODE (val) == INDIRECT_REF
5485 || TREE_CODE (val) == ADDR_EXPR)
5486 val = TREE_OPERAND (val, 0);
5487 if (is_variable_sized (val))
5488 continue;
5491 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5492 outer taskloop region. */
5493 omp_context *ctx_for_o = ctx;
5494 if (is_taskloop
5495 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5496 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5497 ctx_for_o = ctx->outer;
5499 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5501 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5502 && is_global_var (var))
5503 continue;
5505 t = omp_member_access_dummy_var (var);
5506 if (t)
5508 var = DECL_VALUE_EXPR (var);
5509 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5510 if (o != t)
5511 var = unshare_and_remap (var, t, o);
5512 else
5513 var = unshare_expr (var);
5516 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5518 /* Handle taskloop firstprivate/lastprivate, where the
5519 lastprivate on GIMPLE_OMP_TASK is represented as
5520 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5521 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5522 x = omp_build_component_ref (ctx->sender_decl, f);
5523 if (use_pointer_for_field (val, ctx))
5524 var = build_fold_addr_expr (var);
5525 gimplify_assign (x, var, ilist);
5526 DECL_ABSTRACT_ORIGIN (f) = NULL;
5527 continue;
5530 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5531 || val == OMP_CLAUSE_DECL (c))
5532 && is_variable_sized (val))
5533 continue;
5534 by_ref = use_pointer_for_field (val, NULL);
5536 switch (OMP_CLAUSE_CODE (c))
5538 case OMP_CLAUSE_FIRSTPRIVATE:
5539 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5540 && !by_ref
5541 && is_task_ctx (ctx))
5542 TREE_NO_WARNING (var) = 1;
5543 do_in = true;
5544 break;
5546 case OMP_CLAUSE_PRIVATE:
5547 case OMP_CLAUSE_COPYIN:
5548 case OMP_CLAUSE__LOOPTEMP_:
5549 do_in = true;
5550 break;
5552 case OMP_CLAUSE_LASTPRIVATE:
5553 if (by_ref || omp_is_reference (val))
5555 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5556 continue;
5557 do_in = true;
5559 else
5561 do_out = true;
5562 if (lang_hooks.decls.omp_private_outer_ref (val))
5563 do_in = true;
5565 break;
5567 case OMP_CLAUSE_REDUCTION:
5568 do_in = true;
5569 if (val == OMP_CLAUSE_DECL (c))
5570 do_out = !(by_ref || omp_is_reference (val));
5571 else
5572 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5573 break;
5575 default:
5576 gcc_unreachable ();
5579 if (do_in)
5581 ref = build_sender_ref (val, ctx);
5582 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5583 gimplify_assign (ref, x, ilist);
5584 if (is_task_ctx (ctx))
5585 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5588 if (do_out)
5590 ref = build_sender_ref (val, ctx);
5591 gimplify_assign (var, ref, olist);
5596 /* Generate code to implement SHARED from the sender (aka parent)
5597 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5598 list things that got automatically shared. */
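/* Schematically, for each automatically shared OVAR we emit on
   the sender side
     .omp_data_o.ovar = ovar;	     or   = &ovar;   (by_ref)
   in ILIST, and in the non-pointer case, unless OVAR is read-only,
   the copy-back
     ovar = .omp_data_o.ovar;
   in OLIST (names are illustrative).  */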
5600 static void
5601 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5603 tree var, ovar, nvar, t, f, x, record_type;
5605 if (ctx->record_type == NULL)
5606 return;
5608 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5609 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5611 ovar = DECL_ABSTRACT_ORIGIN (f);
5612 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5613 continue;
5615 nvar = maybe_lookup_decl (ovar, ctx);
5616 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5617 continue;
5619 /* If CTX is a nested parallel directive, find the immediately
5620 enclosing parallel or workshare construct that contains a
5621 mapping for OVAR. */
5622 var = lookup_decl_in_outer_ctx (ovar, ctx);
5624 t = omp_member_access_dummy_var (var);
5625 if (t)
5627 var = DECL_VALUE_EXPR (var);
5628 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5629 if (o != t)
5630 var = unshare_and_remap (var, t, o);
5631 else
5632 var = unshare_expr (var);
5635 if (use_pointer_for_field (ovar, ctx))
5637 x = build_sender_ref (ovar, ctx);
5638 var = build_fold_addr_expr (var);
5639 gimplify_assign (x, var, ilist);
5641 else
5643 x = build_sender_ref (ovar, ctx);
5644 gimplify_assign (x, var, ilist);
5646 if (!TREE_READONLY (var)
5647 /* We don't need to receive a new reference to a result
5648 or parm decl. In fact we may not store to it, as that would
5649 invalidate any pending RSO and generate wrong GIMPLE
5650 during inlining. */
5651 && !((TREE_CODE (var) == RESULT_DECL
5652 || TREE_CODE (var) == PARM_DECL)
5653 && DECL_BY_REFERENCE (var)))
5655 x = build_sender_ref (ovar, ctx);
5656 gimplify_assign (var, x, olist);
5662 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5663 other information that must be processed by the target compiler.
5664 Return the maximum number of dimensions the associated loop might
5665 be partitioned over. */
5667 static unsigned
5668 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5669 gimple_seq *seq, omp_context *ctx)
5671 unsigned levels = 0;
5672 unsigned tag = 0;
5673 tree gang_static = NULL_TREE;
5674 auto_vec<tree, 5> args;
5676 args.quick_push (build_int_cst
5677 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5678 args.quick_push (ddvar);
5679 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5681 switch (OMP_CLAUSE_CODE (c))
5683 case OMP_CLAUSE_GANG:
5684 tag |= OLF_DIM_GANG;
5685 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5686 /* static:* is represented by -1, and we can ignore it, as
5687 scheduling is always static. */
5688 if (gang_static && integer_minus_onep (gang_static))
5689 gang_static = NULL_TREE;
5690 levels++;
5691 break;
5693 case OMP_CLAUSE_WORKER:
5694 tag |= OLF_DIM_WORKER;
5695 levels++;
5696 break;
5698 case OMP_CLAUSE_VECTOR:
5699 tag |= OLF_DIM_VECTOR;
5700 levels++;
5701 break;
5703 case OMP_CLAUSE_SEQ:
5704 tag |= OLF_SEQ;
5705 break;
5707 case OMP_CLAUSE_AUTO:
5708 tag |= OLF_AUTO;
5709 break;
5711 case OMP_CLAUSE_INDEPENDENT:
5712 tag |= OLF_INDEPENDENT;
5713 break;
5715 case OMP_CLAUSE_TILE:
5716 tag |= OLF_TILE;
5717 break;
5719 default:
5720 continue;
5724 if (gang_static)
5726 if (DECL_P (gang_static))
5727 gang_static = build_outer_var_ref (gang_static, ctx);
5728 tag |= OLF_GANG_STATIC;
5731 /* In a parallel region, loops are implicitly INDEPENDENT. */
5732 omp_context *tgt = enclosing_target_ctx (ctx);
5733 if (!tgt || is_oacc_parallel (tgt))
5734 tag |= OLF_INDEPENDENT;
5736 if (tag & OLF_TILE)
5737 /* Tiling could use all 3 levels. */
5738 levels = 3;
5739 else
5741 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5742 Ensure at least one level, or two for possible auto
5743 partitioning. */
5744 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5745 << OLF_DIM_BASE) | OLF_SEQ));
5747 if (levels < 1u + maybe_auto)
5748 levels = 1u + maybe_auto;
5751 args.quick_push (build_int_cst (integer_type_node, levels));
5752 args.quick_push (build_int_cst (integer_type_node, tag));
5753 if (gang_static)
5754 args.quick_push (gang_static);
5756 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5757 gimple_set_location (call, loc);
5758 gimple_set_lhs (call, ddvar);
5759 gimple_seq_add_stmt (seq, call);
5761 return levels;
5764 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5765 partitioning level of the enclosed region. */
5767 static void
5768 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5769 tree tofollow, gimple_seq *seq)
5771 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5772 : IFN_UNIQUE_OACC_TAIL_MARK);
5773 tree marker = build_int_cst (integer_type_node, marker_kind);
5774 int nargs = 2 + (tofollow != NULL_TREE);
5775 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5776 marker, ddvar, tofollow);
5777 gimple_set_location (call, loc);
5778 gimple_set_lhs (call, ddvar);
5779 gimple_seq_add_stmt (seq, call);
5782 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5783 the loop clauses, from which we extract reductions. Initialize
5784 HEAD and TAIL. */
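/* A rough sketch of the structure generated below: each partitioned
   level contributes a FORK to HEAD and a matching JOIN to TAIL, in
   nesting order, with per-level reduction setup and teardown placed
   next to the corresponding FORK/JOIN by lower_oacc_reductions:

	HEAD: HEAD_MARK; FORK (outer); ... FORK (inner); ...
	TAIL: ... JOIN (inner); ... JOIN (outer); TAIL_MARK  */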
5786 static void
5787 lower_oacc_head_tail (location_t loc, tree clauses,
5788 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5790 bool inner = false;
5791 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5792 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5794 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5795 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5796 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5798 gcc_assert (count);
5799 for (unsigned done = 1; count; count--, done++)
5801 gimple_seq fork_seq = NULL;
5802 gimple_seq join_seq = NULL;
5804 tree place = build_int_cst (integer_type_node, -1);
5805 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5806 fork_kind, ddvar, place);
5807 gimple_set_location (fork, loc);
5808 gimple_set_lhs (fork, ddvar);
5810 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5811 join_kind, ddvar, place);
5812 gimple_set_location (join, loc);
5813 gimple_set_lhs (join, ddvar);
5815 /* Mark the beginning of this level sequence. */
5816 if (inner)
5817 lower_oacc_loop_marker (loc, ddvar, true,
5818 build_int_cst (integer_type_node, count),
5819 &fork_seq);
5820 lower_oacc_loop_marker (loc, ddvar, false,
5821 build_int_cst (integer_type_node, done),
5822 &join_seq);
5824 lower_oacc_reductions (loc, clauses, place, inner,
5825 fork, join, &fork_seq, &join_seq, ctx);
5827 /* Append this level to head. */
5828 gimple_seq_add_seq (head, fork_seq);
5829 /* Prepend it to tail. */
5830 gimple_seq_add_seq (&join_seq, *tail);
5831 *tail = join_seq;
5833 inner = true;
5836 /* Mark the end of the sequence. */
5837 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5838 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5841 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5842 catch handler and return it. This prevents programs from violating the
5843 structured block semantics with throws. */
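/* Roughly, the result wraps BODY as

	try { BODY } catch { eh_must_not_throw (CLEANUP-OR-TRAP-FN) }

   so that an exception escaping BODY terminates the program rather
   than leaving the structured block.  */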
5845 static gimple_seq
5846 maybe_catch_exception (gimple_seq body)
5848 gimple *g;
5849 tree decl;
5851 if (!flag_exceptions)
5852 return body;
5854 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5855 decl = lang_hooks.eh_protect_cleanup_actions ();
5856 else
5857 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5859 g = gimple_build_eh_must_not_throw (decl);
5860 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5861 GIMPLE_TRY_CATCH);
5863 return gimple_seq_alloc_with_stmt (g);
5867 /* Routines to lower OMP directives into OMP-GIMPLE. */
5869 /* If CTX is a worksharing context inside a cancellable parallel
5870 region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
5871 and a conditional branch to the parallel's cancel_label to handle
5872 cancellation in the implicit barrier. */
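/* Roughly, the tail emitted below is (labels illustrative):

	LHS = GIMPLE_OMP_RETURN;   (the barrier returns the cancel flag)
	if (LHS != 0) goto CANCEL_LABEL;
	FALLTHRU_LABEL:  */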
5874 static void
5875 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5877 gimple *omp_return = gimple_seq_last_stmt (*body);
5878 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5879 if (gimple_omp_return_nowait_p (omp_return))
5880 return;
5881 if (ctx->outer
5882 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5883 && ctx->outer->cancellable)
5885 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5886 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5887 tree lhs = create_tmp_var (c_bool_type);
5888 gimple_omp_return_set_lhs (omp_return, lhs);
5889 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5890 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5891 fold_convert (c_bool_type,
5892 boolean_false_node),
5893 ctx->outer->cancel_label, fallthru_label);
5894 gimple_seq_add_stmt (body, g);
5895 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5899 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5900 CTX is the enclosing OMP context for the current statement. */
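/* A rough sketch of the lowered layout assembled below:

	ILIST (input clause setup)
	GIMPLE_OMP_SECTIONS (control variable .section)
	GIMPLE_OMP_SECTIONS_SWITCH
	bind holding the lowered section bodies
	GIMPLE_OMP_CONTINUE (.section, .section)
	OLIST (reductions), DLIST (destructors)
	GIMPLE_OMP_RETURN (nowait if present)  */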
5902 static void
5903 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5905 tree block, control;
5906 gimple_stmt_iterator tgsi;
5907 gomp_sections *stmt;
5908 gimple *t;
5909 gbind *new_stmt, *bind;
5910 gimple_seq ilist, dlist, olist, new_body;
5912 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5914 push_gimplify_context ();
5916 dlist = NULL;
5917 ilist = NULL;
5918 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5919 &ilist, &dlist, ctx, NULL);
5921 new_body = gimple_omp_body (stmt);
5922 gimple_omp_set_body (stmt, NULL);
5923 tgsi = gsi_start (new_body);
5924 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5926 omp_context *sctx;
5927 gimple *sec_start;
5929 sec_start = gsi_stmt (tgsi);
5930 sctx = maybe_lookup_ctx (sec_start);
5931 gcc_assert (sctx);
5933 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5934 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5935 GSI_CONTINUE_LINKING);
5936 gimple_omp_set_body (sec_start, NULL);
5938 if (gsi_one_before_end_p (tgsi))
5940 gimple_seq l = NULL;
5941 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5942 &l, ctx);
5943 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5944 gimple_omp_section_set_last (sec_start);
5947 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5948 GSI_CONTINUE_LINKING);
5951 block = make_node (BLOCK);
5952 bind = gimple_build_bind (NULL, new_body, block);
5954 olist = NULL;
5955 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5957 block = make_node (BLOCK);
5958 new_stmt = gimple_build_bind (NULL, NULL, block);
5959 gsi_replace (gsi_p, new_stmt, true);
5961 pop_gimplify_context (new_stmt);
5962 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5963 BLOCK_VARS (block) = gimple_bind_vars (bind);
5964 if (BLOCK_VARS (block))
5965 TREE_USED (block) = 1;
5967 new_body = NULL;
5968 gimple_seq_add_seq (&new_body, ilist);
5969 gimple_seq_add_stmt (&new_body, stmt);
5970 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5971 gimple_seq_add_stmt (&new_body, bind);
5973 control = create_tmp_var (unsigned_type_node, ".section");
5974 t = gimple_build_omp_continue (control, control);
5975 gimple_omp_sections_set_control (stmt, control);
5976 gimple_seq_add_stmt (&new_body, t);
5978 gimple_seq_add_seq (&new_body, olist);
5979 if (ctx->cancellable)
5980 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5981 gimple_seq_add_seq (&new_body, dlist);
5983 new_body = maybe_catch_exception (new_body);
5985 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5986 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5987 t = gimple_build_omp_return (nowait);
5988 gimple_seq_add_stmt (&new_body, t);
5989 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5991 gimple_bind_set_body (new_stmt, new_body);
5995 /* A subroutine of lower_omp_single. Expand the simple form of
5996 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5998 if (GOMP_single_start ())
5999 BODY;
6000 [ GOMP_barrier (); ] -> unless 'nowait' is present.
6002 FIXME. It may be better to delay expanding the logic of this until
6003 pass_expand_omp. The expanded logic may make the job more difficult
6004 for a synchronization analysis pass. */
6006 static void
6007 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6009 location_t loc = gimple_location (single_stmt);
6010 tree tlabel = create_artificial_label (loc);
6011 tree flabel = create_artificial_label (loc);
6012 gimple *call, *cond;
6013 tree lhs, decl;
6015 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6016 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6017 call = gimple_build_call (decl, 0);
6018 gimple_call_set_lhs (call, lhs);
6019 gimple_seq_add_stmt (pre_p, call);
6021 cond = gimple_build_cond (EQ_EXPR, lhs,
6022 fold_convert_loc (loc, TREE_TYPE (lhs),
6023 boolean_true_node),
6024 tlabel, flabel);
6025 gimple_seq_add_stmt (pre_p, cond);
6026 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6027 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6028 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6032 /* A subroutine of lower_omp_single. Expand the simple form of
6033 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6035 #pragma omp single copyprivate (a, b, c)
6037 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6040 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6042 BODY;
6043 copyout.a = a;
6044 copyout.b = b;
6045 copyout.c = c;
6046 GOMP_single_copy_end (&copyout);
6048 else
6050 a = copyout_p->a;
6051 b = copyout_p->b;
6052 c = copyout_p->c;
6054 GOMP_barrier ();
6057 FIXME. It may be better to delay expanding the logic of this until
6058 pass_expand_omp. The expanded logic may make the job more difficult
6059 for a synchronization analysis pass. */
6061 static void
6062 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6063 omp_context *ctx)
6065 tree ptr_type, t, l0, l1, l2, bfn_decl;
6066 gimple_seq copyin_seq;
6067 location_t loc = gimple_location (single_stmt);
6069 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6071 ptr_type = build_pointer_type (ctx->record_type);
6072 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6074 l0 = create_artificial_label (loc);
6075 l1 = create_artificial_label (loc);
6076 l2 = create_artificial_label (loc);
6078 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6079 t = build_call_expr_loc (loc, bfn_decl, 0);
6080 t = fold_convert_loc (loc, ptr_type, t);
6081 gimplify_assign (ctx->receiver_decl, t, pre_p);
6083 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6084 build_int_cst (ptr_type, 0));
6085 t = build3 (COND_EXPR, void_type_node, t,
6086 build_and_jump (&l0), build_and_jump (&l1));
6087 gimplify_and_add (t, pre_p);
6089 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6091 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6093 copyin_seq = NULL;
6094 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6095 &copyin_seq, ctx);
6097 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6098 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6099 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6100 gimplify_and_add (t, pre_p);
6102 t = build_and_jump (&l2);
6103 gimplify_and_add (t, pre_p);
6105 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6107 gimple_seq_add_seq (pre_p, copyin_seq);
6109 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6113 /* Expand code for an OpenMP single directive. */
6115 static void
6116 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6118 tree block;
6119 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6120 gbind *bind;
6121 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6123 push_gimplify_context ();
6125 block = make_node (BLOCK);
6126 bind = gimple_build_bind (NULL, NULL, block);
6127 gsi_replace (gsi_p, bind, true);
6128 bind_body = NULL;
6129 dlist = NULL;
6130 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6131 &bind_body, &dlist, ctx, NULL);
6132 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6134 gimple_seq_add_stmt (&bind_body, single_stmt);
6136 if (ctx->record_type)
6137 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6138 else
6139 lower_omp_single_simple (single_stmt, &bind_body);
6141 gimple_omp_set_body (single_stmt, NULL);
6143 gimple_seq_add_seq (&bind_body, dlist);
6145 bind_body = maybe_catch_exception (bind_body);
6147 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6148 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6149 gimple *g = gimple_build_omp_return (nowait);
6150 gimple_seq_add_stmt (&bind_body_tail, g);
6151 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6152 if (ctx->record_type)
6154 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6155 tree clobber = build_constructor (ctx->record_type, NULL);
6156 TREE_THIS_VOLATILE (clobber) = 1;
6157 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6158 clobber), GSI_SAME_STMT);
6160 gimple_seq_add_seq (&bind_body, bind_body_tail);
6161 gimple_bind_set_body (bind, bind_body);
6163 pop_gimplify_context (bind);
6165 gimple_bind_append_vars (bind, ctx->block_vars);
6166 BLOCK_VARS (block) = ctx->block_vars;
6167 if (BLOCK_VARS (block))
6168 TREE_USED (block) = 1;
6172 /* Expand code for an OpenMP master directive. */
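/* Roughly, the guard emitted below is

	if (omp_get_thread_num () != 0) goto LAB;
	lowered body
	LAB:

   with LAB an artificial label (name illustrative).  */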
6174 static void
6175 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6177 tree block, lab = NULL, x, bfn_decl;
6178 gimple *stmt = gsi_stmt (*gsi_p);
6179 gbind *bind;
6180 location_t loc = gimple_location (stmt);
6181 gimple_seq tseq;
6183 push_gimplify_context ();
6185 block = make_node (BLOCK);
6186 bind = gimple_build_bind (NULL, NULL, block);
6187 gsi_replace (gsi_p, bind, true);
6188 gimple_bind_add_stmt (bind, stmt);
6190 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6191 x = build_call_expr_loc (loc, bfn_decl, 0);
6192 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6193 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6194 tseq = NULL;
6195 gimplify_and_add (x, &tseq);
6196 gimple_bind_add_seq (bind, tseq);
6198 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6199 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6200 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6201 gimple_omp_set_body (stmt, NULL);
6203 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6205 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6207 pop_gimplify_context (bind);
6209 gimple_bind_append_vars (bind, ctx->block_vars);
6210 BLOCK_VARS (block) = ctx->block_vars;
6214 /* Expand code for an OpenMP taskgroup directive. */
6216 static void
6217 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6219 gimple *stmt = gsi_stmt (*gsi_p);
6220 gcall *x;
6221 gbind *bind;
6222 tree block = make_node (BLOCK);
6224 bind = gimple_build_bind (NULL, NULL, block);
6225 gsi_replace (gsi_p, bind, true);
6226 gimple_bind_add_stmt (bind, stmt);
6228 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6229 0);
6230 gimple_bind_add_stmt (bind, x);
6232 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6233 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6234 gimple_omp_set_body (stmt, NULL);
6236 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6238 gimple_bind_append_vars (bind, ctx->block_vars);
6239 BLOCK_VARS (block) = ctx->block_vars;
6243 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6245 static void
6246 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6247 omp_context *ctx)
6249 struct omp_for_data fd;
6250 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6251 return;
6253 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6254 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6255 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6256 if (!fd.ordered)
6257 return;
6259 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6260 tree c = gimple_omp_ordered_clauses (ord_stmt);
6261 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6262 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6264 /* Merge depend clauses from multiple adjacent
6265 #pragma omp ordered depend(sink:...) constructs
6266 into one #pragma omp ordered depend(sink:...), so that
6267 we can optimize them together. */
6268 gimple_stmt_iterator gsi = *gsi_p;
6269 gsi_next (&gsi);
6270 while (!gsi_end_p (gsi))
6272 gimple *stmt = gsi_stmt (gsi);
6273 if (is_gimple_debug (stmt)
6274 || gimple_code (stmt) == GIMPLE_NOP)
6276 gsi_next (&gsi);
6277 continue;
6279 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6280 break;
6281 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6282 c = gimple_omp_ordered_clauses (ord_stmt2);
6283 if (c == NULL_TREE
6284 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6285 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6286 break;
6287 while (*list_p)
6288 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6289 *list_p = c;
6290 gsi_remove (&gsi, true);
6294 /* Canonicalize sink dependence clauses into one folded clause if
6295 possible.
6297 The basic algorithm is to create a sink vector whose first
6298 element is the GCD of all the first elements, and whose remaining
6299 elements are the minimum of the subsequent columns.
6301 We ignore dependence vectors whose first element is zero because
6302 such dependencies are known to be executed by the same thread.
6304 We take into account the direction of the loop, so a minimum
6305 becomes a maximum if the loop is iterating forwards. We also
6306 ignore sink clauses where the loop direction is unknown, or where
6307 the offsets are clearly invalid because they are not a multiple
6308 of the loop increment.
6310 For example:
6312 #pragma omp for ordered(2)
6313 for (i=0; i < N; ++i)
6314 for (j=0; j < M; ++j)
6316 #pragma omp ordered \
6317 depend(sink:i-8,j-2) \
6318 depend(sink:i,j-1) \ // Completely ignored because i+0.
6319 depend(sink:i-4,j-3) \
6320 depend(sink:i-6,j-4)
6321 #pragma omp ordered depend(source)
6324 Folded clause is:
6326 depend(sink:-gcd(8,4,6),-min(2,3,4))
6327 -or-
6328 depend(sink:-2,-2)
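     As a worked check of the folding above: gcd (8, 4, 6) = 2 and
     min (2, 3, 4) = 2, which yields the depend(sink:-2,-2) shown.  */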
6331 /* FIXME: Computing GCDs where the first element is zero is
6332 non-trivial in the presence of collapsed loops. Do this later. */
6333 if (fd.collapse > 1)
6334 return;
6336 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6338 /* wide_int is not a POD so it must be default-constructed. */
6339 for (unsigned i = 0; i != 2 * len - 1; ++i)
6340 new (static_cast<void*>(folded_deps + i)) wide_int ();
6342 tree folded_dep = NULL_TREE;
6343 /* TRUE if the first dimension's offset is negative. */
6344 bool neg_offset_p = false;
6346 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6347 unsigned int i;
6348 while ((c = *list_p) != NULL)
6350 bool remove = false;
6352 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6353 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6354 goto next_ordered_clause;
6356 tree vec;
6357 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6358 vec && TREE_CODE (vec) == TREE_LIST;
6359 vec = TREE_CHAIN (vec), ++i)
6361 gcc_assert (i < len);
6363 /* omp_extract_for_data has canonicalized the condition. */
6364 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6365 || fd.loops[i].cond_code == GT_EXPR);
6366 bool forward = fd.loops[i].cond_code == LT_EXPR;
6367 bool maybe_lexically_later = true;
6369 /* While the committee makes up its mind, bail if we have any
6370 non-constant steps. */
6371 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6372 goto lower_omp_ordered_ret;
6374 tree itype = TREE_TYPE (TREE_VALUE (vec));
6375 if (POINTER_TYPE_P (itype))
6376 itype = sizetype;
6377 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6378 TYPE_PRECISION (itype),
6379 TYPE_SIGN (itype));
6381 /* Ignore invalid offsets that are not multiples of the step. */
6382 if (!wi::multiple_of_p (wi::abs (offset),
6383 wi::abs (wi::to_wide (fd.loops[i].step)),
6384 UNSIGNED))
6386 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6387 "ignoring sink clause with offset that is not "
6388 "a multiple of the loop step");
6389 remove = true;
6390 goto next_ordered_clause;
6393 /* Calculate the first dimension. The first dimension of
6394 the folded dependency vector is the GCD of the first
6395 elements, while ignoring any first elements whose offset
6396 is 0. */
6397 if (i == 0)
6399 /* Ignore dependence vectors whose first dimension is 0. */
6400 if (offset == 0)
6402 remove = true;
6403 goto next_ordered_clause;
6405 else
6407 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6409 error_at (OMP_CLAUSE_LOCATION (c),
6410 "first offset must be in opposite direction "
6411 "of loop iterations");
6412 goto lower_omp_ordered_ret;
6414 if (forward)
6415 offset = -offset;
6416 neg_offset_p = forward;
6417 /* Initialize the first time around. */
6418 if (folded_dep == NULL_TREE)
6420 folded_dep = c;
6421 folded_deps[0] = offset;
6423 else
6424 folded_deps[0] = wi::gcd (folded_deps[0],
6425 offset, UNSIGNED);
6428 /* Calculate minimum for the remaining dimensions. */
6429 else
6431 folded_deps[len + i - 1] = offset;
6432 if (folded_dep == c)
6433 folded_deps[i] = offset;
6434 else if (maybe_lexically_later
6435 && !wi::eq_p (folded_deps[i], offset))
6437 if (forward ^ wi::gts_p (folded_deps[i], offset))
6439 unsigned int j;
6440 folded_dep = c;
6441 for (j = 1; j <= i; j++)
6442 folded_deps[j] = folded_deps[len + j - 1];
6444 else
6445 maybe_lexically_later = false;
6449 gcc_assert (i == len);
6451 remove = true;
6453 next_ordered_clause:
6454 if (remove)
6455 *list_p = OMP_CLAUSE_CHAIN (c);
6456 else
6457 list_p = &OMP_CLAUSE_CHAIN (c);
6460 if (folded_dep)
6462 if (neg_offset_p)
6463 folded_deps[0] = -folded_deps[0];
6465 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6466 if (POINTER_TYPE_P (itype))
6467 itype = sizetype;
6469 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6470 = wide_int_to_tree (itype, folded_deps[0]);
6471 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6472 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6475 lower_omp_ordered_ret:
6477 /* Ordered without clauses is #pragma omp ordered threads, while
6478 we want a nop instead if we remove all clauses. */
6479 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6480 gsi_replace (gsi_p, gimple_build_nop (), true);
6484 /* Expand code for an OpenMP ordered directive. */
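/* For the maybe_simt case below, a rough sketch: SIMT lanes take turns
   executing the ordered body in lane order (labels illustrative):

	counter = GOMP_SIMT_LANE ();
	BODY: if (GOMP_SIMT_ORDERED_PRED (counter) != 0) goto TEST;
	lowered ordered body
	TEST: counter = counter - 1;
	if (GOMP_SIMT_VOTE_ANY (counter >= 0) != 0) goto BODY;  */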
6486 static void
6487 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6489 tree block;
6490 gimple *stmt = gsi_stmt (*gsi_p), *g;
6491 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6492 gcall *x;
6493 gbind *bind;
6494 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6495 OMP_CLAUSE_SIMD);
6496 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6497 loop. */
6498 bool maybe_simt
6499 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6500 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6501 OMP_CLAUSE_THREADS);
6503 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6504 OMP_CLAUSE_DEPEND))
6506 /* FIXME: This needs to be moved to the expansion to verify various
6507 conditions only testable on a cfg with dominators computed; also,
6508 all the depend clauses to be merged still might need to be available
6509 for the runtime checks. */
6510 if (0)
6511 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6512 return;
6515 push_gimplify_context ();
6517 block = make_node (BLOCK);
6518 bind = gimple_build_bind (NULL, NULL, block);
6519 gsi_replace (gsi_p, bind, true);
6520 gimple_bind_add_stmt (bind, stmt);
6522 if (simd)
6524 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6525 build_int_cst (NULL_TREE, threads));
6526 cfun->has_simduid_loops = true;
6528 else
6529 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6530 0);
6531 gimple_bind_add_stmt (bind, x);
6533 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6534 if (maybe_simt)
6536 counter = create_tmp_var (integer_type_node);
6537 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6538 gimple_call_set_lhs (g, counter);
6539 gimple_bind_add_stmt (bind, g);
6541 body = create_artificial_label (UNKNOWN_LOCATION);
6542 test = create_artificial_label (UNKNOWN_LOCATION);
6543 gimple_bind_add_stmt (bind, gimple_build_label (body));
6545 tree simt_pred = create_tmp_var (integer_type_node);
6546 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6547 gimple_call_set_lhs (g, simt_pred);
6548 gimple_bind_add_stmt (bind, g);
6550 tree t = create_artificial_label (UNKNOWN_LOCATION);
6551 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6552 gimple_bind_add_stmt (bind, g);
6554 gimple_bind_add_stmt (bind, gimple_build_label (t));
6556 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6557 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6558 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6559 gimple_omp_set_body (stmt, NULL);
6561 if (maybe_simt)
6563 gimple_bind_add_stmt (bind, gimple_build_label (test));
6564 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6565 gimple_bind_add_stmt (bind, g);
6567 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6568 tree nonneg = create_tmp_var (integer_type_node);
6569 gimple_seq tseq = NULL;
6570 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6571 gimple_bind_add_seq (bind, tseq);
6573 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6574 gimple_call_set_lhs (g, nonneg);
6575 gimple_bind_add_stmt (bind, g);
6577 tree end = create_artificial_label (UNKNOWN_LOCATION);
6578 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6579 gimple_bind_add_stmt (bind, g);
6581 gimple_bind_add_stmt (bind, gimple_build_label (end));
6583 if (simd)
6584 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6585 build_int_cst (NULL_TREE, threads));
6586 else
6587 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6588 0);
6589 gimple_bind_add_stmt (bind, x);
6591 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6593 pop_gimplify_context (bind);
6595 gimple_bind_append_vars (bind, ctx->block_vars);
6596 BLOCK_VARS (block) = gimple_bind_vars (bind);
6600 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6601 substitution of a couple of function calls. But the NAMED case
6602 requires that languages coordinate a symbol name. It is therefore
6603 best put here in common code. */
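/* For illustration, a named critical such as "#pragma omp critical (foo)"
   lowers to roughly

	GOMP_critical_name_start (&.gomp_critical_user_foo);
	lowered body
	GOMP_critical_name_end (&.gomp_critical_user_foo);

   with .gomp_critical_user_foo the common symbol created below; the
   unnamed form calls GOMP_critical_start/GOMP_critical_end instead.  */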
6605 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6607 static void
6608 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6610 tree block;
6611 tree name, lock, unlock;
6612 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6613 gbind *bind;
6614 location_t loc = gimple_location (stmt);
6615 gimple_seq tbody;
6617 name = gimple_omp_critical_name (stmt);
6618 if (name)
6620 tree decl;
6622 if (!critical_name_mutexes)
6623 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6625 tree *n = critical_name_mutexes->get (name);
6626 if (n == NULL)
6628 char *new_str;
6630 decl = create_tmp_var_raw (ptr_type_node);
6632 new_str = ACONCAT ((".gomp_critical_user_",
6633 IDENTIFIER_POINTER (name), NULL));
6634 DECL_NAME (decl) = get_identifier (new_str);
6635 TREE_PUBLIC (decl) = 1;
6636 TREE_STATIC (decl) = 1;
6637 DECL_COMMON (decl) = 1;
6638 DECL_ARTIFICIAL (decl) = 1;
6639 DECL_IGNORED_P (decl) = 1;
6641 varpool_node::finalize_decl (decl);
6643 critical_name_mutexes->put (name, decl);
6645 else
6646 decl = *n;
6648 /* If '#pragma omp critical' is inside offloaded region or
6649 inside function marked as offloadable, the symbol must be
6650 marked as offloadable too. */
6651 omp_context *octx;
6652 if (cgraph_node::get (current_function_decl)->offloadable)
6653 varpool_node::get_create (decl)->offloadable = 1;
6654 else
6655 for (octx = ctx->outer; octx; octx = octx->outer)
6656 if (is_gimple_omp_offloaded (octx->stmt))
6658 varpool_node::get_create (decl)->offloadable = 1;
6659 break;
6662 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6663 lock = build_call_expr_loc (loc, lock, 1,
6664 build_fold_addr_expr_loc (loc, decl));
6666 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6667 unlock = build_call_expr_loc (loc, unlock, 1,
6668 build_fold_addr_expr_loc (loc, decl));
6670 else
6672 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6673 lock = build_call_expr_loc (loc, lock, 0);
6675 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6676 unlock = build_call_expr_loc (loc, unlock, 0);
6679 push_gimplify_context ();
6681 block = make_node (BLOCK);
6682 bind = gimple_build_bind (NULL, NULL, block);
6683 gsi_replace (gsi_p, bind, true);
6684 gimple_bind_add_stmt (bind, stmt);
6686 tbody = gimple_bind_body (bind);
6687 gimplify_and_add (lock, &tbody);
6688 gimple_bind_set_body (bind, tbody);
6690 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6691 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6692 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6693 gimple_omp_set_body (stmt, NULL);
6695 tbody = gimple_bind_body (bind);
6696 gimplify_and_add (unlock, &tbody);
6697 gimple_bind_set_body (bind, tbody);
6699 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6701 pop_gimplify_context (bind);
6702 gimple_bind_append_vars (bind, ctx->block_vars);
6703 BLOCK_VARS (block) = gimple_bind_vars (bind);
6706 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6707 for a lastprivate clause. Given a loop control predicate of (V
6708 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6709 is appended to *DLIST, iterator initialization is appended to
6710 *BODY_P. */
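/* A rough sketch, for "for (V = N1; V < N2; V += STEP)":

	V = N1;					appended to *BODY_P
	...
	if (V >= N2) lastprivate copy-out	appended to *DLIST

   where ">=" becomes "==" when the step is known to be 1 or -1.  */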
6712 static void
6713 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6714 gimple_seq *dlist, struct omp_context *ctx)
6716 tree clauses, cond, vinit;
6717 enum tree_code cond_code;
6718 gimple_seq stmts;
6720 cond_code = fd->loop.cond_code;
6721 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6723 /* When possible, use a strict equality expression. This can let
6724 VRP-type optimizations deduce the value and remove a copy. */
6725 if (tree_fits_shwi_p (fd->loop.step))
6727 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6728 if (step == 1 || step == -1)
6729 cond_code = EQ_EXPR;
6732 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6733 || gimple_omp_for_grid_phony (fd->for_stmt))
6734 cond = omp_grid_lastprivate_predicate (fd);
6735 else
6737 tree n2 = fd->loop.n2;
6738 if (fd->collapse > 1
6739 && TREE_CODE (n2) != INTEGER_CST
6740 && gimple_omp_for_combined_into_p (fd->for_stmt))
6742 struct omp_context *taskreg_ctx = NULL;
6743 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6745 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6746 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6747 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6749 if (gimple_omp_for_combined_into_p (gfor))
6751 gcc_assert (ctx->outer->outer
6752 && is_parallel_ctx (ctx->outer->outer));
6753 taskreg_ctx = ctx->outer->outer;
6755 else
6757 struct omp_for_data outer_fd;
6758 omp_extract_for_data (gfor, &outer_fd, NULL);
6759 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6762 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6763 taskreg_ctx = ctx->outer->outer;
6765 else if (is_taskreg_ctx (ctx->outer))
6766 taskreg_ctx = ctx->outer;
6767 if (taskreg_ctx)
6769 int i;
6770 tree taskreg_clauses
6771 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6772 tree innerc = omp_find_clause (taskreg_clauses,
6773 OMP_CLAUSE__LOOPTEMP_);
6774 gcc_assert (innerc);
6775 for (i = 0; i < fd->collapse; i++)
6777 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6778 OMP_CLAUSE__LOOPTEMP_);
6779 gcc_assert (innerc);
6781 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6782 OMP_CLAUSE__LOOPTEMP_);
6783 if (innerc)
6784 n2 = fold_convert (TREE_TYPE (n2),
6785 lookup_decl (OMP_CLAUSE_DECL (innerc),
6786 taskreg_ctx));
6789 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6792 clauses = gimple_omp_for_clauses (fd->for_stmt);
6793 stmts = NULL;
6794 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6795 if (!gimple_seq_empty_p (stmts))
6797 gimple_seq_add_seq (&stmts, *dlist);
6798 *dlist = stmts;
6800 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6801 vinit = fd->loop.n1;
6802 if (cond_code == EQ_EXPR
6803 && tree_fits_shwi_p (fd->loop.n2)
6804 && ! integer_zerop (fd->loop.n2))
6805 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6806 else
6807 vinit = unshare_expr (vinit);
6809 /* Initialize the iterator variable, so that threads that don't execute
6810 any iterations don't execute the lastprivate clauses by accident. */
6811 gimplify_assign (fd->loop.v, vinit, body_p);
6816 /* Lower code for an OMP loop directive. */
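/* A rough sketch of the overall shape assembled below, wrapped in a
   GIMPLE_BIND (OpenACC head/tail markers added around it when needed):

	input-clause setup, loop pre-body
	GIMPLE_OMP_FOR
	lowered loop body
	GIMPLE_OMP_CONTINUE (V, V)
	reduction code, destructor list
	GIMPLE_OMP_RETURN (nowait if present)  */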
6818 static void
6819 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6821 tree *rhs_p, block;
6822 struct omp_for_data fd, *fdp = NULL;
6823 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6824 gbind *new_stmt;
6825 gimple_seq omp_for_body, body, dlist;
6826 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6827 size_t i;
6829 push_gimplify_context ();
6831 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6833 block = make_node (BLOCK);
6834 new_stmt = gimple_build_bind (NULL, NULL, block);
6835 /* Replace at gsi right away, so that 'stmt' is no longer a member
6836 of a sequence, as we're going to add it to a different
6837 one below. */
6838 gsi_replace (gsi_p, new_stmt, true);
6840 /* Move declarations of temporaries out of the loop body before we
6841 make it go away. */
6842 omp_for_body = gimple_omp_body (stmt);
6843 if (!gimple_seq_empty_p (omp_for_body)
6844 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6846 gbind *inner_bind
6847 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6848 tree vars = gimple_bind_vars (inner_bind);
6849 gimple_bind_append_vars (new_stmt, vars);
6850 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6851 keep them on the inner_bind and its block. */
6852 gimple_bind_set_vars (inner_bind, NULL_TREE);
6853 if (gimple_bind_block (inner_bind))
6854 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6857 if (gimple_omp_for_combined_into_p (stmt))
6859 omp_extract_for_data (stmt, &fd, NULL);
6860 fdp = &fd;
6862 /* We need two temporaries with fd.loop.v type (istart/iend)
6863 and then (fd.collapse - 1) temporaries with the same
6864 type for count2 ... countN-1 vars if not constant. */
6865 size_t count = 2;
6866 tree type = fd.iter_type;
6867 if (fd.collapse > 1
6868 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6869 count += fd.collapse - 1;
6870 bool taskreg_for
6871 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6872 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6873 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6874 tree simtc = NULL;
6875 tree clauses = *pc;
6876 if (taskreg_for)
6877 outerc
6878 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6879 OMP_CLAUSE__LOOPTEMP_);
6880 if (ctx->simt_stmt)
6881 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6882 OMP_CLAUSE__LOOPTEMP_);
6883 for (i = 0; i < count; i++)
6885 tree temp;
6886 if (taskreg_for)
6888 gcc_assert (outerc);
6889 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6890 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6891 OMP_CLAUSE__LOOPTEMP_);
6893 else
6895 /* If there are two adjacent SIMD stmts, one with a _simt_
6896 clause and another without, make sure they have the same
6897 decls in _looptemp_ clauses, because the outer stmt
6898 they are combined into will look up just one inner_stmt. */
6899 if (ctx->simt_stmt)
6900 temp = OMP_CLAUSE_DECL (simtc);
6901 else
6902 temp = create_tmp_var (type);
6903 insert_decl_map (&ctx->outer->cb, temp, temp);
6905 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6906 OMP_CLAUSE_DECL (*pc) = temp;
6907 pc = &OMP_CLAUSE_CHAIN (*pc);
6908 if (ctx->simt_stmt)
6909 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6910 OMP_CLAUSE__LOOPTEMP_);
6912 *pc = clauses;
6915 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6916 dlist = NULL;
6917 body = NULL;
6918 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6919 fdp);
6920 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6922 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6924 /* Lower the header expressions. At this point, we can assume that
6925 the header is of the form:
6927 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6929 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6930 using the .omp_data_s mapping, if needed. */
6931 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6933 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6934 if (!is_gimple_min_invariant (*rhs_p))
6935 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6936 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6937 recompute_tree_invariant_for_addr_expr (*rhs_p);
6939 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6940 if (!is_gimple_min_invariant (*rhs_p))
6941 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6942 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6943 recompute_tree_invariant_for_addr_expr (*rhs_p);
6945 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6946 if (!is_gimple_min_invariant (*rhs_p))
6947 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6950 /* Once lowered, extract the bounds and clauses. */
6951 omp_extract_for_data (stmt, &fd, NULL);
6953 if (is_gimple_omp_oacc (ctx->stmt)
6954 && !ctx_in_oacc_kernels_region (ctx))
6955 lower_oacc_head_tail (gimple_location (stmt),
6956 gimple_omp_for_clauses (stmt),
6957 &oacc_head, &oacc_tail, ctx);
6959 /* Add OpenACC partitioning and reduction markers just before the loop. */
6960 if (oacc_head)
6961 gimple_seq_add_seq (&body, oacc_head);
6963 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6965 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6966 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6967 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6968 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6970 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6971 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6972 OMP_CLAUSE_LINEAR_STEP (c)
6973 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6974 ctx);
6977 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6978 && gimple_omp_for_grid_phony (stmt));
6979 if (!phony_loop)
6980 gimple_seq_add_stmt (&body, stmt);
6981 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6983 if (!phony_loop)
6984 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6985 fd.loop.v));
6987 /* After the loop, add exit clauses. */
6988 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6990 if (ctx->cancellable)
6991 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6993 gimple_seq_add_seq (&body, dlist);
6995 body = maybe_catch_exception (body);
6997 if (!phony_loop)
6999 /* Region exit marker goes at the end of the loop body. */
7000 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
7001 maybe_add_implicit_barrier_cancel (ctx, &body);
7004 /* Add OpenACC joining and reduction markers just after the loop. */
7005 if (oacc_tail)
7006 gimple_seq_add_seq (&body, oacc_tail);
7008 pop_gimplify_context (new_stmt);
7010 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7011 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
7012 if (BLOCK_VARS (block))
7013 TREE_USED (block) = 1;
7015 gimple_bind_set_body (new_stmt, body);
7016 gimple_omp_set_body (stmt, NULL);
7017 gimple_omp_for_set_pre_body (stmt, NULL);
7020 /* Callback for walk_stmts. Check if the current statement only contains
7021 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
7023 static tree
7024 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7025 bool *handled_ops_p,
7026 struct walk_stmt_info *wi)
7028 int *info = (int *) wi->info;
7029 gimple *stmt = gsi_stmt (*gsi_p);
7031 *handled_ops_p = true;
7032 switch (gimple_code (stmt))
7034 WALK_SUBSTMTS;
7036 case GIMPLE_OMP_FOR:
7037 case GIMPLE_OMP_SECTIONS:
7038 *info = *info == 0 ? 1 : -1;
7039 break;
7040 default:
7041 *info = -1;
7042 break;
7044 return NULL;
7047 struct omp_taskcopy_context
7049 /* This field must be at the beginning, as we do "inheritance": Some
7050 callback functions for tree-inline.c (e.g., omp_copy_decl)
7051 receive a copy_body_data pointer that is up-casted to an
7052 omp_context pointer. */
7053 copy_body_data cb;
7054 omp_context *ctx;
7057 static tree
7058 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7060 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7062 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7063 return create_tmp_var (TREE_TYPE (var));
7065 return var;
7068 static tree
7069 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7071 tree name, new_fields = NULL, type, f;
7073 type = lang_hooks.types.make_type (RECORD_TYPE);
7074 name = DECL_NAME (TYPE_NAME (orig_type));
7075 name = build_decl (gimple_location (tcctx->ctx->stmt),
7076 TYPE_DECL, name, type);
7077 TYPE_NAME (type) = name;
7079 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7081 tree new_f = copy_node (f);
7082 DECL_CONTEXT (new_f) = type;
7083 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7084 TREE_CHAIN (new_f) = new_fields;
7085 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7086 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7087 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7088 &tcctx->cb, NULL);
7089 new_fields = new_f;
7090 tcctx->cb.decl_map->put (f, new_f);
7092 TYPE_FIELDS (type) = nreverse (new_fields);
7093 layout_type (type);
7094 return type;
7097 /* Create task copyfn. */
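/* Roughly, and with illustrative struct and member names, the function
   populated below has the shape

	void .omp_task_copyfn (struct .omp_data_s *arg,
			       struct .omp_data_t *sarg)
	{
	  arg->a = sarg->a;   copy shared pointers, firstprivate vars
	}

   copy-constructing firstprivate members via the language hook.  */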
7099 static void
7100 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7102 struct function *child_cfun;
7103 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7104 tree record_type, srecord_type, bind, list;
7105 bool record_needs_remap = false, srecord_needs_remap = false;
7106 splay_tree_node n;
7107 struct omp_taskcopy_context tcctx;
7108 location_t loc = gimple_location (task_stmt);
7110 child_fn = gimple_omp_task_copy_fn (task_stmt);
7111 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7112 gcc_assert (child_cfun->cfg == NULL);
7113 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7115 /* Reset DECL_CONTEXT on function arguments. */
7116 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7117 DECL_CONTEXT (t) = child_fn;
7119 /* Populate the function. */
7120 push_gimplify_context ();
7121 push_cfun (child_cfun);
7123 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7124 TREE_SIDE_EFFECTS (bind) = 1;
7125 list = NULL;
7126 DECL_SAVED_TREE (child_fn) = bind;
7127 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7129 /* Remap src and dst argument types if needed. */
7130 record_type = ctx->record_type;
7131 srecord_type = ctx->srecord_type;
7132 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7133 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7135 record_needs_remap = true;
7136 break;
7138 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7139 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7141 srecord_needs_remap = true;
7142 break;
7145 if (record_needs_remap || srecord_needs_remap)
7147 memset (&tcctx, '\0', sizeof (tcctx));
7148 tcctx.cb.src_fn = ctx->cb.src_fn;
7149 tcctx.cb.dst_fn = child_fn;
7150 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7151 gcc_checking_assert (tcctx.cb.src_node);
7152 tcctx.cb.dst_node = tcctx.cb.src_node;
7153 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7154 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7155 tcctx.cb.eh_lp_nr = 0;
7156 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7157 tcctx.cb.decl_map = new hash_map<tree, tree>;
7158 tcctx.ctx = ctx;
7160 if (record_needs_remap)
7161 record_type = task_copyfn_remap_type (&tcctx, record_type);
7162 if (srecord_needs_remap)
7163 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7165 else
7166 tcctx.cb.decl_map = NULL;
7168 arg = DECL_ARGUMENTS (child_fn);
7169 TREE_TYPE (arg) = build_pointer_type (record_type);
7170 sarg = DECL_CHAIN (arg);
7171 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7173 /* First pass: initialize temporaries used in record_type and srecord_type
7174 sizes and field offsets. */
7175 if (tcctx.cb.decl_map)
7176 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7177 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7179 tree *p;
7181 decl = OMP_CLAUSE_DECL (c);
7182 p = tcctx.cb.decl_map->get (decl);
7183 if (p == NULL)
7184 continue;
7185 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7186 sf = (tree) n->value;
7187 sf = *tcctx.cb.decl_map->get (sf);
7188 src = build_simple_mem_ref_loc (loc, sarg);
7189 src = omp_build_component_ref (src, sf);
7190 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7191 append_to_statement_list (t, &list);
7194 /* Second pass: copy shared var pointers and copy-construct non-VLA
7195 firstprivate vars. */
7196 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7197 switch (OMP_CLAUSE_CODE (c))
7199 splay_tree_key key;
7200 case OMP_CLAUSE_SHARED:
7201 decl = OMP_CLAUSE_DECL (c);
7202 key = (splay_tree_key) decl;
7203 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7204 key = (splay_tree_key) &DECL_UID (decl);
7205 n = splay_tree_lookup (ctx->field_map, key);
7206 if (n == NULL)
7207 break;
7208 f = (tree) n->value;
7209 if (tcctx.cb.decl_map)
7210 f = *tcctx.cb.decl_map->get (f);
7211 n = splay_tree_lookup (ctx->sfield_map, key);
7212 sf = (tree) n->value;
7213 if (tcctx.cb.decl_map)
7214 sf = *tcctx.cb.decl_map->get (sf);
7215 src = build_simple_mem_ref_loc (loc, sarg);
7216 src = omp_build_component_ref (src, sf);
7217 dst = build_simple_mem_ref_loc (loc, arg);
7218 dst = omp_build_component_ref (dst, f);
7219 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7220 append_to_statement_list (t, &list);
7221 break;
7222 case OMP_CLAUSE_FIRSTPRIVATE:
7223 decl = OMP_CLAUSE_DECL (c);
7224 if (is_variable_sized (decl))
7225 break;
7226 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7227 if (n == NULL)
7228 break;
7229 f = (tree) n->value;
7230 if (tcctx.cb.decl_map)
7231 f = *tcctx.cb.decl_map->get (f);
7232 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7233 if (n != NULL)
7235 sf = (tree) n->value;
7236 if (tcctx.cb.decl_map)
7237 sf = *tcctx.cb.decl_map->get (sf);
7238 src = build_simple_mem_ref_loc (loc, sarg);
7239 src = omp_build_component_ref (src, sf);
7240 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7241 src = build_simple_mem_ref_loc (loc, src);
7243 else
7244 src = decl;
7245 dst = build_simple_mem_ref_loc (loc, arg);
7246 dst = omp_build_component_ref (dst, f);
7247 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7248 append_to_statement_list (t, &list);
7249 break;
7250 case OMP_CLAUSE_PRIVATE:
7251 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7252 break;
7253 decl = OMP_CLAUSE_DECL (c);
7254 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7255 f = (tree) n->value;
7256 if (tcctx.cb.decl_map)
7257 f = *tcctx.cb.decl_map->get (f);
7258 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7259 if (n != NULL)
7261 sf = (tree) n->value;
7262 if (tcctx.cb.decl_map)
7263 sf = *tcctx.cb.decl_map->get (sf);
7264 src = build_simple_mem_ref_loc (loc, sarg);
7265 src = omp_build_component_ref (src, sf);
7266 if (use_pointer_for_field (decl, NULL))
7267 src = build_simple_mem_ref_loc (loc, src);
7269 else
7270 src = decl;
7271 dst = build_simple_mem_ref_loc (loc, arg);
7272 dst = omp_build_component_ref (dst, f);
7273 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7274 append_to_statement_list (t, &list);
7275 break;
7276 default:
7277 break;
7280 /* Last pass: handle VLA firstprivates. */
7281 if (tcctx.cb.decl_map)
7282 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7283 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7285 tree ind, ptr, df;
7287 decl = OMP_CLAUSE_DECL (c);
7288 if (!is_variable_sized (decl))
7289 continue;
7290 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7291 if (n == NULL)
7292 continue;
7293 f = (tree) n->value;
7294 f = *tcctx.cb.decl_map->get (f);
7295 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7296 ind = DECL_VALUE_EXPR (decl);
7297 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7298 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7299 n = splay_tree_lookup (ctx->sfield_map,
7300 (splay_tree_key) TREE_OPERAND (ind, 0));
7301 sf = (tree) n->value;
7302 sf = *tcctx.cb.decl_map->get (sf);
7303 src = build_simple_mem_ref_loc (loc, sarg);
7304 src = omp_build_component_ref (src, sf);
7305 src = build_simple_mem_ref_loc (loc, src);
7306 dst = build_simple_mem_ref_loc (loc, arg);
7307 dst = omp_build_component_ref (dst, f);
7308 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7309 append_to_statement_list (t, &list);
7310 n = splay_tree_lookup (ctx->field_map,
7311 (splay_tree_key) TREE_OPERAND (ind, 0));
7312 df = (tree) n->value;
7313 df = *tcctx.cb.decl_map->get (df);
7314 ptr = build_simple_mem_ref_loc (loc, arg);
7315 ptr = omp_build_component_ref (ptr, df);
7316 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7317 build_fold_addr_expr_loc (loc, dst));
7318 append_to_statement_list (t, &list);
7321 t = build1 (RETURN_EXPR, void_type_node, NULL);
7322 append_to_statement_list (t, &list);
7324 if (tcctx.cb.decl_map)
7325 delete tcctx.cb.decl_map;
7326 pop_gimplify_context (NULL);
7327 BIND_EXPR_BODY (bind) = list;
7328 pop_cfun ();
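/* lower_depend_clauses below materializes the depend clauses into an
   address array understood by the runtime, laid out roughly as

	array[0] = total number of depend addresses (n_in + n_out)
	array[1] = number of out/inout addresses (n_out)
	array[2..] = the out/inout addresses, then the in addresses

   and chains a single new OMP_CLAUSE_DEPEND pointing at the array.  */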
7331 static void
7332 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7334 tree c, clauses;
7335 gimple *g;
7336 size_t n_in = 0, n_out = 0, idx = 2, i;
7338 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7339 gcc_assert (clauses);
7340 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7341 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7342 switch (OMP_CLAUSE_DEPEND_KIND (c))
7344 case OMP_CLAUSE_DEPEND_IN:
7345 n_in++;
7346 break;
7347 case OMP_CLAUSE_DEPEND_OUT:
7348 case OMP_CLAUSE_DEPEND_INOUT:
7349 n_out++;
7350 break;
7351 case OMP_CLAUSE_DEPEND_SOURCE:
7352 case OMP_CLAUSE_DEPEND_SINK:
7353 /* FALLTHRU */
7354 default:
7355 gcc_unreachable ();
7357 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7358 tree array = create_tmp_var (type);
7359 TREE_ADDRESSABLE (array) = 1;
7360 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7361 NULL_TREE);
7362 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7363 gimple_seq_add_stmt (iseq, g);
7364 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7365 NULL_TREE);
7366 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7367 gimple_seq_add_stmt (iseq, g);
7368 for (i = 0; i < 2; i++)
7370 if ((i ? n_in : n_out) == 0)
7371 continue;
7372 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7373 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7374 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7376 tree t = OMP_CLAUSE_DECL (c);
7377 t = fold_convert (ptr_type_node, t);
7378 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7379 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7380 NULL_TREE, NULL_TREE);
7381 g = gimple_build_assign (r, t);
7382 gimple_seq_add_stmt (iseq, g);
7385 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7386 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7387 OMP_CLAUSE_CHAIN (c) = *pclauses;
7388 *pclauses = c;
7389 tree clobber = build_constructor (type, NULL);
7390 TREE_THIS_VOLATILE (clobber) = 1;
7391 g = gimple_build_assign (array, clobber);
7392 gimple_seq_add_stmt (oseq, g);
7395 /* Lower the OpenMP parallel or task directive in the current statement
7396 in GSI_P. CTX holds context information for the directive. */
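/* A rough sketch of the result: the region body is rebuilt as

	.omp_data_i = &.omp_data_o;	(receiver_decl = &sender_decl)
	send-clause setup, lowered body, reductions, copy-back
	GIMPLE_OMP_RETURN

   while the replacing bind gets ILIST, the GIMPLE_OMP_PARALLEL or
   GIMPLE_OMP_TASK statement itself, and OLIST (including a clobber of
   the sender record).  Names here are illustrative.  */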
7398 static void
7399 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7401 tree clauses;
7402 tree child_fn, t;
7403 gimple *stmt = gsi_stmt (*gsi_p);
7404 gbind *par_bind, *bind, *dep_bind = NULL;
7405 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7406 location_t loc = gimple_location (stmt);
7408 clauses = gimple_omp_taskreg_clauses (stmt);
7409 par_bind
7410 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7411 par_body = gimple_bind_body (par_bind);
7412 child_fn = ctx->cb.dst_fn;
7413 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7414 && !gimple_omp_parallel_combined_p (stmt))
7416 struct walk_stmt_info wi;
7417 int ws_num = 0;
7419 memset (&wi, 0, sizeof (wi));
7420 wi.info = &ws_num;
7421 wi.val_only = true;
7422 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7423 if (ws_num == 1)
7424 gimple_omp_parallel_set_combined_p (stmt, true);
7426 gimple_seq dep_ilist = NULL;
7427 gimple_seq dep_olist = NULL;
7428 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7429 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7431 push_gimplify_context ();
7432 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7433 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7434 &dep_ilist, &dep_olist);
7437 if (ctx->srecord_type)
7438 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7440 push_gimplify_context ();
7442 par_olist = NULL;
7443 par_ilist = NULL;
7444 par_rlist = NULL;
7445 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7446 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7447 if (phony_construct && ctx->record_type)
7449 gcc_checking_assert (!ctx->receiver_decl);
7450 ctx->receiver_decl = create_tmp_var
7451 (build_reference_type (ctx->record_type), ".omp_rec");
7453 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7454 lower_omp (&par_body, ctx);
7455 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7456 lower_reduction_clauses (clauses, &par_rlist, ctx);
7458 /* Declare all the variables created by mapping and the variables
7459 declared in the scope of the parallel body. */
7460 record_vars_into (ctx->block_vars, child_fn);
7461 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7463 if (ctx->record_type)
7465 ctx->sender_decl
7466 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7467 : ctx->record_type, ".omp_data_o");
7468 DECL_NAMELESS (ctx->sender_decl) = 1;
7469 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7470 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7473 olist = NULL;
7474 ilist = NULL;
7475 lower_send_clauses (clauses, &ilist, &olist, ctx);
7476 lower_send_shared_vars (&ilist, &olist, ctx);
7478 if (ctx->record_type)
7480 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7481 TREE_THIS_VOLATILE (clobber) = 1;
7482 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7483 clobber));
7486 /* Once all the expansions are done, sequence all the different
7487 fragments inside gimple_omp_body. */
7489 new_body = NULL;
7491 if (ctx->record_type)
7493 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7494 /* fixup_child_record_type might have changed receiver_decl's type. */
7495 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7496 gimple_seq_add_stmt (&new_body,
7497 gimple_build_assign (ctx->receiver_decl, t));
7500 gimple_seq_add_seq (&new_body, par_ilist);
7501 gimple_seq_add_seq (&new_body, par_body);
7502 gimple_seq_add_seq (&new_body, par_rlist);
7503 if (ctx->cancellable)
7504 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7505 gimple_seq_add_seq (&new_body, par_olist);
7506 new_body = maybe_catch_exception (new_body);
7507 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7508 gimple_seq_add_stmt (&new_body,
7509 gimple_build_omp_continue (integer_zero_node,
7510 integer_zero_node));
7511 if (!phony_construct)
7513 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7514 gimple_omp_set_body (stmt, new_body);
7517 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7518 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7519 gimple_bind_add_seq (bind, ilist);
7520 if (!phony_construct)
7521 gimple_bind_add_stmt (bind, stmt);
7522 else
7523 gimple_bind_add_seq (bind, new_body);
7524 gimple_bind_add_seq (bind, olist);
7526 pop_gimplify_context (NULL);
7528 if (dep_bind)
7530 gimple_bind_add_seq (dep_bind, dep_ilist);
7531 gimple_bind_add_stmt (dep_bind, bind);
7532 gimple_bind_add_seq (dep_bind, dep_olist);
7533 pop_gimplify_context (dep_bind);
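/* A minimal sketch of the resulting shape (field and temporary names
   are illustrative):

     #pragma omp parallel shared(a)
       use (a);

   lowers to roughly

     .omp_data_o.a = a;
     #pragma omp parallel [data arg: .omp_data_o]
       .omp_data_i = &.omp_data_o;
       use (.omp_data_i->a);

   while the actual outlining into the child function is left to
   pass_expand_omp.  */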
7537 /* Lower the GIMPLE_OMP_TARGET in the current statement
7538 in GSI_P. CTX holds context information for the directive. */
7540 static void
7541 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7543 tree clauses;
7544 tree child_fn, t, c;
7545 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7546 gbind *tgt_bind, *bind, *dep_bind = NULL;
7547 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7548 location_t loc = gimple_location (stmt);
7549 bool offloaded, data_region;
7550 unsigned int map_cnt = 0;
7552 offloaded = is_gimple_omp_offloaded (stmt);
7553 switch (gimple_omp_target_kind (stmt))
7555 case GF_OMP_TARGET_KIND_REGION:
7556 case GF_OMP_TARGET_KIND_UPDATE:
7557 case GF_OMP_TARGET_KIND_ENTER_DATA:
7558 case GF_OMP_TARGET_KIND_EXIT_DATA:
7559 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7560 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7561 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7562 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7563 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7564 data_region = false;
7565 break;
7566 case GF_OMP_TARGET_KIND_DATA:
7567 case GF_OMP_TARGET_KIND_OACC_DATA:
7568 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7569 data_region = true;
7570 break;
7571 default:
7572 gcc_unreachable ();
7575 clauses = gimple_omp_target_clauses (stmt);
7577 gimple_seq dep_ilist = NULL;
7578 gimple_seq dep_olist = NULL;
7579 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7581 push_gimplify_context ();
7582 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7583 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7584 &dep_ilist, &dep_olist);
7587 tgt_bind = NULL;
7588 tgt_body = NULL;
7589 if (offloaded)
7591 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7592 tgt_body = gimple_bind_body (tgt_bind);
7594 else if (data_region)
7595 tgt_body = gimple_omp_body (stmt);
7596 child_fn = ctx->cb.dst_fn;
7598 push_gimplify_context ();
7599 fplist = NULL;
7601 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7602 switch (OMP_CLAUSE_CODE (c))
7604 tree var, x;
7606 default:
7607 break;
7608 case OMP_CLAUSE_MAP:
7609 #if CHECKING_P
7610 /* First check what we're prepared to handle in the following. */
7611 switch (OMP_CLAUSE_MAP_KIND (c))
7613 case GOMP_MAP_ALLOC:
7614 case GOMP_MAP_TO:
7615 case GOMP_MAP_FROM:
7616 case GOMP_MAP_TOFROM:
7617 case GOMP_MAP_POINTER:
7618 case GOMP_MAP_TO_PSET:
7619 case GOMP_MAP_DELETE:
7620 case GOMP_MAP_RELEASE:
7621 case GOMP_MAP_ALWAYS_TO:
7622 case GOMP_MAP_ALWAYS_FROM:
7623 case GOMP_MAP_ALWAYS_TOFROM:
7624 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7625 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7626 case GOMP_MAP_STRUCT:
7627 case GOMP_MAP_ALWAYS_POINTER:
7628 break;
7629 case GOMP_MAP_FORCE_ALLOC:
7630 case GOMP_MAP_FORCE_TO:
7631 case GOMP_MAP_FORCE_FROM:
7632 case GOMP_MAP_FORCE_TOFROM:
7633 case GOMP_MAP_FORCE_PRESENT:
7634 case GOMP_MAP_FORCE_DEVICEPTR:
7635 case GOMP_MAP_DEVICE_RESIDENT:
7636 case GOMP_MAP_LINK:
7637 gcc_assert (is_gimple_omp_oacc (stmt));
7638 break;
7639 default:
7640 gcc_unreachable ();
7642 #endif
7643 /* FALLTHRU */
7644 case OMP_CLAUSE_TO:
7645 case OMP_CLAUSE_FROM:
7646 oacc_firstprivate:
7647 var = OMP_CLAUSE_DECL (c);
7648 if (!DECL_P (var))
7650 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7651 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7652 && (OMP_CLAUSE_MAP_KIND (c)
7653 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7654 map_cnt++;
7655 continue;
7658 if (DECL_SIZE (var)
7659 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7661 tree var2 = DECL_VALUE_EXPR (var);
7662 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7663 var2 = TREE_OPERAND (var2, 0);
7664 gcc_assert (DECL_P (var2));
7665 var = var2;
7668 if (offloaded
7669 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7670 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7671 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7673 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7675 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7676 && varpool_node::get_create (var)->offloadable)
7677 continue;
7679 tree type = build_pointer_type (TREE_TYPE (var));
7680 tree new_var = lookup_decl (var, ctx);
7681 x = create_tmp_var_raw (type, get_name (new_var));
7682 gimple_add_tmp_var (x);
7683 x = build_simple_mem_ref (x);
7684 SET_DECL_VALUE_EXPR (new_var, x);
7685 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7687 continue;
7690 if (!maybe_lookup_field (var, ctx))
7691 continue;
7693 /* Don't remap oacc parallel reduction variables, because the
7694 intermediate result must be local to each gang. */
7695 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7696 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7698 x = build_receiver_ref (var, true, ctx);
7699 tree new_var = lookup_decl (var, ctx);
7701 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7702 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7703 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7704 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7705 x = build_simple_mem_ref (x);
7706 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7708 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7709 if (omp_is_reference (new_var))
7711 /* Create a local object to hold the instance
7712 value. */
7713 tree type = TREE_TYPE (TREE_TYPE (new_var));
7714 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7715 tree inst = create_tmp_var (type, id);
7716 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7717 x = build_fold_addr_expr (inst);
7719 gimplify_assign (new_var, x, &fplist);
7721 else if (DECL_P (new_var))
7723 SET_DECL_VALUE_EXPR (new_var, x);
7724 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7726 else
7727 gcc_unreachable ();
7729 map_cnt++;
7730 break;
7732 case OMP_CLAUSE_FIRSTPRIVATE:
7733 if (is_oacc_parallel (ctx))
7734 goto oacc_firstprivate;
7735 map_cnt++;
7736 var = OMP_CLAUSE_DECL (c);
7737 if (!omp_is_reference (var)
7738 && !is_gimple_reg_type (TREE_TYPE (var)))
7740 tree new_var = lookup_decl (var, ctx);
7741 if (is_variable_sized (var))
7743 tree pvar = DECL_VALUE_EXPR (var);
7744 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7745 pvar = TREE_OPERAND (pvar, 0);
7746 gcc_assert (DECL_P (pvar));
7747 tree new_pvar = lookup_decl (pvar, ctx);
7748 x = build_fold_indirect_ref (new_pvar);
7749 TREE_THIS_NOTRAP (x) = 1;
7751 else
7752 x = build_receiver_ref (var, true, ctx);
7753 SET_DECL_VALUE_EXPR (new_var, x);
7754 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7756 break;
7758 case OMP_CLAUSE_PRIVATE:
7759 if (is_gimple_omp_oacc (ctx->stmt))
7760 break;
7761 var = OMP_CLAUSE_DECL (c);
7762 if (is_variable_sized (var))
7764 tree new_var = lookup_decl (var, ctx);
7765 tree pvar = DECL_VALUE_EXPR (var);
7766 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7767 pvar = TREE_OPERAND (pvar, 0);
7768 gcc_assert (DECL_P (pvar));
7769 tree new_pvar = lookup_decl (pvar, ctx);
7770 x = build_fold_indirect_ref (new_pvar);
7771 TREE_THIS_NOTRAP (x) = 1;
7772 SET_DECL_VALUE_EXPR (new_var, x);
7773 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7775 break;
7777 case OMP_CLAUSE_USE_DEVICE_PTR:
7778 case OMP_CLAUSE_IS_DEVICE_PTR:
7779 var = OMP_CLAUSE_DECL (c);
7780 map_cnt++;
7781 if (is_variable_sized (var))
7783 tree new_var = lookup_decl (var, ctx);
7784 tree pvar = DECL_VALUE_EXPR (var);
7785 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7786 pvar = TREE_OPERAND (pvar, 0);
7787 gcc_assert (DECL_P (pvar));
7788 tree new_pvar = lookup_decl (pvar, ctx);
7789 x = build_fold_indirect_ref (new_pvar);
7790 TREE_THIS_NOTRAP (x) = 1;
7791 SET_DECL_VALUE_EXPR (new_var, x);
7792 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7794 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7796 tree new_var = lookup_decl (var, ctx);
7797 tree type = build_pointer_type (TREE_TYPE (var));
7798 x = create_tmp_var_raw (type, get_name (new_var));
7799 gimple_add_tmp_var (x);
7800 x = build_simple_mem_ref (x);
7801 SET_DECL_VALUE_EXPR (new_var, x);
7802 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7804 else
7806 tree new_var = lookup_decl (var, ctx);
7807 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7808 gimple_add_tmp_var (x);
7809 SET_DECL_VALUE_EXPR (new_var, x);
7810 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7812 break;
7815 if (offloaded)
7817 target_nesting_level++;
7818 lower_omp (&tgt_body, ctx);
7819 target_nesting_level--;
7821 else if (data_region)
7822 lower_omp (&tgt_body, ctx);
7824 if (offloaded)
7826 /* Declare all the variables created by mapping and the variables
7827 declared in the scope of the target body. */
7828 record_vars_into (ctx->block_vars, child_fn);
7829 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7832 olist = NULL;
7833 ilist = NULL;
7834 if (ctx->record_type)
7836 ctx->sender_decl
7837 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7838 DECL_NAMELESS (ctx->sender_decl) = 1;
7839 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7840 t = make_tree_vec (3);
7841 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7842 TREE_VEC_ELT (t, 1)
7843 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7844 ".omp_data_sizes");
7845 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7846 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7847 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7848 tree tkind_type = short_unsigned_type_node;
7849 int talign_shift = 8;
7850 TREE_VEC_ELT (t, 2)
7851 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7852 ".omp_data_kinds");
7853 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7854 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7855 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7856 gimple_omp_target_set_data_arg (stmt, t);
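/* The TREE_VEC built above carries the three parallel arrays the
   runtime expects: host addresses (.omp_data_arr), sizes
   (.omp_data_sizes) and map kinds (.omp_data_kinds); expansion later
   hands them to the launch routine, e.g. GOMP_target_ext.  */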
7858 vec<constructor_elt, va_gc> *vsize;
7859 vec<constructor_elt, va_gc> *vkind;
7860 vec_alloc (vsize, map_cnt);
7861 vec_alloc (vkind, map_cnt);
7862 unsigned int map_idx = 0;
7864 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7865 switch (OMP_CLAUSE_CODE (c))
7867 tree ovar, nc, s, purpose, var, x, type;
7868 unsigned int talign;
7870 default:
7871 break;
7873 case OMP_CLAUSE_MAP:
7874 case OMP_CLAUSE_TO:
7875 case OMP_CLAUSE_FROM:
7876 oacc_firstprivate_map:
7877 nc = c;
7878 ovar = OMP_CLAUSE_DECL (c);
7879 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7880 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7881 || (OMP_CLAUSE_MAP_KIND (c)
7882 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7883 break;
7884 if (!DECL_P (ovar))
7886 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7887 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7889 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7890 == get_base_address (ovar));
7891 nc = OMP_CLAUSE_CHAIN (c);
7892 ovar = OMP_CLAUSE_DECL (nc);
7894 else
7896 tree x = build_sender_ref (ovar, ctx);
7897 tree v
7898 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7899 gimplify_assign (x, v, &ilist);
7900 nc = NULL_TREE;
7903 else
7905 if (DECL_SIZE (ovar)
7906 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7908 tree ovar2 = DECL_VALUE_EXPR (ovar);
7909 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7910 ovar2 = TREE_OPERAND (ovar2, 0);
7911 gcc_assert (DECL_P (ovar2));
7912 ovar = ovar2;
7914 if (!maybe_lookup_field (ovar, ctx))
7915 continue;
7918 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7919 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7920 talign = DECL_ALIGN_UNIT (ovar);
7921 if (nc)
7923 var = lookup_decl_in_outer_ctx (ovar, ctx);
7924 x = build_sender_ref (ovar, ctx);
7926 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7927 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7928 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7929 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7931 gcc_assert (offloaded);
7932 tree avar
7933 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7934 mark_addressable (avar);
7935 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7936 talign = DECL_ALIGN_UNIT (avar);
7937 avar = build_fold_addr_expr (avar);
7938 gimplify_assign (x, avar, &ilist);
7940 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7942 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7943 if (!omp_is_reference (var))
7945 if (is_gimple_reg (var)
7946 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7947 TREE_NO_WARNING (var) = 1;
7948 var = build_fold_addr_expr (var);
7950 else
7951 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7952 gimplify_assign (x, var, &ilist);
7954 else if (is_gimple_reg (var))
7956 gcc_assert (offloaded);
7957 tree avar = create_tmp_var (TREE_TYPE (var));
7958 mark_addressable (avar);
7959 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7960 if (GOMP_MAP_COPY_TO_P (map_kind)
7961 || map_kind == GOMP_MAP_POINTER
7962 || map_kind == GOMP_MAP_TO_PSET
7963 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7965 /* If we need to initialize a temporary
7966 with VAR because it is not addressable, and
7967 the variable hasn't been initialized yet, then
7968 we'll get a warning for the store to avar.
7969 Don't warn in that case; the mapping might
7970 be implicit. */
7971 TREE_NO_WARNING (var) = 1;
7972 gimplify_assign (avar, var, &ilist);
7974 avar = build_fold_addr_expr (avar);
7975 gimplify_assign (x, avar, &ilist);
7976 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7977 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7978 && !TYPE_READONLY (TREE_TYPE (var)))
7980 x = unshare_expr (x);
7981 x = build_simple_mem_ref (x);
7982 gimplify_assign (var, x, &olist);
7985 else
7987 var = build_fold_addr_expr (var);
7988 gimplify_assign (x, var, &ilist);
7991 s = NULL_TREE;
7992 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7994 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7995 s = TREE_TYPE (ovar);
7996 if (TREE_CODE (s) == REFERENCE_TYPE)
7997 s = TREE_TYPE (s);
7998 s = TYPE_SIZE_UNIT (s);
8000 else
8001 s = OMP_CLAUSE_SIZE (c);
8002 if (s == NULL_TREE)
8003 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8004 s = fold_convert (size_type_node, s);
8005 purpose = size_int (map_idx++);
8006 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8007 if (TREE_CODE (s) != INTEGER_CST)
8008 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8010 unsigned HOST_WIDE_INT tkind, tkind_zero;
8011 switch (OMP_CLAUSE_CODE (c))
8013 case OMP_CLAUSE_MAP:
8014 tkind = OMP_CLAUSE_MAP_KIND (c);
8015 tkind_zero = tkind;
8016 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
8017 switch (tkind)
8019 case GOMP_MAP_ALLOC:
8020 case GOMP_MAP_TO:
8021 case GOMP_MAP_FROM:
8022 case GOMP_MAP_TOFROM:
8023 case GOMP_MAP_ALWAYS_TO:
8024 case GOMP_MAP_ALWAYS_FROM:
8025 case GOMP_MAP_ALWAYS_TOFROM:
8026 case GOMP_MAP_RELEASE:
8027 case GOMP_MAP_FORCE_TO:
8028 case GOMP_MAP_FORCE_FROM:
8029 case GOMP_MAP_FORCE_TOFROM:
8030 case GOMP_MAP_FORCE_PRESENT:
8031 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8032 break;
8033 case GOMP_MAP_DELETE:
8034 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
break;
8035 default:
8036 break;
8038 if (tkind_zero != tkind)
8040 if (integer_zerop (s))
8041 tkind = tkind_zero;
8042 else if (integer_nonzerop (s))
8043 tkind_zero = tkind;
8045 break;
8046 case OMP_CLAUSE_FIRSTPRIVATE:
8047 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8048 tkind = GOMP_MAP_TO;
8049 tkind_zero = tkind;
8050 break;
8051 case OMP_CLAUSE_TO:
8052 tkind = GOMP_MAP_TO;
8053 tkind_zero = tkind;
8054 break;
8055 case OMP_CLAUSE_FROM:
8056 tkind = GOMP_MAP_FROM;
8057 tkind_zero = tkind;
8058 break;
8059 default:
8060 gcc_unreachable ();
8062 gcc_checking_assert (tkind
8063 < (HOST_WIDE_INT_C (1U) << talign_shift));
8064 gcc_checking_assert (tkind_zero
8065 < (HOST_WIDE_INT_C (1U) << talign_shift));
8066 talign = ceil_log2 (talign);
8067 tkind |= talign << talign_shift;
8068 tkind_zero |= talign << talign_shift;
8069 gcc_checking_assert (tkind
8070 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8071 gcc_checking_assert (tkind_zero
8072 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
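/* When the section length S is known only at run time, the map kind
   must be chosen at run time as well: the COND_EXPR below picks
   TKIND_ZERO when S is zero, and the kinds array can then no longer
   live in static storage.  */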
8073 if (tkind == tkind_zero)
8074 x = build_int_cstu (tkind_type, tkind);
8075 else
8077 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8078 x = build3 (COND_EXPR, tkind_type,
8079 fold_build2 (EQ_EXPR, boolean_type_node,
8080 unshare_expr (s), size_zero_node),
8081 build_int_cstu (tkind_type, tkind_zero),
8082 build_int_cstu (tkind_type, tkind));
8084 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8085 if (nc && nc != c)
8086 c = nc;
8087 break;
8089 case OMP_CLAUSE_FIRSTPRIVATE:
8090 if (is_oacc_parallel (ctx))
8091 goto oacc_firstprivate_map;
8092 ovar = OMP_CLAUSE_DECL (c);
8093 if (omp_is_reference (ovar))
8094 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8095 else
8096 talign = DECL_ALIGN_UNIT (ovar);
8097 var = lookup_decl_in_outer_ctx (ovar, ctx);
8098 x = build_sender_ref (ovar, ctx);
8099 tkind = GOMP_MAP_FIRSTPRIVATE;
8100 type = TREE_TYPE (ovar);
8101 if (omp_is_reference (ovar))
8102 type = TREE_TYPE (type);
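/* Small scalars are passed by value rather than by reference: the
   value itself, widened to pointer size, is stored in the pointer
   slot and tagged GOMP_MAP_FIRSTPRIVATE_INT below, so no host
   address needs to be transferred.  */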
8103 if ((INTEGRAL_TYPE_P (type)
8104 && TYPE_PRECISION (type) <= POINTER_SIZE)
8105 || TREE_CODE (type) == POINTER_TYPE)
8107 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8108 tree t = var;
8109 if (omp_is_reference (var))
8110 t = build_simple_mem_ref (var);
8111 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8112 TREE_NO_WARNING (var) = 1;
8113 if (TREE_CODE (type) != POINTER_TYPE)
8114 t = fold_convert (pointer_sized_int_node, t);
8115 t = fold_convert (TREE_TYPE (x), t);
8116 gimplify_assign (x, t, &ilist);
8118 else if (omp_is_reference (var))
8119 gimplify_assign (x, var, &ilist);
8120 else if (is_gimple_reg (var))
8122 tree avar = create_tmp_var (TREE_TYPE (var));
8123 mark_addressable (avar);
8124 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8125 TREE_NO_WARNING (var) = 1;
8126 gimplify_assign (avar, var, &ilist);
8127 avar = build_fold_addr_expr (avar);
8128 gimplify_assign (x, avar, &ilist);
8130 else
8132 var = build_fold_addr_expr (var);
8133 gimplify_assign (x, var, &ilist);
8135 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8136 s = size_int (0);
8137 else if (omp_is_reference (ovar))
8138 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8139 else
8140 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8141 s = fold_convert (size_type_node, s);
8142 purpose = size_int (map_idx++);
8143 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8144 if (TREE_CODE (s) != INTEGER_CST)
8145 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8147 gcc_checking_assert (tkind
8148 < (HOST_WIDE_INT_C (1U) << talign_shift));
8149 talign = ceil_log2 (talign);
8150 tkind |= talign << talign_shift;
8151 gcc_checking_assert (tkind
8152 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8153 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8154 build_int_cstu (tkind_type, tkind));
8155 break;
8157 case OMP_CLAUSE_USE_DEVICE_PTR:
8158 case OMP_CLAUSE_IS_DEVICE_PTR:
8159 ovar = OMP_CLAUSE_DECL (c);
8160 var = lookup_decl_in_outer_ctx (ovar, ctx);
8161 x = build_sender_ref (ovar, ctx);
8162 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8163 tkind = GOMP_MAP_USE_DEVICE_PTR;
8164 else
8165 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8166 type = TREE_TYPE (ovar);
8167 if (TREE_CODE (type) == ARRAY_TYPE)
8168 var = build_fold_addr_expr (var);
8169 else
8171 if (omp_is_reference (ovar))
8173 type = TREE_TYPE (type);
8174 if (TREE_CODE (type) != ARRAY_TYPE)
8175 var = build_simple_mem_ref (var);
8176 var = fold_convert (TREE_TYPE (x), var);
8179 gimplify_assign (x, var, &ilist);
8180 s = size_int (0);
8181 purpose = size_int (map_idx++);
8182 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8183 gcc_checking_assert (tkind
8184 < (HOST_WIDE_INT_C (1U) << talign_shift));
8185 gcc_checking_assert (tkind
8186 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8187 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8188 build_int_cstu (tkind_type, tkind));
8189 break;
8192 gcc_assert (map_idx == map_cnt);
8194 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8195 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8196 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8197 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8198 for (int i = 1; i <= 2; i++)
8199 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8201 gimple_seq initlist = NULL;
8202 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8203 TREE_VEC_ELT (t, i)),
8204 &initlist, true, NULL_TREE);
8205 gimple_seq_add_seq (&ilist, initlist);
8207 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8208 NULL);
8209 TREE_THIS_VOLATILE (clobber) = 1;
8210 gimple_seq_add_stmt (&olist,
8211 gimple_build_assign (TREE_VEC_ELT (t, i),
8212 clobber));
8215 tree clobber = build_constructor (ctx->record_type, NULL);
8216 TREE_THIS_VOLATILE (clobber) = 1;
8217 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8218 clobber));
8221 /* Once all the expansions are done, sequence all the different
8222 fragments inside gimple_omp_body. */
8224 new_body = NULL;
8226 if (offloaded
8227 && ctx->record_type)
8229 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8230 /* fixup_child_record_type might have changed receiver_decl's type. */
8231 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8232 gimple_seq_add_stmt (&new_body,
8233 gimple_build_assign (ctx->receiver_decl, t));
8235 gimple_seq_add_seq (&new_body, fplist);
8237 if (offloaded || data_region)
8239 tree prev = NULL_TREE;
8240 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8241 switch (OMP_CLAUSE_CODE (c))
8243 tree var, x;
8244 default:
8245 break;
8246 case OMP_CLAUSE_FIRSTPRIVATE:
8247 if (is_gimple_omp_oacc (ctx->stmt))
8248 break;
8249 var = OMP_CLAUSE_DECL (c);
8250 if (omp_is_reference (var)
8251 || is_gimple_reg_type (TREE_TYPE (var)))
8253 tree new_var = lookup_decl (var, ctx);
8254 tree type;
8255 type = TREE_TYPE (var);
8256 if (omp_is_reference (var))
8257 type = TREE_TYPE (type);
8258 if ((INTEGRAL_TYPE_P (type)
8259 && TYPE_PRECISION (type) <= POINTER_SIZE)
8260 || TREE_CODE (type) == POINTER_TYPE)
8262 x = build_receiver_ref (var, false, ctx);
8263 if (TREE_CODE (type) != POINTER_TYPE)
8264 x = fold_convert (pointer_sized_int_node, x);
8265 x = fold_convert (type, x);
8266 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8267 fb_rvalue);
8268 if (omp_is_reference (var))
8270 tree v = create_tmp_var_raw (type, get_name (var));
8271 gimple_add_tmp_var (v);
8272 TREE_ADDRESSABLE (v) = 1;
8273 gimple_seq_add_stmt (&new_body,
8274 gimple_build_assign (v, x));
8275 x = build_fold_addr_expr (v);
8277 gimple_seq_add_stmt (&new_body,
8278 gimple_build_assign (new_var, x));
8280 else
8282 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8283 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8284 fb_rvalue);
8285 gimple_seq_add_stmt (&new_body,
8286 gimple_build_assign (new_var, x));
8289 else if (is_variable_sized (var))
8291 tree pvar = DECL_VALUE_EXPR (var);
8292 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8293 pvar = TREE_OPERAND (pvar, 0);
8294 gcc_assert (DECL_P (pvar));
8295 tree new_var = lookup_decl (pvar, ctx);
8296 x = build_receiver_ref (var, false, ctx);
8297 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8298 gimple_seq_add_stmt (&new_body,
8299 gimple_build_assign (new_var, x));
8301 break;
8302 case OMP_CLAUSE_PRIVATE:
8303 if (is_gimple_omp_oacc (ctx->stmt))
8304 break;
8305 var = OMP_CLAUSE_DECL (c);
8306 if (omp_is_reference (var))
8308 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8309 tree new_var = lookup_decl (var, ctx);
8310 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8311 if (TREE_CONSTANT (x))
8313 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8314 get_name (var));
8315 gimple_add_tmp_var (x);
8316 TREE_ADDRESSABLE (x) = 1;
8317 x = build_fold_addr_expr_loc (clause_loc, x);
8319 else
8320 break;
8322 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8323 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8324 gimple_seq_add_stmt (&new_body,
8325 gimple_build_assign (new_var, x));
8327 break;
8328 case OMP_CLAUSE_USE_DEVICE_PTR:
8329 case OMP_CLAUSE_IS_DEVICE_PTR:
8330 var = OMP_CLAUSE_DECL (c);
8331 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8332 x = build_sender_ref (var, ctx);
8333 else
8334 x = build_receiver_ref (var, false, ctx);
8335 if (is_variable_sized (var))
8337 tree pvar = DECL_VALUE_EXPR (var);
8338 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8339 pvar = TREE_OPERAND (pvar, 0);
8340 gcc_assert (DECL_P (pvar));
8341 tree new_var = lookup_decl (pvar, ctx);
8342 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8343 gimple_seq_add_stmt (&new_body,
8344 gimple_build_assign (new_var, x));
8346 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8348 tree new_var = lookup_decl (var, ctx);
8349 new_var = DECL_VALUE_EXPR (new_var);
8350 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8351 new_var = TREE_OPERAND (new_var, 0);
8352 gcc_assert (DECL_P (new_var));
8353 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8354 gimple_seq_add_stmt (&new_body,
8355 gimple_build_assign (new_var, x));
8357 else
8359 tree type = TREE_TYPE (var);
8360 tree new_var = lookup_decl (var, ctx);
8361 if (omp_is_reference (var))
8363 type = TREE_TYPE (type);
8364 if (TREE_CODE (type) != ARRAY_TYPE)
8366 tree v = create_tmp_var_raw (type, get_name (var));
8367 gimple_add_tmp_var (v);
8368 TREE_ADDRESSABLE (v) = 1;
8369 x = fold_convert (type, x);
8370 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8371 fb_rvalue);
8372 gimple_seq_add_stmt (&new_body,
8373 gimple_build_assign (v, x));
8374 x = build_fold_addr_expr (v);
8377 new_var = DECL_VALUE_EXPR (new_var);
8378 x = fold_convert (TREE_TYPE (new_var), x);
8379 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8380 gimple_seq_add_stmt (&new_body,
8381 gimple_build_assign (new_var, x));
8383 break;
8385 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8386 so that any firstprivate vars referenced by their OMP_CLAUSE_SIZE
8387 have already been handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8388 or references to VLAs. */
8389 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8390 switch (OMP_CLAUSE_CODE (c))
8392 tree var;
8393 default:
8394 break;
8395 case OMP_CLAUSE_MAP:
8396 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8397 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8399 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8400 HOST_WIDE_INT offset = 0;
8401 gcc_assert (prev);
8402 var = OMP_CLAUSE_DECL (c);
8403 if (DECL_P (var)
8404 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8405 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8406 ctx))
8407 && varpool_node::get_create (var)->offloadable)
8408 break;
8409 if (TREE_CODE (var) == INDIRECT_REF
8410 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8411 var = TREE_OPERAND (var, 0);
8412 if (TREE_CODE (var) == COMPONENT_REF)
8414 var = get_addr_base_and_unit_offset (var, &offset);
8415 gcc_assert (var != NULL_TREE && DECL_P (var));
8417 else if (DECL_SIZE (var)
8418 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8420 tree var2 = DECL_VALUE_EXPR (var);
8421 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8422 var2 = TREE_OPERAND (var2, 0);
8423 gcc_assert (DECL_P (var2));
8424 var = var2;
8426 tree new_var = lookup_decl (var, ctx), x;
8427 tree type = TREE_TYPE (new_var);
8428 bool is_ref;
8429 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8430 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8431 == COMPONENT_REF))
8433 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8434 is_ref = true;
8435 new_var = build2 (MEM_REF, type,
8436 build_fold_addr_expr (new_var),
8437 build_int_cst (build_pointer_type (type),
8438 offset));
8440 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8442 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8443 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8444 new_var = build2 (MEM_REF, type,
8445 build_fold_addr_expr (new_var),
8446 build_int_cst (build_pointer_type (type),
8447 offset));
8449 else
8450 is_ref = omp_is_reference (var);
8451 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8452 is_ref = false;
8453 bool ref_to_array = false;
8454 if (is_ref)
8456 type = TREE_TYPE (type);
8457 if (TREE_CODE (type) == ARRAY_TYPE)
8459 type = build_pointer_type (type);
8460 ref_to_array = true;
8463 else if (TREE_CODE (type) == ARRAY_TYPE)
8465 tree decl2 = DECL_VALUE_EXPR (new_var);
8466 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8467 decl2 = TREE_OPERAND (decl2, 0);
8468 gcc_assert (DECL_P (decl2));
8469 new_var = decl2;
8470 type = TREE_TYPE (new_var);
8472 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8473 x = fold_convert_loc (clause_loc, type, x);
8474 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8476 tree bias = OMP_CLAUSE_SIZE (c);
8477 if (DECL_P (bias))
8478 bias = lookup_decl (bias, ctx);
8479 bias = fold_convert_loc (clause_loc, sizetype, bias);
8480 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8481 bias);
8482 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8483 TREE_TYPE (x), x, bias);
8485 if (ref_to_array)
8486 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8487 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8488 if (is_ref && !ref_to_array)
8490 tree t = create_tmp_var_raw (type, get_name (var));
8491 gimple_add_tmp_var (t);
8492 TREE_ADDRESSABLE (t) = 1;
8493 gimple_seq_add_stmt (&new_body,
8494 gimple_build_assign (t, x));
8495 x = build_fold_addr_expr_loc (clause_loc, t);
8497 gimple_seq_add_stmt (&new_body,
8498 gimple_build_assign (new_var, x));
8499 prev = NULL_TREE;
8501 else if (OMP_CLAUSE_CHAIN (c)
8502 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8503 == OMP_CLAUSE_MAP
8504 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8505 == GOMP_MAP_FIRSTPRIVATE_POINTER
8506 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8507 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8508 prev = c;
8509 break;
8510 case OMP_CLAUSE_PRIVATE:
8511 var = OMP_CLAUSE_DECL (c);
8512 if (is_variable_sized (var))
8514 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8515 tree new_var = lookup_decl (var, ctx);
8516 tree pvar = DECL_VALUE_EXPR (var);
8517 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8518 pvar = TREE_OPERAND (pvar, 0);
8519 gcc_assert (DECL_P (pvar));
8520 tree new_pvar = lookup_decl (pvar, ctx);
8521 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8522 tree al = size_int (DECL_ALIGN (var));
8523 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8524 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8525 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8526 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8527 gimple_seq_add_stmt (&new_body,
8528 gimple_build_assign (new_pvar, x));
8530 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8532 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8533 tree new_var = lookup_decl (var, ctx);
8534 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8535 if (TREE_CONSTANT (x))
8536 break;
8537 else
8539 tree atmp
8540 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8541 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8542 tree al = size_int (TYPE_ALIGN (rtype));
8543 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8546 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8547 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8548 gimple_seq_add_stmt (&new_body,
8549 gimple_build_assign (new_var, x));
8551 break;
8554 gimple_seq fork_seq = NULL;
8555 gimple_seq join_seq = NULL;
8557 if (is_oacc_parallel (ctx))
8559 /* If there are reductions on the offloaded region itself, treat
8560 them as a dummy GANG loop. */
8561 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8563 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8564 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8567 gimple_seq_add_seq (&new_body, fork_seq);
8568 gimple_seq_add_seq (&new_body, tgt_body);
8569 gimple_seq_add_seq (&new_body, join_seq);
8571 if (offloaded)
8572 new_body = maybe_catch_exception (new_body);
8574 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8575 gimple_omp_set_body (stmt, new_body);
8578 bind = gimple_build_bind (NULL, NULL,
8579 tgt_bind ? gimple_bind_block (tgt_bind)
8580 : NULL_TREE);
8581 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8582 gimple_bind_add_seq (bind, ilist);
8583 gimple_bind_add_stmt (bind, stmt);
8584 gimple_bind_add_seq (bind, olist);
8586 pop_gimplify_context (NULL);
8588 if (dep_bind)
8590 gimple_bind_add_seq (dep_bind, dep_ilist);
8591 gimple_bind_add_stmt (dep_bind, bind);
8592 gimple_bind_add_seq (dep_bind, dep_olist);
8593 pop_gimplify_context (dep_bind);
8597 /* Lower code for an OpenMP teams directive. */
8599 static void
8600 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8602 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8603 push_gimplify_context ();
8605 tree block = make_node (BLOCK);
8606 gbind *bind = gimple_build_bind (NULL, NULL, block);
8607 gsi_replace (gsi_p, bind, true);
8608 gimple_seq bind_body = NULL;
8609 gimple_seq dlist = NULL;
8610 gimple_seq olist = NULL;
8612 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8613 OMP_CLAUSE_NUM_TEAMS);
8614 if (num_teams == NULL_TREE)
8615 num_teams = build_int_cst (unsigned_type_node, 0);
8616 else
8618 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8619 num_teams = fold_convert (unsigned_type_node, num_teams);
8620 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8622 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8623 OMP_CLAUSE_THREAD_LIMIT);
8624 if (thread_limit == NULL_TREE)
8625 thread_limit = build_int_cst (unsigned_type_node, 0);
8626 else
8628 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8629 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8630 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8631 fb_rvalue);
8634 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8635 &bind_body, &dlist, ctx, NULL);
8636 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8637 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8638 if (!gimple_omp_teams_grid_phony (teams_stmt))
8640 gimple_seq_add_stmt (&bind_body, teams_stmt);
8641 location_t loc = gimple_location (teams_stmt);
8642 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8643 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8644 gimple_set_location (call, loc);
8645 gimple_seq_add_stmt (&bind_body, call);
8648 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8649 gimple_omp_set_body (teams_stmt, NULL);
8650 gimple_seq_add_seq (&bind_body, olist);
8651 gimple_seq_add_seq (&bind_body, dlist);
8652 if (!gimple_omp_teams_grid_phony (teams_stmt))
8653 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8654 gimple_bind_set_body (bind, bind_body);
8656 pop_gimplify_context (bind);
8658 gimple_bind_append_vars (bind, ctx->block_vars);
8659 BLOCK_VARS (block) = ctx->block_vars;
8660 if (BLOCK_VARS (block))
8661 TREE_USED (block) = 1;
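/* As a sketch (values illustrative), lowering

     #pragma omp teams num_teams(4) thread_limit(8)
       body;

   produces roughly

     GOMP_teams (4, 8);
     body;

   with 0 passed for an omitted clause, which lets the runtime pick a
   default.  */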
8664 /* Lower code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8666 static void
8667 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8669 gimple *stmt = gsi_stmt (*gsi_p);
8670 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8671 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8672 gimple_build_omp_return (false));
8676 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8677 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8678 of OMP context, but with task_shared_vars set. */
8680 static tree
8681 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8682 void *data)
8684 tree t = *tp;
8686 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8687 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8688 return t;
8690 if (task_shared_vars
8691 && DECL_P (t)
8692 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8693 return t;
8695 /* If a global variable has been privatized, TREE_CONSTANT on
8696 ADDR_EXPR might be wrong. */
8697 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8698 recompute_tree_invariant_for_addr_expr (t);
8700 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8701 return NULL_TREE;
8704 /* Data to be communicated between lower_omp_regimplify_operands and
8705 lower_omp_regimplify_operands_p. */
8707 struct lower_omp_regimplify_operands_data
8709 omp_context *ctx;
8710 vec<tree> *decls;
8713 /* Helper function for lower_omp_regimplify_operands. Find
8714 omp_member_access_dummy_var vars and temporarily adjust their
8715 DECL_VALUE_EXPRs if needed. */
8717 static tree
8718 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8719 void *data)
8721 tree t = omp_member_access_dummy_var (*tp);
8722 if (t)
8724 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8725 lower_omp_regimplify_operands_data *ldata
8726 = (lower_omp_regimplify_operands_data *) wi->info;
8727 tree o = maybe_lookup_decl (t, ldata->ctx);
8728 if (o != t)
8730 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8731 ldata->decls->safe_push (*tp);
8732 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8733 SET_DECL_VALUE_EXPR (*tp, v);
8736 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8737 return NULL_TREE;
8740 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8741 of omp_member_access_dummy_var vars during regimplification. */
8743 static void
8744 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8745 gimple_stmt_iterator *gsi_p)
8747 auto_vec<tree, 10> decls;
8748 if (ctx)
8750 struct walk_stmt_info wi;
8751 memset (&wi, '\0', sizeof (wi));
8752 struct lower_omp_regimplify_operands_data data;
8753 data.ctx = ctx;
8754 data.decls = &decls;
8755 wi.info = &data;
8756 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8758 gimple_regimplify_operands (stmt, gsi_p);
8759 while (!decls.is_empty ())
8761 tree t = decls.pop ();
8762 tree v = decls.pop ();
8763 SET_DECL_VALUE_EXPR (t, v);
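/* Lower the statement pointed to by GSI_P within context CTX:
   dispatch OMP constructs to their lowering routines, rewrite
   cancellation-related calls, and regimplify operands referring to
   remapped variables.  */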
8767 static void
8768 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8770 gimple *stmt = gsi_stmt (*gsi_p);
8771 struct walk_stmt_info wi;
8772 gcall *call_stmt;
8774 if (gimple_has_location (stmt))
8775 input_location = gimple_location (stmt);
8777 if (task_shared_vars)
8778 memset (&wi, '\0', sizeof (wi));
8780 /* If we have issued syntax errors, avoid doing any heavy lifting.
8781 Just replace the OMP directives with a NOP to avoid
8782 confusing RTL expansion. */
8783 if (seen_error () && is_gimple_omp (stmt))
8785 gsi_replace (gsi_p, gimple_build_nop (), true);
8786 return;
8789 switch (gimple_code (stmt))
8791 case GIMPLE_COND:
8793 gcond *cond_stmt = as_a <gcond *> (stmt);
8794 if ((ctx || task_shared_vars)
8795 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8796 lower_omp_regimplify_p,
8797 ctx ? NULL : &wi, NULL)
8798 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8799 lower_omp_regimplify_p,
8800 ctx ? NULL : &wi, NULL)))
8801 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8803 break;
8804 case GIMPLE_CATCH:
8805 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8806 break;
8807 case GIMPLE_EH_FILTER:
8808 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8809 break;
8810 case GIMPLE_TRY:
8811 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8812 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8813 break;
8814 case GIMPLE_TRANSACTION:
8815 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8816 ctx);
8817 break;
8818 case GIMPLE_BIND:
8819 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8820 break;
8821 case GIMPLE_OMP_PARALLEL:
8822 case GIMPLE_OMP_TASK:
8823 ctx = maybe_lookup_ctx (stmt);
8824 gcc_assert (ctx);
8825 if (ctx->cancellable)
8826 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8827 lower_omp_taskreg (gsi_p, ctx);
8828 break;
8829 case GIMPLE_OMP_FOR:
8830 ctx = maybe_lookup_ctx (stmt);
8831 gcc_assert (ctx);
8832 if (ctx->cancellable)
8833 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8834 lower_omp_for (gsi_p, ctx);
8835 break;
8836 case GIMPLE_OMP_SECTIONS:
8837 ctx = maybe_lookup_ctx (stmt);
8838 gcc_assert (ctx);
8839 if (ctx->cancellable)
8840 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8841 lower_omp_sections (gsi_p, ctx);
8842 break;
8843 case GIMPLE_OMP_SINGLE:
8844 ctx = maybe_lookup_ctx (stmt);
8845 gcc_assert (ctx);
8846 lower_omp_single (gsi_p, ctx);
8847 break;
8848 case GIMPLE_OMP_MASTER:
8849 ctx = maybe_lookup_ctx (stmt);
8850 gcc_assert (ctx);
8851 lower_omp_master (gsi_p, ctx);
8852 break;
8853 case GIMPLE_OMP_TASKGROUP:
8854 ctx = maybe_lookup_ctx (stmt);
8855 gcc_assert (ctx);
8856 lower_omp_taskgroup (gsi_p, ctx);
8857 break;
8858 case GIMPLE_OMP_ORDERED:
8859 ctx = maybe_lookup_ctx (stmt);
8860 gcc_assert (ctx);
8861 lower_omp_ordered (gsi_p, ctx);
8862 break;
8863 case GIMPLE_OMP_CRITICAL:
8864 ctx = maybe_lookup_ctx (stmt);
8865 gcc_assert (ctx);
8866 lower_omp_critical (gsi_p, ctx);
8867 break;
8868 case GIMPLE_OMP_ATOMIC_LOAD:
8869 if ((ctx || task_shared_vars)
8870 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8871 as_a <gomp_atomic_load *> (stmt)),
8872 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8873 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8874 break;
8875 case GIMPLE_OMP_TARGET:
8876 ctx = maybe_lookup_ctx (stmt);
8877 gcc_assert (ctx);
8878 lower_omp_target (gsi_p, ctx);
8879 break;
8880 case GIMPLE_OMP_TEAMS:
8881 ctx = maybe_lookup_ctx (stmt);
8882 gcc_assert (ctx);
8883 lower_omp_teams (gsi_p, ctx);
8884 break;
8885 case GIMPLE_OMP_GRID_BODY:
8886 ctx = maybe_lookup_ctx (stmt);
8887 gcc_assert (ctx);
8888 lower_omp_grid_body (gsi_p, ctx);
8889 break;
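/* In a cancellable region, GOMP_barrier (), GOMP_cancel () and
   GOMP_cancellation_point () grow a result and a conditional branch;
   schematically:

     tmp = GOMP_barrier_cancel ();
     if (tmp != 0) goto cancel_label; else goto fallthru_label;  */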
8890 case GIMPLE_CALL:
8891 tree fndecl;
8892 call_stmt = as_a <gcall *> (stmt);
8893 fndecl = gimple_call_fndecl (call_stmt);
8894 if (fndecl
8895 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8896 switch (DECL_FUNCTION_CODE (fndecl))
8898 case BUILT_IN_GOMP_BARRIER:
8899 if (ctx == NULL)
8900 break;
8901 /* FALLTHRU */
8902 case BUILT_IN_GOMP_CANCEL:
8903 case BUILT_IN_GOMP_CANCELLATION_POINT:
8904 omp_context *cctx;
8905 cctx = ctx;
8906 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8907 cctx = cctx->outer;
8908 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8909 if (!cctx->cancellable)
8911 if (DECL_FUNCTION_CODE (fndecl)
8912 == BUILT_IN_GOMP_CANCELLATION_POINT)
8914 stmt = gimple_build_nop ();
8915 gsi_replace (gsi_p, stmt, false);
8917 break;
8919 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8921 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8922 gimple_call_set_fndecl (call_stmt, fndecl);
8923 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8925 tree lhs;
8926 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8927 gimple_call_set_lhs (call_stmt, lhs);
8928 tree fallthru_label;
8929 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8930 gimple *g;
8931 g = gimple_build_label (fallthru_label);
8932 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8933 g = gimple_build_cond (NE_EXPR, lhs,
8934 fold_convert (TREE_TYPE (lhs),
8935 boolean_false_node),
8936 cctx->cancel_label, fallthru_label);
8937 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8938 break;
8939 default:
8940 break;
8942 /* FALLTHRU */
8943 default:
8944 if ((ctx || task_shared_vars)
8945 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8946 ctx ? NULL : &wi))
8948 /* Just remove clobbers; this should happen only if we have
8949 "privatized" local addressable variables in SIMD regions.
8950 The clobber isn't needed in that case, and gimplifying the
8951 address of the ARRAY_REF into a pointer and creating a
8952 MEM_REF based clobber would create worse code than we get
8953 with the clobber dropped. */
8954 if (gimple_clobber_p (stmt))
8956 gsi_replace (gsi_p, gimple_build_nop (), true);
8957 break;
8959 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8961 break;
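/* Lower every statement in the sequence *BODY within context CTX,
   then fold statements inside offloading and taskreg regions that
   gimplification left unfolded.  */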
8965 static void
8966 lower_omp (gimple_seq *body, omp_context *ctx)
8968 location_t saved_location = input_location;
8969 gimple_stmt_iterator gsi;
8970 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8971 lower_omp_1 (&gsi, ctx);
8972 /* During gimplification, we haven't folded statements inside offloading
8973 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8974 if (target_nesting_level || taskreg_nesting_level)
8975 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8976 fold_stmt (&gsi);
8977 input_location = saved_location;
8980 /* Main entry point. */
8982 static unsigned int
8983 execute_lower_omp (void)
8985 gimple_seq body;
8986 int i;
8987 omp_context *ctx;
8989 /* This pass always runs, to provide PROP_gimple_lomp.
8990 But often, there is nothing to do. */
8991 if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8992 && flag_openmp_simd == 0)
8993 return 0;
8995 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8996 delete_omp_context);
8998 body = gimple_body (current_function_decl);
9000 if (hsa_gen_requested_p ())
9001 omp_grid_gridify_all_targets (&body);
9003 scan_omp (&body, NULL);
9004 gcc_assert (taskreg_nesting_level == 0);
9005 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
9006 finish_taskreg_scan (ctx);
9007 taskreg_contexts.release ();
9009 if (all_contexts->root)
9011 if (task_shared_vars)
9012 push_gimplify_context ();
9013 lower_omp (&body, NULL);
9014 if (task_shared_vars)
9015 pop_gimplify_context (NULL);
9018 if (all_contexts)
9020 splay_tree_delete (all_contexts);
9021 all_contexts = NULL;
9023 BITMAP_FREE (task_shared_vars);
9024 return 0;
9027 namespace {
9029 const pass_data pass_data_lower_omp =
9031 GIMPLE_PASS, /* type */
9032 "omplower", /* name */
9033 OPTGROUP_OMP, /* optinfo_flags */
9034 TV_NONE, /* tv_id */
9035 PROP_gimple_any, /* properties_required */
9036 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9037 0, /* properties_destroyed */
9038 0, /* todo_flags_start */
9039 0, /* todo_flags_finish */
9042 class pass_lower_omp : public gimple_opt_pass
9044 public:
9045 pass_lower_omp (gcc::context *ctxt)
9046 : gimple_opt_pass (pass_data_lower_omp, ctxt)
9049 /* opt_pass methods: */
9050 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9052 }; // class pass_lower_omp
9054 } // anon namespace
9056 gimple_opt_pass *
9057 make_pass_lower_omp (gcc::context *ctxt)
9059 return new pass_lower_omp (ctxt);
9062 /* The following is a utility to diagnose structured block violations.
9063 It is not part of the "omplower" pass, as that's invoked too late. It
9064 should be invoked by the respective front ends after gimplification. */
9066 static splay_tree all_labels;
9068 /* Check for mismatched contexts and generate an error if needed. Return
9069 true if an error is detected. */
9071 static bool
9072 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9073 gimple *branch_ctx, gimple *label_ctx)
9075 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9076 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9078 if (label_ctx == branch_ctx)
9079 return false;
9081 const char* kind = NULL;
9083 if (flag_cilkplus)
9085 if ((branch_ctx
9086 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
9087 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
9088 || (label_ctx
9089 && gimple_code (label_ctx) == GIMPLE_OMP_FOR
9090 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
9091 kind = "Cilk Plus";
9093 if (flag_openacc)
9095 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9096 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9098 gcc_checking_assert (kind == NULL);
9099 kind = "OpenACC";
9102 if (kind == NULL)
9104 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9105 kind = "OpenMP";
9108 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9109 so we could traverse it and issue a correct "exit" or "enter" error
9110 message upon a structured block violation.
9112 We built the context by building a list with tree_cons'ing, but there is
9113 no easy counterpart in gimple tuples. It seems like far too much work
9114 for issuing exit/enter error messages. If someone really misses the
9115 distinct error message... patches welcome. */
9117 #if 0
9118 /* Try to avoid confusing the user by producing an error message
9119 with correct "exit" or "enter" verbiage. We prefer "exit"
9120 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9121 if (branch_ctx == NULL)
9122 exit_p = false;
9123 else
9125 while (label_ctx)
9127 if (TREE_VALUE (label_ctx) == branch_ctx)
9129 exit_p = false;
9130 break;
9132 label_ctx = TREE_CHAIN (label_ctx);
9136 if (exit_p)
9137 error ("invalid exit from %s structured block", kind);
9138 else
9139 error ("invalid entry to %s structured block", kind);
9140 #endif
9142 /* If it's obvious we have an invalid entry, be specific about the error. */
9143 if (branch_ctx == NULL)
9144 error ("invalid entry to %s structured block", kind);
9145 else
9147 /* Otherwise, be vague and lazy, but efficient. */
9148 error ("invalid branch to/from %s structured block", kind);
9151 gsi_replace (gsi_p, gimple_build_nop (), false);
9152 return true;
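/* For instance, with -fopenmp the jump in the following sketch is
   rejected with "invalid branch to/from OpenMP structured block":

     #pragma omp parallel
       {
         goto out;
       }
     out:;  */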
9155 /* Pass 1: Create a minimal tree of structured blocks, and record
9156 where each label is found. */
9158 static tree
9159 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9160 struct walk_stmt_info *wi)
9162 gimple *context = (gimple *) wi->info;
9163 gimple *inner_context;
9164 gimple *stmt = gsi_stmt (*gsi_p);
9166 *handled_ops_p = true;
9168 switch (gimple_code (stmt))
9170 WALK_SUBSTMTS;
9172 case GIMPLE_OMP_PARALLEL:
9173 case GIMPLE_OMP_TASK:
9174 case GIMPLE_OMP_SECTIONS:
9175 case GIMPLE_OMP_SINGLE:
9176 case GIMPLE_OMP_SECTION:
9177 case GIMPLE_OMP_MASTER:
9178 case GIMPLE_OMP_ORDERED:
9179 case GIMPLE_OMP_CRITICAL:
9180 case GIMPLE_OMP_TARGET:
9181 case GIMPLE_OMP_TEAMS:
9182 case GIMPLE_OMP_TASKGROUP:
9183 /* The minimal context here is just the current OMP construct. */
9184 inner_context = stmt;
9185 wi->info = inner_context;
9186 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9187 wi->info = context;
9188 break;
9190 case GIMPLE_OMP_FOR:
9191 inner_context = stmt;
9192 wi->info = inner_context;
9193 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9194 walk them. */
9195 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9196 diagnose_sb_1, NULL, wi);
9197 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9198 wi->info = context;
9199 break;
9201 case GIMPLE_LABEL:
9202 splay_tree_insert (all_labels,
9203 (splay_tree_key) gimple_label_label (
9204 as_a <glabel *> (stmt)),
9205 (splay_tree_value) context);
9206 break;
9208 default:
9209 break;
9212 return NULL_TREE;
9215 /* Pass 2: Check each branch and see if its context differs from that of
9216 the destination label's context. */
9218 static tree
9219 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9220 struct walk_stmt_info *wi)
9222 gimple *context = (gimple *) wi->info;
9223 splay_tree_node n;
9224 gimple *stmt = gsi_stmt (*gsi_p);
9226 *handled_ops_p = true;
9228 switch (gimple_code (stmt))
9230 WALK_SUBSTMTS;
9232 case GIMPLE_OMP_PARALLEL:
9233 case GIMPLE_OMP_TASK:
9234 case GIMPLE_OMP_SECTIONS:
9235 case GIMPLE_OMP_SINGLE:
9236 case GIMPLE_OMP_SECTION:
9237 case GIMPLE_OMP_MASTER:
9238 case GIMPLE_OMP_ORDERED:
9239 case GIMPLE_OMP_CRITICAL:
9240 case GIMPLE_OMP_TARGET:
9241 case GIMPLE_OMP_TEAMS:
9242 case GIMPLE_OMP_TASKGROUP:
9243 wi->info = stmt;
9244 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9245 wi->info = context;
9246 break;
9248 case GIMPLE_OMP_FOR:
9249 wi->info = stmt;
9250 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9251 walk them. */
9252 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9253 diagnose_sb_2, NULL, wi);
9254 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9255 wi->info = context;
9256 break;
9258 case GIMPLE_COND:
9260 gcond *cond_stmt = as_a <gcond *> (stmt);
9261 tree lab = gimple_cond_true_label (cond_stmt);
9262 if (lab)
9264 n = splay_tree_lookup (all_labels,
9265 (splay_tree_key) lab);
9266 diagnose_sb_0 (gsi_p, context,
9267 n ? (gimple *) n->value : NULL);
9269 lab = gimple_cond_false_label (cond_stmt);
9270 if (lab)
9272 n = splay_tree_lookup (all_labels,
9273 (splay_tree_key) lab);
9274 diagnose_sb_0 (gsi_p, context,
9275 n ? (gimple *) n->value : NULL);
9278 break;
9280 case GIMPLE_GOTO:
9282 tree lab = gimple_goto_dest (stmt);
9283 if (TREE_CODE (lab) != LABEL_DECL)
9284 break;
9286 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9287 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9289 break;
9291 case GIMPLE_SWITCH:
9293 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9294 unsigned int i;
9295 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9297 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9298 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9299 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9300 break;
9303 break;
9305 case GIMPLE_RETURN:
9306 diagnose_sb_0 (gsi_p, context, NULL);
9307 break;
9309 default:
9310 break;
9313 return NULL_TREE;
9316 static unsigned int
9317 diagnose_omp_structured_block_errors (void)
9319 struct walk_stmt_info wi;
9320 gimple_seq body = gimple_body (current_function_decl);
9322 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9324 memset (&wi, 0, sizeof (wi));
9325 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9327 memset (&wi, 0, sizeof (wi));
9328 wi.want_locations = true;
9329 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9331 gimple_set_body (current_function_decl, body);
9333 splay_tree_delete (all_labels);
9334 all_labels = NULL;
9336 return 0;
9339 namespace {
9341 const pass_data pass_data_diagnose_omp_blocks =
9343 GIMPLE_PASS, /* type */
9344 "*diagnose_omp_blocks", /* name */
9345 OPTGROUP_OMP, /* optinfo_flags */
9346 TV_NONE, /* tv_id */
9347 PROP_gimple_any, /* properties_required */
9348 0, /* properties_provided */
9349 0, /* properties_destroyed */
9350 0, /* todo_flags_start */
9351 0, /* todo_flags_finish */
9354 class pass_diagnose_omp_blocks : public gimple_opt_pass
9356 public:
9357 pass_diagnose_omp_blocks (gcc::context *ctxt)
9358 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9361 /* opt_pass methods: */
9362 virtual bool gate (function *)
9364 return flag_cilkplus || flag_openacc || flag_openmp || flag_openmp_simd;
9366 virtual unsigned int execute (function *)
9368 return diagnose_omp_structured_block_errors ();
9371 }; // class pass_diagnose_omp_blocks
9373 } // anon namespace
9375 gimple_opt_pass *
9376 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9378 return new pass_diagnose_omp_blocks (ctxt);
9382 #include "gt-omp-low.h"