/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
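/* As a rough, illustrative sketch (not the exact GIMPLE this pass emits),
   a directive such as

     #pragma omp parallel shared (x)
       x++;

   is outlined into a child function that receives the shared state through
   a generated record (".omp_data_s" below; the child is named after its
   parent with an "_omp_fn" suffix):

     struct .omp_data_s { int *x; };
     void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
     { (*.omp_data_i->x)++; }

   while the directive itself becomes a call into libgomp that runs the
   child on a team of threads.  Whether a field is a pointer or a copy is
   decided by use_pointer_for_field below.  */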
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages for
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */
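/* Illustrative example: in a C++ member function containing
   "#pragma omp parallel private (n)", where N is a non-static data member,
   the front end creates an artificial, ignored VAR_DECL whose
   DECL_VALUE_EXPR is this->n; for such a decl the walk below strips the
   component references and returns the artificial "this" PARM_DECL.  */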
tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that the variable must have been
   entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */
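/* In rough terms, judging from the checks below: aggregates, atomics and
   variables whose address is taken or visible elsewhere are passed by
   pointer; read-only scalars and by-reference RESULT/PARM_DECLs use
   copy-in only; anything shared into a task is forced addressable and
   passed by pointer, since the task may outlive GOMP_task.  */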
static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because a task needs
     to take its address.  But we don't need to take the address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */
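/* Judging from the callers in this file, MASK is a bitmask: bit 0 adds
   the field to RECORD_TYPE/FIELD_MAP, bit 1 to SRECORD_TYPE/SFIELD_MAP,
   bit 2 requests a pointer-to-pointer field for array types, and bit 3
   keys the maps by &DECL_UID (VAR) rather than VAR itself (the convention
   build_outer_var_ref relies on for taskloop lastprivate).  */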
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */
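/* The function below makes two passes over CLAUSES: the first installs
   fields and local replacement decls, and the second (which relies on the
   lookups installed by the first) fixes up the remapped decls and notes
   which reduction/lastprivate/linear GIMPLE sequences still need to be
   scanned.  */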
static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		  decl = OMP_CLAUSE_DECL (c);
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
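/* So for a parent function foo the children end up with names along the
   lines of foo._omp_fn.0, foo._omp_fn.1, ...; the exact spelling and
   numbering are up to clone_function_name.  */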
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */
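/* As a worked example of the counting below: a combined
   "parallel for collapse(2) lastprivate(x)" whose bounds are not
   compile-time constants needs two temporaries for istart/iend, one for
   count2 and one more for the total iteration count, i.e. four
   _LOOPTEMP_ clauses.  */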
static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
/* Scan an OpenMP parallel directive.  */
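/* Illustrative sketch: for "#pragma omp parallel shared (a) firstprivate (b)"
   where A's address is taken, the scan builds something along the lines of

     struct .omp_data_s { int *a; int b; };

   which becomes the argument block handed to the outlined child
   function.  */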
static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
1890 /* If any decls have been made addressable during scan_omp,
1891 adjust their fields if needed, and layout record types
1892 of parallel/task constructs. */
1894 static void
1895 finish_taskreg_scan (omp_context *ctx)
1897 if (ctx->record_type == NULL_TREE)
1898 return;
1900 /* If any task_shared_vars were needed, verify all
1901 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1902 statements if use_pointer_for_field hasn't changed
1903 because of that. If it did, update field types now. */
1904 if (task_shared_vars)
1906 tree c;
1908 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1909 c; c = OMP_CLAUSE_CHAIN (c))
1910 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1911 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1913 tree decl = OMP_CLAUSE_DECL (c);
1915	  /* Global variables don't need to be copied;
1916	     the receiver side will use them directly.  */
1917 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1918 continue;
1919 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1920 || !use_pointer_for_field (decl, ctx))
1921 continue;
1922 tree field = lookup_field (decl, ctx);
1923 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1924 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1925 continue;
1926 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1927 TREE_THIS_VOLATILE (field) = 0;
1928 DECL_USER_ALIGN (field) = 0;
1929 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1930 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1931 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1932 if (ctx->srecord_type)
1934 tree sfield = lookup_sfield (decl, ctx);
1935 TREE_TYPE (sfield) = TREE_TYPE (field);
1936 TREE_THIS_VOLATILE (sfield) = 0;
1937 DECL_USER_ALIGN (sfield) = 0;
1938 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1939 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1940 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1945 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1947 layout_type (ctx->record_type);
1948 fixup_child_record_type (ctx);
1950 else
1952 location_t loc = gimple_location (ctx->stmt);
1953 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1954 /* Move VLA fields to the end. */
1955 p = &TYPE_FIELDS (ctx->record_type);
1956 while (*p)
1957 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1958 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1960 *q = *p;
1961 *p = TREE_CHAIN (*p);
1962 TREE_CHAIN (*q) = NULL_TREE;
1963 q = &TREE_CHAIN (*q);
1965 else
1966 p = &DECL_CHAIN (*p);
1967 *p = vla_fields;
1968 if (gimple_omp_task_taskloop_p (ctx->stmt))
1970	  /* Move the fields corresponding to the first and second _looptemp_
1971	     clauses to the front.  These are filled in by GOMP_taskloop
1972	     and thus need to be at specific positions.  */
1973 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1974 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1975 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1976 OMP_CLAUSE__LOOPTEMP_);
1977 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1978 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1979 p = &TYPE_FIELDS (ctx->record_type);
1980 while (*p)
1981 if (*p == f1 || *p == f2)
1982 *p = DECL_CHAIN (*p);
1983 else
1984 p = &DECL_CHAIN (*p);
1985 DECL_CHAIN (f1) = f2;
1986 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1987 TYPE_FIELDS (ctx->record_type) = f1;
1988 if (ctx->srecord_type)
1990 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1991 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1992 p = &TYPE_FIELDS (ctx->srecord_type);
1993 while (*p)
1994 if (*p == f1 || *p == f2)
1995 *p = DECL_CHAIN (*p);
1996 else
1997 p = &DECL_CHAIN (*p);
1998 DECL_CHAIN (f1) = f2;
1999 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2000 TYPE_FIELDS (ctx->srecord_type) = f1;
2003 layout_type (ctx->record_type);
2004 fixup_child_record_type (ctx);
2005 if (ctx->srecord_type)
2006 layout_type (ctx->srecord_type);
2007 tree t = fold_convert_loc (loc, long_integer_type_node,
2008 TYPE_SIZE_UNIT (ctx->record_type));
2009 if (TREE_CODE (t) != INTEGER_CST)
2011 t = unshare_expr (t);
2012 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2014 gimple_omp_task_set_arg_size (ctx->stmt, t);
2015 t = build_int_cst (long_integer_type_node,
2016 TYPE_ALIGN_UNIT (ctx->record_type));
2017 gimple_omp_task_set_arg_align (ctx->stmt, t);
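  /* E.g. (illustrative): if the task's record type ends up as
     "struct .omp_data_s { int *i; }", the code above records
     arg_size == 8 and arg_align == 8 on an LP64 target.  */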
2021 /* Find the enclosing offload context. */
2023 static omp_context *
2024 enclosing_target_ctx (omp_context *ctx)
2026 for (; ctx; ctx = ctx->outer)
2027 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2028 break;
2030 return ctx;
2033 /* Return true if ctx is part of an oacc kernels region. */
2035 static bool
2036 ctx_in_oacc_kernels_region (omp_context *ctx)
2038 for (;ctx != NULL; ctx = ctx->outer)
2040 gimple *stmt = ctx->stmt;
2041 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2042 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2043 return true;
2046 return false;
2049	/* Check the parallelism clauses inside a kernels region.
2050	   Until kernels handling moves to use the same loop indirection
2051	   scheme as parallel, we need to do this checking early.  */
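/* A sketch of what is diagnosed here (illustrative):

     #pragma acc kernels
     #pragma acc loop gang
     for (i = 0; i < n; i++)
       #pragma acc loop gang   <-- error: inner loop uses same OpenACC
       for (j = 0; j < n; j++)     parallelism as containing loop
	 ...  */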
2053 static unsigned
2054 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2056 bool checking = true;
2057 unsigned outer_mask = 0;
2058 unsigned this_mask = 0;
2059 bool has_seq = false, has_auto = false;
2061 if (ctx->outer)
2062 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2063 if (!stmt)
2065 checking = false;
2066 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2067 return outer_mask;
2068 stmt = as_a <gomp_for *> (ctx->stmt);
2071 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2073 switch (OMP_CLAUSE_CODE (c))
2075 case OMP_CLAUSE_GANG:
2076 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2077 break;
2078 case OMP_CLAUSE_WORKER:
2079 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2080 break;
2081 case OMP_CLAUSE_VECTOR:
2082 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2083 break;
2084 case OMP_CLAUSE_SEQ:
2085 has_seq = true;
2086 break;
2087 case OMP_CLAUSE_AUTO:
2088 has_auto = true;
2089 break;
2090 default:
2091 break;
2095 if (checking)
2097 if (has_seq && (this_mask || has_auto))
2098 error_at (gimple_location (stmt), "%<seq%> overrides other"
2099 " OpenACC loop specifiers");
2100 else if (has_auto && this_mask)
2101 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2102 " OpenACC loop specifiers");
2104 if (this_mask & outer_mask)
2105 error_at (gimple_location (stmt), "inner loop uses same"
2106 " OpenACC parallelism as containing loop");
2109 return outer_mask | this_mask;
2112 /* Scan a GIMPLE_OMP_FOR. */
2114 static omp_context *
2115 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2117 omp_context *ctx;
2118 size_t i;
2119 tree clauses = gimple_omp_for_clauses (stmt);
2121 ctx = new_omp_context (stmt, outer_ctx);
2123 if (is_gimple_omp_oacc (stmt))
2125 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2127 if (!tgt || is_oacc_parallel (tgt))
2128 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2130 char const *check = NULL;
2132 switch (OMP_CLAUSE_CODE (c))
2134 case OMP_CLAUSE_GANG:
2135 check = "gang";
2136 break;
2138 case OMP_CLAUSE_WORKER:
2139 check = "worker";
2140 break;
2142 case OMP_CLAUSE_VECTOR:
2143 check = "vector";
2144 break;
2146 default:
2147 break;
2150 if (check && OMP_CLAUSE_OPERAND (c, 0))
2151 error_at (gimple_location (stmt),
2152 "argument not permitted on %qs clause in"
2153 " OpenACC %<parallel%>", check);
2156 if (tgt && is_oacc_kernels (tgt))
2158 /* Strip out reductions, as they are not handled yet. */
2159 tree *prev_ptr = &clauses;
2161 while (tree probe = *prev_ptr)
2163 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2165 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2166 *prev_ptr = *next_ptr;
2167 else
2168 prev_ptr = next_ptr;
2171 gimple_omp_for_set_clauses (stmt, clauses);
2172 check_oacc_kernel_gwv (stmt, ctx);
2176 scan_sharing_clauses (clauses, ctx);
2178 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2179 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2181 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2182 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2183 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2184 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2186 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2187 return ctx;
2190	/* Duplicate #pragma omp simd, creating one copy for SIMT and another one
	   for SIMD.  */
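/* The emitted structure is roughly (a sketch):

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <loop copy with an added _simt_ clause>; goto lab3;
     lab2: <original simd loop>;
     lab3: ;  */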
2192 static void
2193 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2194 omp_context *outer_ctx)
2196 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2197 gsi_replace (gsi, bind, false);
2198 gimple_seq seq = NULL;
2199 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2200 tree cond = create_tmp_var_raw (integer_type_node);
2201 DECL_CONTEXT (cond) = current_function_decl;
2202 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2203 gimple_bind_set_vars (bind, cond);
2204 gimple_call_set_lhs (g, cond);
2205 gimple_seq_add_stmt (&seq, g);
2206 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2207 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2208 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2209 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2210 gimple_seq_add_stmt (&seq, g);
2211 g = gimple_build_label (lab1);
2212 gimple_seq_add_stmt (&seq, g);
2213 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2214 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2215 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2216 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2217 gimple_omp_for_set_clauses (new_stmt, clause);
2218 gimple_seq_add_stmt (&seq, new_stmt);
2219 g = gimple_build_goto (lab3);
2220 gimple_seq_add_stmt (&seq, g);
2221 g = gimple_build_label (lab2);
2222 gimple_seq_add_stmt (&seq, g);
2223 gimple_seq_add_stmt (&seq, stmt);
2224 g = gimple_build_label (lab3);
2225 gimple_seq_add_stmt (&seq, g);
2226 gimple_bind_set_body (bind, seq);
2227 update_stmt (bind);
2228 scan_omp_for (new_stmt, outer_ctx);
2229 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2232 /* Scan an OpenMP sections directive. */
2234 static void
2235 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2237 omp_context *ctx;
2239 ctx = new_omp_context (stmt, outer_ctx);
2240 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2241 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2244 /* Scan an OpenMP single directive. */
2246 static void
2247 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2249 omp_context *ctx;
2250 tree name;
2252 ctx = new_omp_context (stmt, outer_ctx);
2253 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2254 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2255 name = create_tmp_var_name (".omp_copy_s");
2256 name = build_decl (gimple_location (stmt),
2257 TYPE_DECL, name, ctx->record_type);
2258 TYPE_NAME (ctx->record_type) = name;
2260 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2261 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2263 if (TYPE_FIELDS (ctx->record_type) == NULL)
2264 ctx->record_type = NULL;
2265 else
2266 layout_type (ctx->record_type);
2269 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2270 used in the corresponding offloaded function are restrict. */
2272 static bool
2273 omp_target_base_pointers_restrict_p (tree clauses)
2275 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2276 used by OpenACC. */
2277 if (flag_openacc == 0)
2278 return false;
2280 /* I. Basic example:
2282 void foo (void)
2284 unsigned int a[2], b[2];
2286 #pragma acc kernels \
2287 copyout (a) \
2288 copyout (b)
2290 a[0] = 0;
2291 b[0] = 1;
2295 After gimplification, we have:
2297 #pragma omp target oacc_kernels \
2298 map(force_from:a [len: 8]) \
2299 map(force_from:b [len: 8])
2301 a[0] = 0;
2302 b[0] = 1;
2305 Because both mappings have the force prefix, we know that they will be
2306 allocated when calling the corresponding offloaded function, which means we
2307 can mark the base pointers for a and b in the offloaded function as
2308 restrict. */
2310 tree c;
2311 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2313 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2314 return false;
2316 switch (OMP_CLAUSE_MAP_KIND (c))
2318 case GOMP_MAP_FORCE_ALLOC:
2319 case GOMP_MAP_FORCE_TO:
2320 case GOMP_MAP_FORCE_FROM:
2321 case GOMP_MAP_FORCE_TOFROM:
2322 break;
2323 default:
2324 return false;
2328 return true;
2331 /* Scan a GIMPLE_OMP_TARGET. */
2333 static void
2334 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2336 omp_context *ctx;
2337 tree name;
2338 bool offloaded = is_gimple_omp_offloaded (stmt);
2339 tree clauses = gimple_omp_target_clauses (stmt);
2341 ctx = new_omp_context (stmt, outer_ctx);
2342 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2343 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2344 name = create_tmp_var_name (".omp_data_t");
2345 name = build_decl (gimple_location (stmt),
2346 TYPE_DECL, name, ctx->record_type);
2347 DECL_ARTIFICIAL (name) = 1;
2348 DECL_NAMELESS (name) = 1;
2349 TYPE_NAME (ctx->record_type) = name;
2350 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2352 bool base_pointers_restrict = false;
2353 if (offloaded)
2355 create_omp_child_function (ctx, false);
2356 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2358 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2359 if (base_pointers_restrict
2360 && dump_file && (dump_flags & TDF_DETAILS))
2361 fprintf (dump_file,
2362 "Base pointers in offloaded function are restrict\n");
2365 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2366 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2368 if (TYPE_FIELDS (ctx->record_type) == NULL)
2369 ctx->record_type = ctx->receiver_decl = NULL;
2370 else
2372 TYPE_FIELDS (ctx->record_type)
2373 = nreverse (TYPE_FIELDS (ctx->record_type));
2374 if (flag_checking)
2376 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2377 for (tree field = TYPE_FIELDS (ctx->record_type);
2378 field;
2379 field = DECL_CHAIN (field))
2380 gcc_assert (DECL_ALIGN (field) == align);
2382 layout_type (ctx->record_type);
2383 if (offloaded)
2384 fixup_child_record_type (ctx);
2388 /* Scan an OpenMP teams directive. */
2390 static void
2391 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2393 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2394 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2395 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2398 /* Check nesting restrictions. */
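/* E.g. (illustrative): the checks below reject

     #pragma omp teams
     #pragma omp single
       ;

   because only distribute or parallel regions may be strictly nested
   inside a teams region.  */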
2399 static bool
2400 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2402 tree c;
2404 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2405	    /* GRID_BODY is an artificial construct; nesting rules will be checked
2406	       in the original copy of its contents.  */
2407 return true;
2409 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2410 inside an OpenACC CTX. */
2411 if (!(is_gimple_omp (stmt)
2412 && is_gimple_omp_oacc (stmt))
2413 /* Except for atomic codes that we share with OpenMP. */
2414 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2415 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2417 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2419 error_at (gimple_location (stmt),
2420 "non-OpenACC construct inside of OpenACC routine");
2421 return false;
2423 else
2424 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2425 if (is_gimple_omp (octx->stmt)
2426 && is_gimple_omp_oacc (octx->stmt))
2428 error_at (gimple_location (stmt),
2429 "non-OpenACC construct inside of OpenACC region");
2430 return false;
2434 if (ctx != NULL)
2436 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2437 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2439 c = NULL_TREE;
2440 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2442 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2443 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2445 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2446 && (ctx->outer == NULL
2447 || !gimple_omp_for_combined_into_p (ctx->stmt)
2448 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2449 || (gimple_omp_for_kind (ctx->outer->stmt)
2450 != GF_OMP_FOR_KIND_FOR)
2451 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2453 error_at (gimple_location (stmt),
2454 "%<ordered simd threads%> must be closely "
2455 "nested inside of %<for simd%> region");
2456 return false;
2458 return true;
2461 error_at (gimple_location (stmt),
2462 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2463 " may not be nested inside %<simd%> region");
2464 return false;
2466 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2468 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2469 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2470 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2471 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2473 error_at (gimple_location (stmt),
2474 "only %<distribute%> or %<parallel%> regions are "
2475 "allowed to be strictly nested inside %<teams%> "
2476 "region");
2477 return false;
2481 switch (gimple_code (stmt))
2483 case GIMPLE_OMP_FOR:
2484 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2485 return true;
2486 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2488 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2490 error_at (gimple_location (stmt),
2491 "%<distribute%> region must be strictly nested "
2492 "inside %<teams%> construct");
2493 return false;
2495 return true;
2497	      /* We split a taskloop into a task and a taskloop nested in it.  */
2498 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2499 return true;
2500 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2502 bool ok = false;
2504 if (ctx)
2505 switch (gimple_code (ctx->stmt))
2507 case GIMPLE_OMP_FOR:
2508 ok = (gimple_omp_for_kind (ctx->stmt)
2509 == GF_OMP_FOR_KIND_OACC_LOOP);
2510 break;
2512 case GIMPLE_OMP_TARGET:
2513 switch (gimple_omp_target_kind (ctx->stmt))
2515 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2516 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2517 ok = true;
2518 break;
2520 default:
2521 break;
2524 default:
2525 break;
2527 else if (oacc_get_fn_attrib (current_function_decl))
2528 ok = true;
2529 if (!ok)
2531 error_at (gimple_location (stmt),
2532 "OpenACC loop directive must be associated with"
2533 " an OpenACC compute region");
2534 return false;
2537 /* FALLTHRU */
2538 case GIMPLE_CALL:
2539 if (is_gimple_call (stmt)
2540 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2541 == BUILT_IN_GOMP_CANCEL
2542 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2543 == BUILT_IN_GOMP_CANCELLATION_POINT))
2545 const char *bad = NULL;
2546 const char *kind = NULL;
2547 const char *construct
2548 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2549 == BUILT_IN_GOMP_CANCEL)
2550 ? "#pragma omp cancel"
2551 : "#pragma omp cancellation point";
2552 if (ctx == NULL)
2554 error_at (gimple_location (stmt), "orphaned %qs construct",
2555 construct);
2556 return false;
2558 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2559 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2560 : 0)
2562 case 1:
2563 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2564 bad = "#pragma omp parallel";
2565 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2566 == BUILT_IN_GOMP_CANCEL
2567 && !integer_zerop (gimple_call_arg (stmt, 1)))
2568 ctx->cancellable = true;
2569 kind = "parallel";
2570 break;
2571 case 2:
2572 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2573 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2574 bad = "#pragma omp for";
2575 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2576 == BUILT_IN_GOMP_CANCEL
2577 && !integer_zerop (gimple_call_arg (stmt, 1)))
2579 ctx->cancellable = true;
2580 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2581 OMP_CLAUSE_NOWAIT))
2582 warning_at (gimple_location (stmt), 0,
2583 "%<#pragma omp cancel for%> inside "
2584 "%<nowait%> for construct");
2585 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2586 OMP_CLAUSE_ORDERED))
2587 warning_at (gimple_location (stmt), 0,
2588 "%<#pragma omp cancel for%> inside "
2589 "%<ordered%> for construct");
2591 kind = "for";
2592 break;
2593 case 4:
2594 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2595 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2596 bad = "#pragma omp sections";
2597 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2598 == BUILT_IN_GOMP_CANCEL
2599 && !integer_zerop (gimple_call_arg (stmt, 1)))
2601 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2603 ctx->cancellable = true;
2604 if (omp_find_clause (gimple_omp_sections_clauses
2605 (ctx->stmt),
2606 OMP_CLAUSE_NOWAIT))
2607 warning_at (gimple_location (stmt), 0,
2608 "%<#pragma omp cancel sections%> inside "
2609 "%<nowait%> sections construct");
2611 else
2613 gcc_assert (ctx->outer
2614 && gimple_code (ctx->outer->stmt)
2615 == GIMPLE_OMP_SECTIONS);
2616 ctx->outer->cancellable = true;
2617 if (omp_find_clause (gimple_omp_sections_clauses
2618 (ctx->outer->stmt),
2619 OMP_CLAUSE_NOWAIT))
2620 warning_at (gimple_location (stmt), 0,
2621 "%<#pragma omp cancel sections%> inside "
2622 "%<nowait%> sections construct");
2625 kind = "sections";
2626 break;
2627 case 8:
2628 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2629 bad = "#pragma omp task";
2630 else
2632 for (omp_context *octx = ctx->outer;
2633 octx; octx = octx->outer)
2635 switch (gimple_code (octx->stmt))
2637 case GIMPLE_OMP_TASKGROUP:
2638 break;
2639 case GIMPLE_OMP_TARGET:
2640 if (gimple_omp_target_kind (octx->stmt)
2641 != GF_OMP_TARGET_KIND_REGION)
2642 continue;
2643 /* FALLTHRU */
2644 case GIMPLE_OMP_PARALLEL:
2645 case GIMPLE_OMP_TEAMS:
2646 error_at (gimple_location (stmt),
2647 "%<%s taskgroup%> construct not closely "
2648 "nested inside of %<taskgroup%> region",
2649 construct);
2650 return false;
2651 default:
2652 continue;
2654 break;
2656 ctx->cancellable = true;
2658 kind = "taskgroup";
2659 break;
2660 default:
2661 error_at (gimple_location (stmt), "invalid arguments");
2662 return false;
2664 if (bad)
2666 error_at (gimple_location (stmt),
2667 "%<%s %s%> construct not closely nested inside of %qs",
2668 construct, kind, bad);
2669 return false;
2672 /* FALLTHRU */
2673 case GIMPLE_OMP_SECTIONS:
2674 case GIMPLE_OMP_SINGLE:
2675 for (; ctx != NULL; ctx = ctx->outer)
2676 switch (gimple_code (ctx->stmt))
2678 case GIMPLE_OMP_FOR:
2679 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2680 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2681 break;
2682 /* FALLTHRU */
2683 case GIMPLE_OMP_SECTIONS:
2684 case GIMPLE_OMP_SINGLE:
2685 case GIMPLE_OMP_ORDERED:
2686 case GIMPLE_OMP_MASTER:
2687 case GIMPLE_OMP_TASK:
2688 case GIMPLE_OMP_CRITICAL:
2689 if (is_gimple_call (stmt))
2691 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2692 != BUILT_IN_GOMP_BARRIER)
2693 return true;
2694 error_at (gimple_location (stmt),
2695 "barrier region may not be closely nested inside "
2696 "of work-sharing, %<critical%>, %<ordered%>, "
2697 "%<master%>, explicit %<task%> or %<taskloop%> "
2698 "region");
2699 return false;
2701 error_at (gimple_location (stmt),
2702 "work-sharing region may not be closely nested inside "
2703 "of work-sharing, %<critical%>, %<ordered%>, "
2704 "%<master%>, explicit %<task%> or %<taskloop%> region");
2705 return false;
2706 case GIMPLE_OMP_PARALLEL:
2707 case GIMPLE_OMP_TEAMS:
2708 return true;
2709 case GIMPLE_OMP_TARGET:
2710 if (gimple_omp_target_kind (ctx->stmt)
2711 == GF_OMP_TARGET_KIND_REGION)
2712 return true;
2713 break;
2714 default:
2715 break;
2717 break;
2718 case GIMPLE_OMP_MASTER:
2719 for (; ctx != NULL; ctx = ctx->outer)
2720 switch (gimple_code (ctx->stmt))
2722 case GIMPLE_OMP_FOR:
2723 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2724 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2725 break;
2726 /* FALLTHRU */
2727 case GIMPLE_OMP_SECTIONS:
2728 case GIMPLE_OMP_SINGLE:
2729 case GIMPLE_OMP_TASK:
2730 error_at (gimple_location (stmt),
2731 "%<master%> region may not be closely nested inside "
2732 "of work-sharing, explicit %<task%> or %<taskloop%> "
2733 "region");
2734 return false;
2735 case GIMPLE_OMP_PARALLEL:
2736 case GIMPLE_OMP_TEAMS:
2737 return true;
2738 case GIMPLE_OMP_TARGET:
2739 if (gimple_omp_target_kind (ctx->stmt)
2740 == GF_OMP_TARGET_KIND_REGION)
2741 return true;
2742 break;
2743 default:
2744 break;
2746 break;
2747 case GIMPLE_OMP_TASK:
2748 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2749 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2750 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2751 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2753 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2754 error_at (OMP_CLAUSE_LOCATION (c),
2755 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2756 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2757 return false;
2759 break;
2760 case GIMPLE_OMP_ORDERED:
2761 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2762 c; c = OMP_CLAUSE_CHAIN (c))
2764 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2766 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2767 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2768 continue;
2770 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2771 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2772 || kind == OMP_CLAUSE_DEPEND_SINK)
2774 tree oclause;
2775	      /* Look for a containing ordered(N) loop.  */
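	      /* E.g. (illustrative):

		   #pragma omp for ordered(2)
		   for (i = 0; i < n; i++)
		     for (j = 0; j < n; j++)
		       {
			 #pragma omp ordered depend(sink: i - 1, j)
			 ...
		       }

		 Without the "ordered(2)" parameter on the loop, the depend
		 form is diagnosed below.  */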
2776 if (ctx == NULL
2777 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2778 || (oclause
2779 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2780 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2782 error_at (OMP_CLAUSE_LOCATION (c),
2783 "%<ordered%> construct with %<depend%> clause "
2784 "must be closely nested inside an %<ordered%> "
2785 "loop");
2786 return false;
2788 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2790 error_at (OMP_CLAUSE_LOCATION (c),
2791 "%<ordered%> construct with %<depend%> clause "
2792 "must be closely nested inside a loop with "
2793 "%<ordered%> clause with a parameter");
2794 return false;
2797 else
2799 error_at (OMP_CLAUSE_LOCATION (c),
2800 "invalid depend kind in omp %<ordered%> %<depend%>");
2801 return false;
2804 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2805 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2807	  /* An ordered simd must be closely nested inside a simd region,
2808	     and a simd region must not contain constructs other than
2809	     ordered simd; therefore an ordered simd is either orphaned,
2810	     or ctx->stmt must be a simd.  The latter case is handled
2811	     earlier.  */
2812 if (ctx != NULL)
2814 error_at (gimple_location (stmt),
2815 "%<ordered%> %<simd%> must be closely nested inside "
2816 "%<simd%> region");
2817 return false;
2820 for (; ctx != NULL; ctx = ctx->outer)
2821 switch (gimple_code (ctx->stmt))
2823 case GIMPLE_OMP_CRITICAL:
2824 case GIMPLE_OMP_TASK:
2825 case GIMPLE_OMP_ORDERED:
2826 ordered_in_taskloop:
2827 error_at (gimple_location (stmt),
2828 "%<ordered%> region may not be closely nested inside "
2829 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2830 "%<taskloop%> region");
2831 return false;
2832 case GIMPLE_OMP_FOR:
2833 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2834 goto ordered_in_taskloop;
2835 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2836 OMP_CLAUSE_ORDERED) == NULL)
2838 error_at (gimple_location (stmt),
2839 "%<ordered%> region must be closely nested inside "
2840 "a loop region with an %<ordered%> clause");
2841 return false;
2843 return true;
2844 case GIMPLE_OMP_TARGET:
2845 if (gimple_omp_target_kind (ctx->stmt)
2846 != GF_OMP_TARGET_KIND_REGION)
2847 break;
2848 /* FALLTHRU */
2849 case GIMPLE_OMP_PARALLEL:
2850 case GIMPLE_OMP_TEAMS:
2851 error_at (gimple_location (stmt),
2852 "%<ordered%> region must be closely nested inside "
2853 "a loop region with an %<ordered%> clause");
2854 return false;
2855 default:
2856 break;
2858 break;
2859 case GIMPLE_OMP_CRITICAL:
2861 tree this_stmt_name
2862 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2863 for (; ctx != NULL; ctx = ctx->outer)
2864 if (gomp_critical *other_crit
2865 = dyn_cast <gomp_critical *> (ctx->stmt))
2866 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2868 error_at (gimple_location (stmt),
2869 "%<critical%> region may not be nested inside "
2870 "a %<critical%> region with the same name");
2871 return false;
2874 break;
2875 case GIMPLE_OMP_TEAMS:
2876 if (ctx == NULL
2877 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2878 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2880 error_at (gimple_location (stmt),
2881 "%<teams%> construct not closely nested inside of "
2882 "%<target%> construct");
2883 return false;
2885 break;
2886 case GIMPLE_OMP_TARGET:
2887 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2888 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2889 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2890 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2892 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2893 error_at (OMP_CLAUSE_LOCATION (c),
2894 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2895 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2896 return false;
2898 if (is_gimple_omp_offloaded (stmt)
2899 && oacc_get_fn_attrib (cfun->decl) != NULL)
2901 error_at (gimple_location (stmt),
2902 "OpenACC region inside of OpenACC routine, nested "
2903 "parallelism not supported yet");
2904 return false;
2906 for (; ctx != NULL; ctx = ctx->outer)
2908 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2910 if (is_gimple_omp (stmt)
2911 && is_gimple_omp_oacc (stmt)
2912 && is_gimple_omp (ctx->stmt))
2914 error_at (gimple_location (stmt),
2915 "OpenACC construct inside of non-OpenACC region");
2916 return false;
2918 continue;
2921 const char *stmt_name, *ctx_stmt_name;
2922 switch (gimple_omp_target_kind (stmt))
2924 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2925 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2926 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2927 case GF_OMP_TARGET_KIND_ENTER_DATA:
2928 stmt_name = "target enter data"; break;
2929 case GF_OMP_TARGET_KIND_EXIT_DATA:
2930 stmt_name = "target exit data"; break;
2931 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2932 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2933 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2934 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2935 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2936 stmt_name = "enter/exit data"; break;
2937 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2938 break;
2939 default: gcc_unreachable ();
2941 switch (gimple_omp_target_kind (ctx->stmt))
2943 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2944 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2945 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2946 ctx_stmt_name = "parallel"; break;
2947 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2948 ctx_stmt_name = "kernels"; break;
2949 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2950 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2951 ctx_stmt_name = "host_data"; break;
2952 default: gcc_unreachable ();
2955 /* OpenACC/OpenMP mismatch? */
2956 if (is_gimple_omp_oacc (stmt)
2957 != is_gimple_omp_oacc (ctx->stmt))
2959 error_at (gimple_location (stmt),
2960 "%s %qs construct inside of %s %qs region",
2961 (is_gimple_omp_oacc (stmt)
2962 ? "OpenACC" : "OpenMP"), stmt_name,
2963 (is_gimple_omp_oacc (ctx->stmt)
2964 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2965 return false;
2967 if (is_gimple_omp_offloaded (ctx->stmt))
2969 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2970 if (is_gimple_omp_oacc (ctx->stmt))
2972 error_at (gimple_location (stmt),
2973 "%qs construct inside of %qs region",
2974 stmt_name, ctx_stmt_name);
2975 return false;
2977 else
2979 warning_at (gimple_location (stmt), 0,
2980 "%qs construct inside of %qs region",
2981 stmt_name, ctx_stmt_name);
2985 break;
2986 default:
2987 break;
2989 return true;
2993	/* Helper function for scan_omp.
2995	   Callback for walk_tree or operators in walk_gimple_stmt, used to
2996	   scan for OMP directives in TP.  */
2998 static tree
2999 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3001 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3002 omp_context *ctx = (omp_context *) wi->info;
3003 tree t = *tp;
3005 switch (TREE_CODE (t))
3007 case VAR_DECL:
3008 case PARM_DECL:
3009 case LABEL_DECL:
3010 case RESULT_DECL:
3011 if (ctx)
3013 tree repl = remap_decl (t, &ctx->cb);
3014 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3015 *tp = repl;
3017 break;
3019 default:
3020 if (ctx && TYPE_P (t))
3021 *tp = remap_type (t, &ctx->cb);
3022 else if (!DECL_P (t))
3024 *walk_subtrees = 1;
3025 if (ctx)
3027 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3028 if (tem != TREE_TYPE (t))
3030 if (TREE_CODE (t) == INTEGER_CST)
3031 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3032 else
3033 TREE_TYPE (t) = tem;
3037 break;
3040 return NULL_TREE;
3043 /* Return true if FNDECL is a setjmp or a longjmp. */
3045 static bool
3046 setjmp_or_longjmp_p (const_tree fndecl)
3048 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3049 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3050 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3051 return true;
3053 tree declname = DECL_NAME (fndecl);
3054 if (!declname)
3055 return false;
3056 const char *name = IDENTIFIER_POINTER (declname);
3057 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3061 /* Helper function for scan_omp.
3063 Callback for walk_gimple_stmt used to scan for OMP directives in
3064 the current statement in GSI. */
3066 static tree
3067 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3068 struct walk_stmt_info *wi)
3070 gimple *stmt = gsi_stmt (*gsi);
3071 omp_context *ctx = (omp_context *) wi->info;
3073 if (gimple_has_location (stmt))
3074 input_location = gimple_location (stmt);
3076 /* Check the nesting restrictions. */
3077 bool remove = false;
3078 if (is_gimple_omp (stmt))
3079 remove = !check_omp_nesting_restrictions (stmt, ctx);
3080 else if (is_gimple_call (stmt))
3082 tree fndecl = gimple_call_fndecl (stmt);
3083 if (fndecl)
3085 if (setjmp_or_longjmp_p (fndecl)
3086 && ctx
3087 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3088 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3090 remove = true;
3091 error_at (gimple_location (stmt),
3092 "setjmp/longjmp inside simd construct");
3094 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3095 switch (DECL_FUNCTION_CODE (fndecl))
3097 case BUILT_IN_GOMP_BARRIER:
3098 case BUILT_IN_GOMP_CANCEL:
3099 case BUILT_IN_GOMP_CANCELLATION_POINT:
3100 case BUILT_IN_GOMP_TASKYIELD:
3101 case BUILT_IN_GOMP_TASKWAIT:
3102 case BUILT_IN_GOMP_TASKGROUP_START:
3103 case BUILT_IN_GOMP_TASKGROUP_END:
3104 remove = !check_omp_nesting_restrictions (stmt, ctx);
3105 break;
3106 default:
3107 break;
3111 if (remove)
3113 stmt = gimple_build_nop ();
3114 gsi_replace (gsi, stmt, false);
3117 *handled_ops_p = true;
3119 switch (gimple_code (stmt))
3121 case GIMPLE_OMP_PARALLEL:
3122 taskreg_nesting_level++;
3123 scan_omp_parallel (gsi, ctx);
3124 taskreg_nesting_level--;
3125 break;
3127 case GIMPLE_OMP_TASK:
3128 taskreg_nesting_level++;
3129 scan_omp_task (gsi, ctx);
3130 taskreg_nesting_level--;
3131 break;
3133 case GIMPLE_OMP_FOR:
3134 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3135 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3136 && omp_maybe_offloaded_ctx (ctx)
3137 && omp_max_simt_vf ())
3138 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3139 else
3140 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3141 break;
3143 case GIMPLE_OMP_SECTIONS:
3144 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3145 break;
3147 case GIMPLE_OMP_SINGLE:
3148 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3149 break;
3151 case GIMPLE_OMP_SECTION:
3152 case GIMPLE_OMP_MASTER:
3153 case GIMPLE_OMP_TASKGROUP:
3154 case GIMPLE_OMP_ORDERED:
3155 case GIMPLE_OMP_CRITICAL:
3156 case GIMPLE_OMP_GRID_BODY:
3157 ctx = new_omp_context (stmt, ctx);
3158 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3159 break;
3161 case GIMPLE_OMP_TARGET:
3162 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3163 break;
3165 case GIMPLE_OMP_TEAMS:
3166 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3167 break;
3169 case GIMPLE_BIND:
3171 tree var;
3173 *handled_ops_p = false;
3174 if (ctx)
3175 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3176 var ;
3177 var = DECL_CHAIN (var))
3178 insert_decl_map (&ctx->cb, var, var);
3180 break;
3181 default:
3182 *handled_ops_p = false;
3183 break;
3186 return NULL_TREE;
3190 /* Scan all the statements starting at the current statement. CTX
3191 contains context information about the OMP directives and
3192 clauses found during the scan. */
3194 static void
3195 scan_omp (gimple_seq *body_p, omp_context *ctx)
3197 location_t saved_location;
3198 struct walk_stmt_info wi;
3200 memset (&wi, 0, sizeof (wi));
3201 wi.info = ctx;
3202 wi.want_locations = true;
3204 saved_location = input_location;
3205 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3206 input_location = saved_location;
3209 /* Re-gimplification and code generation routines. */
3211 /* If a context was created for STMT when it was scanned, return it. */
3213 static omp_context *
3214 maybe_lookup_ctx (gimple *stmt)
3216 splay_tree_node n;
3217 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3218 return n ? (omp_context *) n->value : NULL;
3222 /* Find the mapping for DECL in CTX or the immediately enclosing
3223 context that has a mapping for DECL.
3225 If CTX is a nested parallel directive, we may have to use the decl
3226 mappings created in CTX's parent context. Suppose that we have the
3227	   following parallel nesting (variable UIDs shown for clarity):
3229 iD.1562 = 0;
3230 #omp parallel shared(iD.1562) -> outer parallel
3231 iD.1562 = iD.1562 + 1;
3233 #omp parallel shared (iD.1562) -> inner parallel
3234 iD.1562 = iD.1562 - 1;
3236 Each parallel structure will create a distinct .omp_data_s structure
3237 for copying iD.1562 in/out of the directive:
3239 outer parallel .omp_data_s.1.i -> iD.1562
3240 inner parallel .omp_data_s.2.i -> iD.1562
3242 A shared variable mapping will produce a copy-out operation before
3243 the parallel directive and a copy-in operation after it. So, in
3244 this case we would have:
3246 iD.1562 = 0;
3247 .omp_data_o.1.i = iD.1562;
3248 #omp parallel shared(iD.1562) -> outer parallel
3249 .omp_data_i.1 = &.omp_data_o.1
3250 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3252 .omp_data_o.2.i = iD.1562; -> **
3253 #omp parallel shared(iD.1562) -> inner parallel
3254 .omp_data_i.2 = &.omp_data_o.2
3255 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3258 ** This is a problem. The symbol iD.1562 cannot be referenced
3259 inside the body of the outer parallel region. But since we are
3260 emitting this copy operation while expanding the inner parallel
3261 directive, we need to access the CTX structure of the outer
3262 parallel directive to get the correct mapping:
3264 .omp_data_o.2.i = .omp_data_i.1->i
3266 Since there may be other workshare or parallel directives enclosing
3267 the parallel directive, it may be necessary to walk up the context
3268 parent chain. This is not a problem in general because nested
3269 parallelism happens only rarely. */
3271 static tree
3272 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3274 tree t;
3275 omp_context *up;
3277 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3278 t = maybe_lookup_decl (decl, up);
3280 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3282 return t ? t : decl;
3286 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3287 in outer contexts. */
3289 static tree
3290 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3292 tree t = NULL;
3293 omp_context *up;
3295 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3296 t = maybe_lookup_decl (decl, up);
3298 return t ? t : decl;
3302 /* Construct the initialization value for reduction operation OP. */
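/* E.g. (illustrative): reduction(+:x) on a double initializes the private
   copy to 0.0, reduction(max:x) on an int to INT_MIN, and reduction(&:x)
   on an unsigned int to ~0U.  */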
3304 tree
3305 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3307 switch (op)
3309 case PLUS_EXPR:
3310 case MINUS_EXPR:
3311 case BIT_IOR_EXPR:
3312 case BIT_XOR_EXPR:
3313 case TRUTH_OR_EXPR:
3314 case TRUTH_ORIF_EXPR:
3315 case TRUTH_XOR_EXPR:
3316 case NE_EXPR:
3317 return build_zero_cst (type);
3319 case MULT_EXPR:
3320 case TRUTH_AND_EXPR:
3321 case TRUTH_ANDIF_EXPR:
3322 case EQ_EXPR:
3323 return fold_convert_loc (loc, type, integer_one_node);
3325 case BIT_AND_EXPR:
3326 return fold_convert_loc (loc, type, integer_minus_one_node);
3328 case MAX_EXPR:
3329 if (SCALAR_FLOAT_TYPE_P (type))
3331 REAL_VALUE_TYPE max, min;
3332 if (HONOR_INFINITIES (type))
3334 real_inf (&max);
3335 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3337 else
3338 real_maxval (&min, 1, TYPE_MODE (type));
3339 return build_real (type, min);
3341 else if (POINTER_TYPE_P (type))
3343 wide_int min
3344 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3345 return wide_int_to_tree (type, min);
3347 else
3349 gcc_assert (INTEGRAL_TYPE_P (type));
3350 return TYPE_MIN_VALUE (type);
3353 case MIN_EXPR:
3354 if (SCALAR_FLOAT_TYPE_P (type))
3356 REAL_VALUE_TYPE max;
3357 if (HONOR_INFINITIES (type))
3358 real_inf (&max);
3359 else
3360 real_maxval (&max, 0, TYPE_MODE (type));
3361 return build_real (type, max);
3363 else if (POINTER_TYPE_P (type))
3365 wide_int max
3366 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3367 return wide_int_to_tree (type, max);
3369 else
3371 gcc_assert (INTEGRAL_TYPE_P (type));
3372 return TYPE_MAX_VALUE (type);
3375 default:
3376 gcc_unreachable ();
3380 /* Construct the initialization value for reduction CLAUSE. */
3382 tree
3383 omp_reduction_init (tree clause, tree type)
3385 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3386 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3389 /* Return alignment to be assumed for var in CLAUSE, which should be
3390 OMP_CLAUSE_ALIGNED. */
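/* E.g. (illustrative): for "#pragma omp simd aligned(p : 32)" this returns
   32; without an explicit alignment, the result is derived from the
   target's preferred SIMD modes below, e.g. 32 where 256-bit vectors are
   preferred.  */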
3392 static tree
3393 omp_clause_aligned_alignment (tree clause)
3395 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3396 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3398	  /* Otherwise return the implementation-defined alignment.  */
3399 unsigned int al = 1;
3400 opt_scalar_mode mode_iter;
3401 auto_vector_sizes sizes;
3402 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3403 poly_uint64 vs = 0;
3404 for (unsigned int i = 0; i < sizes.length (); ++i)
3405 vs = ordered_max (vs, sizes[i]);
3406 static enum mode_class classes[]
3407 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3408 for (int i = 0; i < 4; i += 2)
3409 /* The for loop above dictates that we only walk through scalar classes. */
3410 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3412 scalar_mode mode = mode_iter.require ();
3413 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3414 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3415 continue;
3416 while (maybe_ne (vs, 0U)
3417 && known_lt (GET_MODE_SIZE (vmode), vs)
3418 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3419 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3421 tree type = lang_hooks.types.type_for_mode (mode, 1);
3422 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3423 continue;
3424 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3425 GET_MODE_SIZE (mode));
3426 type = build_vector_type (type, nelts);
3427 if (TYPE_MODE (type) != vmode)
3428 continue;
3429 if (TYPE_ALIGN_UNIT (type) > al)
3430 al = TYPE_ALIGN_UNIT (type);
3432 return build_int_cst (integer_type_node, al);
3436 /* This structure is part of the interface between lower_rec_simd_input_clauses
3437 and lower_rec_input_clauses. */
3439 struct omplow_simd_context {
3440 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3441 tree idx;
3442 tree lane;
3443 vec<tree, va_heap> simt_eargs;
3444 gimple_seq simt_dlist;
3445 poly_uint64_pod max_vf;
3446 bool is_simt;
3449 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3450 privatization. */
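/* A sketch of the scheme (illustrative): a private scalar "x" becomes an
   "omp simd array" D.x[max_vf]; references to x in the loop body are
   rewritten via DECL_VALUE_EXPR to D.x[lane], while the per-lane
   initialization and copy-out sequences index the array with idx.  On
   SIMT targets the variable instead stays scalar and is marked
   "omp simt private".  */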
3452 static bool
3453 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3454 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3456 if (known_eq (sctx->max_vf, 0U))
3458 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3459 if (maybe_gt (sctx->max_vf, 1U))
3461 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3462 OMP_CLAUSE_SAFELEN);
3463 if (c)
3465 poly_uint64 safe_len;
3466 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3467 || maybe_lt (safe_len, 1U))
3468 sctx->max_vf = 1;
3469 else
3470 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3473 if (maybe_gt (sctx->max_vf, 1U))
3475 sctx->idx = create_tmp_var (unsigned_type_node);
3476 sctx->lane = create_tmp_var (unsigned_type_node);
3479 if (known_eq (sctx->max_vf, 1U))
3480 return false;
3482 if (sctx->is_simt)
3484 if (is_gimple_reg (new_var))
3486 ivar = lvar = new_var;
3487 return true;
3489 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3490 ivar = lvar = create_tmp_var (type);
3491 TREE_ADDRESSABLE (ivar) = 1;
3492 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3493 NULL, DECL_ATTRIBUTES (ivar));
3494 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3495 tree clobber = build_constructor (type, NULL);
3496 TREE_THIS_VOLATILE (clobber) = 1;
3497 gimple *g = gimple_build_assign (ivar, clobber);
3498 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3500 else
3502 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3503 tree avar = create_tmp_var_raw (atype);
3504 if (TREE_ADDRESSABLE (new_var))
3505 TREE_ADDRESSABLE (avar) = 1;
3506 DECL_ATTRIBUTES (avar)
3507 = tree_cons (get_identifier ("omp simd array"), NULL,
3508 DECL_ATTRIBUTES (avar));
3509 gimple_add_tmp_var (avar);
3510 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3511 NULL_TREE, NULL_TREE);
3512 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3513 NULL_TREE, NULL_TREE);
3515 if (DECL_P (new_var))
3517 SET_DECL_VALUE_EXPR (new_var, lvar);
3518 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3520 return true;
3523 /* Helper function of lower_rec_input_clauses. For a reference
3524 in simd reduction, add an underlying variable it will reference. */
3526 static void
3527 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3529 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3530 if (TREE_CONSTANT (z))
3532 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3533 get_name (new_vard));
3534 gimple_add_tmp_var (z);
3535 TREE_ADDRESSABLE (z) = 1;
3536 z = build_fold_addr_expr_loc (loc, z);
3537 gimplify_assign (new_vard, z, ilist);
3541 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3542 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3543 private variables. Initialization statements go in ILIST, while calls
3544 to destructors go in DLIST. */
3546 static void
3547 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3548 omp_context *ctx, struct omp_for_data *fd)
3550 tree c, dtor, copyin_seq, x, ptr;
3551 bool copyin_by_ref = false;
3552 bool lastprivate_firstprivate = false;
3553 bool reduction_omp_orig_ref = false;
3554 int pass;
3555 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3556 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3557 omplow_simd_context sctx = omplow_simd_context ();
3558 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3559 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3560 gimple_seq llist[3] = { };
3562 copyin_seq = NULL;
3563 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3565 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3566 with data sharing clauses referencing variable sized vars. That
3567 is unnecessarily hard to support and very unlikely to result in
3568 vectorized code anyway. */
3569 if (is_simd)
3570 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3571 switch (OMP_CLAUSE_CODE (c))
3573 case OMP_CLAUSE_LINEAR:
3574 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3575 sctx.max_vf = 1;
3576 /* FALLTHRU */
3577 case OMP_CLAUSE_PRIVATE:
3578 case OMP_CLAUSE_FIRSTPRIVATE:
3579 case OMP_CLAUSE_LASTPRIVATE:
3580 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3581 sctx.max_vf = 1;
3582 break;
3583 case OMP_CLAUSE_REDUCTION:
3584 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3585 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3586 sctx.max_vf = 1;
3587 break;
3588 default:
3589 continue;
3592 /* Add a placeholder for simduid. */
3593 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3594 sctx.simt_eargs.safe_push (NULL_TREE);
3596 /* Do all the fixed sized types in the first pass, and the variable sized
3597 types in the second pass. This makes sure that the scalar arguments to
3598 the variable sized types are processed before we use them in the
3599 variable sized operations. */
3600 for (pass = 0; pass < 2; ++pass)
3602 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3604 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3605 tree var, new_var;
3606 bool by_ref;
3607 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3609 switch (c_kind)
3611 case OMP_CLAUSE_PRIVATE:
3612 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3613 continue;
3614 break;
3615 case OMP_CLAUSE_SHARED:
3616	      /* Ignore shared directives in a teams construct.  */
3617 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3618 continue;
3619 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3621 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3622 || is_global_var (OMP_CLAUSE_DECL (c)));
3623 continue;
3625 case OMP_CLAUSE_FIRSTPRIVATE:
3626 case OMP_CLAUSE_COPYIN:
3627 break;
3628 case OMP_CLAUSE_LINEAR:
3629 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3630 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3631 lastprivate_firstprivate = true;
3632 break;
3633 case OMP_CLAUSE_REDUCTION:
3634 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3635 reduction_omp_orig_ref = true;
3636 break;
3637 case OMP_CLAUSE__LOOPTEMP_:
3638 /* Handle _looptemp_ clauses only on parallel/task. */
3639 if (fd)
3640 continue;
3641 break;
3642 case OMP_CLAUSE_LASTPRIVATE:
3643 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3645 lastprivate_firstprivate = true;
3646 if (pass != 0 || is_taskloop_ctx (ctx))
3647 continue;
3649	  /* Even without a corresponding firstprivate, if the
3650	     decl is Fortran allocatable, it needs an outer var
3651	     reference.  */
3652 else if (pass == 0
3653 && lang_hooks.decls.omp_private_outer_ref
3654 (OMP_CLAUSE_DECL (c)))
3655 lastprivate_firstprivate = true;
3656 break;
3657 case OMP_CLAUSE_ALIGNED:
3658 if (pass == 0)
3659 continue;
3660 var = OMP_CLAUSE_DECL (c);
3661 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3662 && !is_global_var (var))
3664 new_var = maybe_lookup_decl (var, ctx);
3665 if (new_var == NULL_TREE)
3666 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3667 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3668 tree alarg = omp_clause_aligned_alignment (c);
3669 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3670 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3671 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3672 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3673 gimplify_and_add (x, ilist);
3675 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3676 && is_global_var (var))
3678 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3679 new_var = lookup_decl (var, ctx);
3680 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3681 t = build_fold_addr_expr_loc (clause_loc, t);
3682 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3683 tree alarg = omp_clause_aligned_alignment (c);
3684 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3685 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3686 t = fold_convert_loc (clause_loc, ptype, t);
3687 x = create_tmp_var (ptype);
3688 t = build2 (MODIFY_EXPR, ptype, x, t);
3689 gimplify_and_add (t, ilist);
3690 t = build_simple_mem_ref_loc (clause_loc, x);
3691 SET_DECL_VALUE_EXPR (new_var, t);
3692 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3694 continue;
3695 default:
3696 continue;
3699 new_var = var = OMP_CLAUSE_DECL (c);
3700 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3702 var = TREE_OPERAND (var, 0);
3703 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3704 var = TREE_OPERAND (var, 0);
3705 if (TREE_CODE (var) == INDIRECT_REF
3706 || TREE_CODE (var) == ADDR_EXPR)
3707 var = TREE_OPERAND (var, 0);
3708 if (is_variable_sized (var))
3710 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3711 var = DECL_VALUE_EXPR (var);
3712 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3713 var = TREE_OPERAND (var, 0);
3714 gcc_assert (DECL_P (var));
3716 new_var = var;
3718 if (c_kind != OMP_CLAUSE_COPYIN)
3719 new_var = lookup_decl (var, ctx);
3721 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3723 if (pass != 0)
3724 continue;
3726 /* C/C++ array section reductions. */
3727 else if (c_kind == OMP_CLAUSE_REDUCTION
3728 && var != OMP_CLAUSE_DECL (c))
3730 if (pass == 0)
3731 continue;
3733 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3734 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3735 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3737 tree b = TREE_OPERAND (orig_var, 1);
3738 b = maybe_lookup_decl (b, ctx);
3739 if (b == NULL)
3741 b = TREE_OPERAND (orig_var, 1);
3742 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3744 if (integer_zerop (bias))
3745 bias = b;
3746 else
3748 bias = fold_convert_loc (clause_loc,
3749 TREE_TYPE (b), bias);
3750 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3751 TREE_TYPE (b), b, bias);
3753 orig_var = TREE_OPERAND (orig_var, 0);
3755 if (TREE_CODE (orig_var) == INDIRECT_REF
3756 || TREE_CODE (orig_var) == ADDR_EXPR)
3757 orig_var = TREE_OPERAND (orig_var, 0);
3758 tree d = OMP_CLAUSE_DECL (c);
3759 tree type = TREE_TYPE (d);
3760 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3761 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3762 const char *name = get_name (orig_var);
3763 if (TREE_CONSTANT (v))
3765 x = create_tmp_var_raw (type, name);
3766 gimple_add_tmp_var (x);
3767 TREE_ADDRESSABLE (x) = 1;
3768 x = build_fold_addr_expr_loc (clause_loc, x);
3770 else
3772 tree atmp
3773 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3774 tree t = maybe_lookup_decl (v, ctx);
3775 if (t)
3776 v = t;
3777 else
3778 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3779 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3780 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3781 TREE_TYPE (v), v,
3782 build_int_cst (TREE_TYPE (v), 1));
3783 t = fold_build2_loc (clause_loc, MULT_EXPR,
3784 TREE_TYPE (v), t,
3785 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3786 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3787 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3790 tree ptype = build_pointer_type (TREE_TYPE (type));
3791 x = fold_convert_loc (clause_loc, ptype, x);
3792 tree y = create_tmp_var (ptype, name);
3793 gimplify_assign (y, x, ilist);
3794 x = y;
3795 tree yb = y;
3797 if (!integer_zerop (bias))
3799 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3800 bias);
3801	      yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3802				     x);
3803 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3804 pointer_sized_int_node, yb, bias);
3805 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3806 yb = create_tmp_var (ptype, name);
3807 gimplify_assign (yb, x, ilist);
3808 x = yb;
3811 d = TREE_OPERAND (d, 0);
3812 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3813 d = TREE_OPERAND (d, 0);
3814 if (TREE_CODE (d) == ADDR_EXPR)
3816 if (orig_var != var)
3818 gcc_assert (is_variable_sized (orig_var));
3819		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3820					    x);
3821 gimplify_assign (new_var, x, ilist);
3822 tree new_orig_var = lookup_decl (orig_var, ctx);
3823 tree t = build_fold_indirect_ref (new_var);
3824 DECL_IGNORED_P (new_var) = 0;
3825		      TREE_THIS_NOTRAP (t) = 1;
3826 SET_DECL_VALUE_EXPR (new_orig_var, t);
3827 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3829 else
3831 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3832 build_int_cst (ptype, 0));
3833 SET_DECL_VALUE_EXPR (new_var, x);
3834 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3837 else
3839 gcc_assert (orig_var == var);
3840 if (TREE_CODE (d) == INDIRECT_REF)
3842 x = create_tmp_var (ptype, name);
3843 TREE_ADDRESSABLE (x) = 1;
3844 gimplify_assign (x, yb, ilist);
3845 x = build_fold_addr_expr_loc (clause_loc, x);
3847 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3848 gimplify_assign (new_var, x, ilist);
3850 tree y1 = create_tmp_var (ptype, NULL);
3851 gimplify_assign (y1, y, ilist);
3852 tree i2 = NULL_TREE, y2 = NULL_TREE;
3853 tree body2 = NULL_TREE, end2 = NULL_TREE;
3854 tree y3 = NULL_TREE, y4 = NULL_TREE;
3855 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3857 y2 = create_tmp_var (ptype, NULL);
3858 gimplify_assign (y2, y, ilist);
3859 tree ref = build_outer_var_ref (var, ctx);
3860 /* For ref build_outer_var_ref already performs this. */
3861 if (TREE_CODE (d) == INDIRECT_REF)
3862 gcc_assert (omp_is_reference (var));
3863 else if (TREE_CODE (d) == ADDR_EXPR)
3864 ref = build_fold_addr_expr (ref);
3865 else if (omp_is_reference (var))
3866 ref = build_fold_addr_expr (ref);
3867 ref = fold_convert_loc (clause_loc, ptype, ref);
3868 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3869 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3871 y3 = create_tmp_var (ptype, NULL);
3872 gimplify_assign (y3, unshare_expr (ref), ilist);
3874 if (is_simd)
3876 y4 = create_tmp_var (ptype, NULL);
3877 gimplify_assign (y4, ref, dlist);
3880 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3881 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3882 tree body = create_artificial_label (UNKNOWN_LOCATION);
3883 tree end = create_artificial_label (UNKNOWN_LOCATION);
3884 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3885 if (y2)
3887 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3888 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3889 body2 = create_artificial_label (UNKNOWN_LOCATION);
3890 end2 = create_artificial_label (UNKNOWN_LOCATION);
3891 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3893 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3895 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3896 tree decl_placeholder
3897 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3898 SET_DECL_VALUE_EXPR (decl_placeholder,
3899 build_simple_mem_ref (y1));
3900 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3901 SET_DECL_VALUE_EXPR (placeholder,
3902 y3 ? build_simple_mem_ref (y3)
3903 : error_mark_node);
3904 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3905 x = lang_hooks.decls.omp_clause_default_ctor
3906 (c, build_simple_mem_ref (y1),
3907 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3908 if (x)
3909 gimplify_and_add (x, ilist);
3910 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3912 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3913 lower_omp (&tseq, ctx);
3914 gimple_seq_add_seq (ilist, tseq);
3916 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3917 if (is_simd)
3919 SET_DECL_VALUE_EXPR (decl_placeholder,
3920 build_simple_mem_ref (y2));
3921 SET_DECL_VALUE_EXPR (placeholder,
3922 build_simple_mem_ref (y4));
3923 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3924 lower_omp (&tseq, ctx);
3925 gimple_seq_add_seq (dlist, tseq);
3926 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3928 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3929 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3930 x = lang_hooks.decls.omp_clause_dtor
3931 (c, build_simple_mem_ref (y2));
3932 if (x)
3934 gimple_seq tseq = NULL;
3935 dtor = x;
3936 gimplify_stmt (&dtor, &tseq);
3937 gimple_seq_add_seq (dlist, tseq);
3940 else
3942 x = omp_reduction_init (c, TREE_TYPE (type));
3943 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3945 /* reduction(-:var) sums up the partial results, so it
3946 acts identically to reduction(+:var). */
3947 if (code == MINUS_EXPR)
3948 code = PLUS_EXPR;
3950 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3951 if (is_simd)
3953 x = build2 (code, TREE_TYPE (type),
3954 build_simple_mem_ref (y4),
3955 build_simple_mem_ref (y2));
3956 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3959 gimple *g
3960 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3961 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3962 gimple_seq_add_stmt (ilist, g);
3963 if (y3)
3965 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3966 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3967 gimple_seq_add_stmt (ilist, g);
3969 g = gimple_build_assign (i, PLUS_EXPR, i,
3970 build_int_cst (TREE_TYPE (i), 1));
3971 gimple_seq_add_stmt (ilist, g);
3972 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3973 gimple_seq_add_stmt (ilist, g);
3974 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3975 if (y2)
3977 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3978 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3979 gimple_seq_add_stmt (dlist, g);
3980 if (y4)
3982 g = gimple_build_assign
3983 (y4, POINTER_PLUS_EXPR, y4,
3984 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3985 gimple_seq_add_stmt (dlist, g);
3987 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3988 build_int_cst (TREE_TYPE (i2), 1));
3989 gimple_seq_add_stmt (dlist, g);
3990 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3991 gimple_seq_add_stmt (dlist, g);
3992 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3994 continue;
3996 else if (is_variable_sized (var))
3998 /* For variable sized types, we need to allocate the
3999 actual storage here. Call alloca and store the
4000 result in the pointer decl that we created elsewhere. */
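/* An editorial sketch (not from the original source) of what this branch
   produces for a privatized VLA such as "char buf[n]":

     void *tmp = __builtin_alloca_with_align (size, align);
     buf$ptr = (char (*)[n]) tmp;

   where "buf$ptr" stands for the pointer decl whose dereference is the
   DECL_VALUE_EXPR of the privatized variable.  */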
4001 if (pass == 0)
4002 continue;
4004 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4006 gcall *stmt;
4007 tree tmp, atmp;
4009 ptr = DECL_VALUE_EXPR (new_var);
4010 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4011 ptr = TREE_OPERAND (ptr, 0);
4012 gcc_assert (DECL_P (ptr));
4013 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4015 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
4016 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4017 stmt = gimple_build_call (atmp, 2, x,
4018 size_int (DECL_ALIGN (var)));
4019 tmp = create_tmp_var_raw (ptr_type_node);
4020 gimple_add_tmp_var (tmp);
4021 gimple_call_set_lhs (stmt, tmp);
4023 gimple_seq_add_stmt (ilist, stmt);
4025 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4026 gimplify_assign (ptr, x, ilist);
4029 else if (omp_is_reference (var))
4031 /* For references that are being privatized for Fortran,
4032 allocate new backing storage for the new pointer
4033 variable. This allows us to avoid changing all the
4034 code that expects a pointer to something that expects
4035 a direct variable. */
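/* An editorial sketch, with illustrative names: for a by-reference
   variable "x" (e.g. a Fortran dummy argument) the new pointer is
   simply pointed at fresh backing storage,

     x$ref = &x$tmp;                                   // constant size
     x$ref = __builtin_alloca_with_align (size, al);   // otherwise

   so all existing dereferences of the pointer keep working.  */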
4036 if (pass == 0)
4037 continue;
4039 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4040 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4042 x = build_receiver_ref (var, false, ctx);
4043 x = build_fold_addr_expr_loc (clause_loc, x);
4045 else if (TREE_CONSTANT (x))
4047 /* For reduction in SIMD loop, defer adding the
4048 initialization of the reference, because if we decide
4049 to use a SIMD array for it, the initialization could cause
4050 expansion ICE. */
4051 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4052 x = NULL_TREE;
4053 else
4055 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4056 get_name (var));
4057 gimple_add_tmp_var (x);
4058 TREE_ADDRESSABLE (x) = 1;
4059 x = build_fold_addr_expr_loc (clause_loc, x);
4062 else
4064 tree atmp
4065 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4066 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4067 tree al = size_int (TYPE_ALIGN (rtype));
4068 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4071 if (x)
4073 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4074 gimplify_assign (new_var, x, ilist);
4077 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4079 else if (c_kind == OMP_CLAUSE_REDUCTION
4080 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4082 if (pass == 0)
4083 continue;
4085 else if (pass != 0)
4086 continue;
4088 switch (OMP_CLAUSE_CODE (c))
4090 case OMP_CLAUSE_SHARED:
4091 /* Ignore shared directives in teams construct. */
4092 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4093 continue;
4094 /* Shared global vars are just accessed directly. */
4095 if (is_global_var (new_var))
4096 break;
4097 /* For taskloop firstprivate/lastprivate, represented
4098 as firstprivate and shared clause on the task, new_var
4099 is the firstprivate var. */
4100 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4101 break;
4102 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4103 needs to be delayed until after fixup_child_record_type so
4104 that we get the correct type during the dereference. */
4105 by_ref = use_pointer_for_field (var, ctx);
4106 x = build_receiver_ref (var, by_ref, ctx);
4107 SET_DECL_VALUE_EXPR (new_var, x);
4108 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4110 /* ??? If VAR is not passed by reference, and the variable
4111 hasn't been initialized yet, then we'll get a warning for
4112 the store into the omp_data_s structure. Ideally, we'd be
4113 able to notice this and not store anything at all, but
4114 we're generating code too early. Suppress the warning. */
4115 if (!by_ref)
4116 TREE_NO_WARNING (var) = 1;
4117 break;
4119 case OMP_CLAUSE_LASTPRIVATE:
4120 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4121 break;
4122 /* FALLTHRU */
4124 case OMP_CLAUSE_PRIVATE:
4125 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4126 x = build_outer_var_ref (var, ctx);
4127 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4129 if (is_task_ctx (ctx))
4130 x = build_receiver_ref (var, false, ctx);
4131 else
4132 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4134 else
4135 x = NULL;
4136 do_private:
4137 tree nx;
4138 nx = lang_hooks.decls.omp_clause_default_ctor
4139 (c, unshare_expr (new_var), x);
4140 if (is_simd)
4142 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4143 if ((TREE_ADDRESSABLE (new_var) || nx || y
4144 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4145 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4146 ivar, lvar))
4148 if (nx)
4149 x = lang_hooks.decls.omp_clause_default_ctor
4150 (c, unshare_expr (ivar), x);
4151 if (nx && x)
4152 gimplify_and_add (x, &llist[0]);
4153 if (y)
4155 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4156 if (y)
4158 gimple_seq tseq = NULL;
4160 dtor = y;
4161 gimplify_stmt (&dtor, &tseq);
4162 gimple_seq_add_seq (&llist[1], tseq);
4165 break;
4168 if (nx)
4169 gimplify_and_add (nx, ilist);
4170 /* FALLTHRU */
4172 do_dtor:
4173 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4174 if (x)
4176 gimple_seq tseq = NULL;
4178 dtor = x;
4179 gimplify_stmt (&dtor, &tseq);
4180 gimple_seq_add_seq (dlist, tseq);
4182 break;
4184 case OMP_CLAUSE_LINEAR:
4185 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4186 goto do_firstprivate;
4187 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4188 x = NULL;
4189 else
4190 x = build_outer_var_ref (var, ctx);
4191 goto do_private;
4193 case OMP_CLAUSE_FIRSTPRIVATE:
4194 if (is_task_ctx (ctx))
4196 if (omp_is_reference (var) || is_variable_sized (var))
4197 goto do_dtor;
4198 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4199 ctx))
4200 || use_pointer_for_field (var, NULL))
4202 x = build_receiver_ref (var, false, ctx);
4203 SET_DECL_VALUE_EXPR (new_var, x);
4204 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4205 goto do_dtor;
4208 do_firstprivate:
4209 x = build_outer_var_ref (var, ctx);
4210 if (is_simd)
4212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4213 && gimple_omp_for_combined_into_p (ctx->stmt))
4215 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4216 tree stept = TREE_TYPE (t);
4217 tree ct = omp_find_clause (clauses,
4218 OMP_CLAUSE__LOOPTEMP_);
4219 gcc_assert (ct);
4220 tree l = OMP_CLAUSE_DECL (ct);
4221 tree n1 = fd->loop.n1;
4222 tree step = fd->loop.step;
4223 tree itype = TREE_TYPE (l);
4224 if (POINTER_TYPE_P (itype))
4225 itype = signed_type_for (itype);
4226 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4227 if (TYPE_UNSIGNED (itype)
4228 && fd->loop.cond_code == GT_EXPR)
4229 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4230 fold_build1 (NEGATE_EXPR, itype, l),
4231 fold_build1 (NEGATE_EXPR,
4232 itype, step));
4233 else
4234 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4235 t = fold_build2 (MULT_EXPR, stept,
4236 fold_convert (stept, l), t);
4238 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4240 x = lang_hooks.decls.omp_clause_linear_ctor
4241 (c, new_var, x, t);
4242 gimplify_and_add (x, ilist);
4243 goto do_dtor;
4246 if (POINTER_TYPE_P (TREE_TYPE (x)))
4247 x = fold_build2 (POINTER_PLUS_EXPR,
4248 TREE_TYPE (x), x, t);
4249 else
4250 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4253 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4254 || TREE_ADDRESSABLE (new_var))
4255 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4256 ivar, lvar))
4258 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4260 tree iv = create_tmp_var (TREE_TYPE (new_var));
4261 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4262 gimplify_and_add (x, ilist);
4263 gimple_stmt_iterator gsi
4264 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4265 gassign *g
4266 = gimple_build_assign (unshare_expr (lvar), iv);
4267 gsi_insert_before_without_update (&gsi, g,
4268 GSI_SAME_STMT);
4269 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4270 enum tree_code code = PLUS_EXPR;
4271 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4272 code = POINTER_PLUS_EXPR;
4273 g = gimple_build_assign (iv, code, iv, t);
4274 gsi_insert_before_without_update (&gsi, g,
4275 GSI_SAME_STMT);
4276 break;
4278 x = lang_hooks.decls.omp_clause_copy_ctor
4279 (c, unshare_expr (ivar), x);
4280 gimplify_and_add (x, &llist[0]);
4281 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4282 if (x)
4284 gimple_seq tseq = NULL;
4286 dtor = x;
4287 gimplify_stmt (&dtor, &tseq);
4288 gimple_seq_add_seq (&llist[1], tseq);
4290 break;
4293 x = lang_hooks.decls.omp_clause_copy_ctor
4294 (c, unshare_expr (new_var), x);
4295 gimplify_and_add (x, ilist);
4296 goto do_dtor;
4298 case OMP_CLAUSE__LOOPTEMP_:
4299 gcc_assert (is_taskreg_ctx (ctx));
4300 x = build_outer_var_ref (var, ctx);
4301 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4302 gimplify_and_add (x, ilist);
4303 break;
4305 case OMP_CLAUSE_COPYIN:
4306 by_ref = use_pointer_for_field (var, NULL);
4307 x = build_receiver_ref (var, by_ref, ctx);
4308 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4309 append_to_statement_list (x, &copyin_seq);
4310 copyin_by_ref |= by_ref;
4311 break;
4313 case OMP_CLAUSE_REDUCTION:
4314 /* OpenACC reductions are initialized using the
4315 GOACC_REDUCTION internal function. */
4316 if (is_gimple_omp_oacc (ctx->stmt))
4317 break;
4318 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4320 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4321 gimple *tseq;
4322 x = build_outer_var_ref (var, ctx);
4324 if (omp_is_reference (var)
4325 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4326 TREE_TYPE (x)))
4327 x = build_fold_addr_expr_loc (clause_loc, x);
4328 SET_DECL_VALUE_EXPR (placeholder, x);
4329 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4330 tree new_vard = new_var;
4331 if (omp_is_reference (var))
4333 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4334 new_vard = TREE_OPERAND (new_var, 0);
4335 gcc_assert (DECL_P (new_vard));
4337 if (is_simd
4338 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4339 ivar, lvar))
4341 if (new_vard == new_var)
4343 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4344 SET_DECL_VALUE_EXPR (new_var, ivar);
4346 else
4348 SET_DECL_VALUE_EXPR (new_vard,
4349 build_fold_addr_expr (ivar));
4350 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4352 x = lang_hooks.decls.omp_clause_default_ctor
4353 (c, unshare_expr (ivar),
4354 build_outer_var_ref (var, ctx));
4355 if (x)
4356 gimplify_and_add (x, &llist[0]);
4357 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4359 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4360 lower_omp (&tseq, ctx);
4361 gimple_seq_add_seq (&llist[0], tseq);
4363 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4364 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4365 lower_omp (&tseq, ctx);
4366 gimple_seq_add_seq (&llist[1], tseq);
4367 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4368 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4369 if (new_vard == new_var)
4370 SET_DECL_VALUE_EXPR (new_var, lvar);
4371 else
4372 SET_DECL_VALUE_EXPR (new_vard,
4373 build_fold_addr_expr (lvar));
4374 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4375 if (x)
4377 tseq = NULL;
4378 dtor = x;
4379 gimplify_stmt (&dtor, &tseq);
4380 gimple_seq_add_seq (&llist[1], tseq);
4382 break;
4384 /* If this is a reference to a constant-size reduction var
4385 with a placeholder, we haven't emitted the initializer
4386 for it because it is undesirable if SIMD arrays are used.
4387 But if they aren't used, we need to emit the deferred
4388 initialization now. */
4389 else if (omp_is_reference (var) && is_simd)
4390 handle_simd_reference (clause_loc, new_vard, ilist);
4391 x = lang_hooks.decls.omp_clause_default_ctor
4392 (c, unshare_expr (new_var),
4393 build_outer_var_ref (var, ctx));
4394 if (x)
4395 gimplify_and_add (x, ilist);
4396 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4398 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4399 lower_omp (&tseq, ctx);
4400 gimple_seq_add_seq (ilist, tseq);
4402 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4403 if (is_simd)
4405 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4406 lower_omp (&tseq, ctx);
4407 gimple_seq_add_seq (dlist, tseq);
4408 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4410 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4411 goto do_dtor;
4413 else
4415 x = omp_reduction_init (c, TREE_TYPE (new_var));
4416 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4417 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4419 /* reduction(-:var) sums up the partial results, so it
4420 acts identically to reduction(+:var). */
4421 if (code == MINUS_EXPR)
4422 code = PLUS_EXPR;
4424 tree new_vard = new_var;
4425 if (is_simd && omp_is_reference (var))
4427 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4428 new_vard = TREE_OPERAND (new_var, 0);
4429 gcc_assert (DECL_P (new_vard));
4431 if (is_simd
4432 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4433 ivar, lvar))
4435 tree ref = build_outer_var_ref (var, ctx);
4437 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4439 if (sctx.is_simt)
4441 if (!simt_lane)
4442 simt_lane = create_tmp_var (unsigned_type_node);
4443 x = build_call_expr_internal_loc
4444 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4445 TREE_TYPE (ivar), 2, ivar, simt_lane);
4446 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4447 gimplify_assign (ivar, x, &llist[2]);
4449 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4450 ref = build_outer_var_ref (var, ctx);
4451 gimplify_assign (ref, x, &llist[1]);
4453 if (new_vard != new_var)
4455 SET_DECL_VALUE_EXPR (new_vard,
4456 build_fold_addr_expr (lvar));
4457 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4460 else
4462 if (omp_is_reference (var) && is_simd)
4463 handle_simd_reference (clause_loc, new_vard, ilist);
4464 gimplify_assign (new_var, x, ilist);
4465 if (is_simd)
4467 tree ref = build_outer_var_ref (var, ctx);
4469 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4470 ref = build_outer_var_ref (var, ctx);
4471 gimplify_assign (ref, x, dlist);
4475 break;
4477 default:
4478 gcc_unreachable ();
4483 if (known_eq (sctx.max_vf, 1U))
4484 sctx.is_simt = false;
4486 if (sctx.lane || sctx.is_simt)
4488 uid = create_tmp_var (ptr_type_node, "simduid");
4489 /* Don't want uninitialized warnings on simduid; it is always
4490 uninitialized, since we use it only for its DECL_UID, never its value. */
4491 TREE_NO_WARNING (uid) = 1;
4492 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4493 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4494 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4495 gimple_omp_for_set_clauses (ctx->stmt, c);
4497 /* Emit calls denoting privatized variables and initializing a pointer
4498 to a structure that holds private variables as fields, after the ompdevlow pass. */
4499 if (sctx.is_simt)
4501 sctx.simt_eargs[0] = uid;
4502 gimple *g
4503 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4504 gimple_call_set_lhs (g, uid);
4505 gimple_seq_add_stmt (ilist, g);
4506 sctx.simt_eargs.release ();
4508 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4509 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4510 gimple_call_set_lhs (g, simtrec);
4511 gimple_seq_add_stmt (ilist, g);
4513 if (sctx.lane)
4515 gimple *g
4516 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4517 gimple_call_set_lhs (g, sctx.lane);
4518 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4519 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4520 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4521 build_int_cst (unsigned_type_node, 0));
4522 gimple_seq_add_stmt (ilist, g);
4523 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
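/* An editorial sketch (pseudo-GIMPLE) of the loop built below:

     simt_vf = .GOMP_SIMT_VF ();
     simt_lane = 1;
     do {
       ...llist[2]: x = x OP .GOMP_SIMT_XCHG_BFLY (x, simt_lane);...
       simt_lane <<= 1;
     } while (simt_lane < simt_vf);

   i.e. a butterfly reduction over the SIMT width in log2 steps.  */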
4524 if (llist[2])
4526 tree simt_vf = create_tmp_var (unsigned_type_node);
4527 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4528 gimple_call_set_lhs (g, simt_vf);
4529 gimple_seq_add_stmt (dlist, g);
4531 tree t = build_int_cst (unsigned_type_node, 1);
4532 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4533 gimple_seq_add_stmt (dlist, g);
4535 t = build_int_cst (unsigned_type_node, 0);
4536 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4537 gimple_seq_add_stmt (dlist, g);
4539 tree body = create_artificial_label (UNKNOWN_LOCATION);
4540 tree header = create_artificial_label (UNKNOWN_LOCATION);
4541 tree end = create_artificial_label (UNKNOWN_LOCATION);
4542 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4543 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4545 gimple_seq_add_seq (dlist, llist[2]);
4547 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4548 gimple_seq_add_stmt (dlist, g);
4550 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4551 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4552 gimple_seq_add_stmt (dlist, g);
4554 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4556 for (int i = 0; i < 2; i++)
4557 if (llist[i])
4559 tree vf = create_tmp_var (unsigned_type_node);
4560 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4561 gimple_call_set_lhs (g, vf);
4562 gimple_seq *seq = i == 0 ? ilist : dlist;
4563 gimple_seq_add_stmt (seq, g);
4564 tree t = build_int_cst (unsigned_type_node, 0);
4565 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4566 gimple_seq_add_stmt (seq, g);
4567 tree body = create_artificial_label (UNKNOWN_LOCATION);
4568 tree header = create_artificial_label (UNKNOWN_LOCATION);
4569 tree end = create_artificial_label (UNKNOWN_LOCATION);
4570 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4571 gimple_seq_add_stmt (seq, gimple_build_label (body));
4572 gimple_seq_add_seq (seq, llist[i]);
4573 t = build_int_cst (unsigned_type_node, 1);
4574 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4575 gimple_seq_add_stmt (seq, g);
4576 gimple_seq_add_stmt (seq, gimple_build_label (header));
4577 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4578 gimple_seq_add_stmt (seq, g);
4579 gimple_seq_add_stmt (seq, gimple_build_label (end));
4582 if (sctx.is_simt)
4584 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4585 gimple *g
4586 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4587 gimple_seq_add_stmt (dlist, g);
4590 /* The copyin sequence is not to be executed by the main thread, since
4591 that would result in self-copies. Perhaps not visible to scalars,
4592 but it certainly is to C++ operator=. */
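/* A sketch of the emitted guard (editorial, not verbatim):

     if (__builtin_omp_get_thread_num () != 0)
       { ...copyin assignments... }

   so only the non-master threads copy the master's values in.  */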
4593 if (copyin_seq)
4595 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4596 0);
4597 x = build2 (NE_EXPR, boolean_type_node, x,
4598 build_int_cst (TREE_TYPE (x), 0));
4599 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4600 gimplify_and_add (x, ilist);
4603 /* If any copyin variable is passed by reference, we must ensure the
4604 master thread doesn't modify it before it is copied over in all
4605 threads. Similarly for variables in both firstprivate and
4606 lastprivate clauses we need to ensure the lastprivate copying
4607 happens after firstprivate copying in all threads. And similarly
4608 for UDRs if initializer expression refers to omp_orig. */
4609 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4611 /* Don't add any barrier for #pragma omp simd or
4612 #pragma omp distribute. */
4613 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4614 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4615 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4618 /* If max_vf is non-zero, then we can use only a vectorization factor
4619 up to the max_vf we chose. So stick it into the safelen clause. */
4620 if (maybe_ne (sctx.max_vf, 0U))
4622 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4623 OMP_CLAUSE_SAFELEN);
4624 poly_uint64 safe_len;
4625 if (c == NULL_TREE
4626 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4627 && maybe_gt (safe_len, sctx.max_vf)))
4629 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4630 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4631 sctx.max_vf);
4632 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4633 gimple_omp_for_set_clauses (ctx->stmt, c);
4639 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4640 both parallel and workshare constructs. PREDICATE may be NULL if it's
4641 always true. */
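/* As an editorial illustration, for "#pragma omp for lastprivate (x)"
   the sequence built here is roughly

     if (<thread executed the sequentially last iteration>)  // PREDICATE
       x = x$private;

   with the copy-out emitted unconditionally when PREDICATE is NULL;
   "x$private" is an illustrative name for the privatized copy.  */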
4643 static void
4644 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4645 omp_context *ctx)
4647 tree x, c, label = NULL, orig_clauses = clauses;
4648 bool par_clauses = false;
4649 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4651 /* Early exit if there are no lastprivate or linear clauses. */
4652 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4653 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4654 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4655 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4656 break;
4657 if (clauses == NULL)
4659 /* If this was a workshare clause, see if it had been combined
4660 with its parallel. In that case, look for the clauses on the
4661 parallel statement itself. */
4662 if (is_parallel_ctx (ctx))
4663 return;
4665 ctx = ctx->outer;
4666 if (ctx == NULL || !is_parallel_ctx (ctx))
4667 return;
4669 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4670 OMP_CLAUSE_LASTPRIVATE);
4671 if (clauses == NULL)
4672 return;
4673 par_clauses = true;
4676 bool maybe_simt = false;
4677 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4678 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4680 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4681 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4682 if (simduid)
4683 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4686 if (predicate)
4688 gcond *stmt;
4689 tree label_true, arm1, arm2;
4690 enum tree_code pred_code = TREE_CODE (predicate);
4692 label = create_artificial_label (UNKNOWN_LOCATION);
4693 label_true = create_artificial_label (UNKNOWN_LOCATION);
4694 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4696 arm1 = TREE_OPERAND (predicate, 0);
4697 arm2 = TREE_OPERAND (predicate, 1);
4698 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4699 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4701 else
4703 arm1 = predicate;
4704 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4705 arm2 = boolean_false_node;
4706 pred_code = NE_EXPR;
4708 if (maybe_simt)
4710 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4711 c = fold_convert (integer_type_node, c);
4712 simtcond = create_tmp_var (integer_type_node);
4713 gimplify_assign (simtcond, c, stmt_list);
4714 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4715 1, simtcond);
4716 c = create_tmp_var (integer_type_node);
4717 gimple_call_set_lhs (g, c);
4718 gimple_seq_add_stmt (stmt_list, g);
4719 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4720 label_true, label);
4722 else
4723 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4724 gimple_seq_add_stmt (stmt_list, stmt);
4725 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4728 for (c = clauses; c ;)
4730 tree var, new_var;
4731 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4733 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4734 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4735 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4737 var = OMP_CLAUSE_DECL (c);
4738 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4739 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4740 && is_taskloop_ctx (ctx))
4742 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4743 new_var = lookup_decl (var, ctx->outer);
4745 else
4747 new_var = lookup_decl (var, ctx);
4748 /* Avoid uninitialized warnings for lastprivate and
4749 for linear iterators. */
4750 if (predicate
4751 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4752 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4753 TREE_NO_WARNING (new_var) = 1;
4756 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4758 tree val = DECL_VALUE_EXPR (new_var);
4759 if (TREE_CODE (val) == ARRAY_REF
4760 && VAR_P (TREE_OPERAND (val, 0))
4761 && lookup_attribute ("omp simd array",
4762 DECL_ATTRIBUTES (TREE_OPERAND (val,
4763 0))))
4765 if (lastlane == NULL)
4767 lastlane = create_tmp_var (unsigned_type_node);
4768 gcall *g
4769 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4770 2, simduid,
4771 TREE_OPERAND (val, 1));
4772 gimple_call_set_lhs (g, lastlane);
4773 gimple_seq_add_stmt (stmt_list, g);
4775 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4776 TREE_OPERAND (val, 0), lastlane,
4777 NULL_TREE, NULL_TREE);
4780 else if (maybe_simt)
4782 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4783 ? DECL_VALUE_EXPR (new_var)
4784 : new_var);
4785 if (simtlast == NULL)
4787 simtlast = create_tmp_var (unsigned_type_node);
4788 gcall *g = gimple_build_call_internal
4789 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4790 gimple_call_set_lhs (g, simtlast);
4791 gimple_seq_add_stmt (stmt_list, g);
4793 x = build_call_expr_internal_loc
4794 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4795 TREE_TYPE (val), 2, val, simtlast);
4796 new_var = unshare_expr (new_var);
4797 gimplify_assign (new_var, x, stmt_list);
4798 new_var = unshare_expr (new_var);
4801 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4802 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4804 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4805 gimple_seq_add_seq (stmt_list,
4806 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4807 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4809 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4810 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4812 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4813 gimple_seq_add_seq (stmt_list,
4814 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4815 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4818 x = NULL_TREE;
4819 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4820 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4822 gcc_checking_assert (is_taskloop_ctx (ctx));
4823 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4824 ctx->outer->outer);
4825 if (is_global_var (ovar))
4826 x = ovar;
4828 if (!x)
4829 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4830 if (omp_is_reference (var))
4831 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4832 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4833 gimplify_and_add (x, stmt_list);
4835 c = OMP_CLAUSE_CHAIN (c);
4836 if (c == NULL && !par_clauses)
4838 /* If this was a workshare clause, see if it had been combined
4839 with its parallel. In that case, continue looking for the
4840 clauses also on the parallel statement itself. */
4841 if (is_parallel_ctx (ctx))
4842 break;
4844 ctx = ctx->outer;
4845 if (ctx == NULL || !is_parallel_ctx (ctx))
4846 break;
4848 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4849 OMP_CLAUSE_LASTPRIVATE);
4850 par_clauses = true;
4854 if (label)
4855 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4858 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4859 (which might be a placeholder). INNER is true if this is an inner
4860 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4861 join markers. Generate the before-loop forking sequence in
4862 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4863 general form of these sequences is
4865 GOACC_REDUCTION_SETUP
4866 GOACC_FORK
4867 GOACC_REDUCTION_INIT
4869 GOACC_REDUCTION_FINI
4870 GOACC_JOIN
4871 GOACC_REDUCTION_TEARDOWN. */
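/* For each reduction clause this materializes roughly as (a sketch;
   v1/v2/v3 name the temporaries created below):

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, off);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, off);
     ...loop...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, off);
     GOACC_JOIN
     outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op, off);  */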
4873 static void
4874 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4875 gcall *fork, gcall *join, gimple_seq *fork_seq,
4876 gimple_seq *join_seq, omp_context *ctx)
4878 gimple_seq before_fork = NULL;
4879 gimple_seq after_fork = NULL;
4880 gimple_seq before_join = NULL;
4881 gimple_seq after_join = NULL;
4882 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4883 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4884 unsigned offset = 0;
4886 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4887 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4889 tree orig = OMP_CLAUSE_DECL (c);
4890 tree var = maybe_lookup_decl (orig, ctx);
4891 tree ref_to_res = NULL_TREE;
4892 tree incoming, outgoing, v1, v2, v3;
4893 bool is_private = false;
4895 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4896 if (rcode == MINUS_EXPR)
4897 rcode = PLUS_EXPR;
4898 else if (rcode == TRUTH_ANDIF_EXPR)
4899 rcode = BIT_AND_EXPR;
4900 else if (rcode == TRUTH_ORIF_EXPR)
4901 rcode = BIT_IOR_EXPR;
4902 tree op = build_int_cst (unsigned_type_node, rcode);
4904 if (!var)
4905 var = orig;
4907 incoming = outgoing = var;
4909 if (!inner)
4911 /* See if an outer construct also reduces this variable. */
4912 omp_context *outer = ctx;
4914 while (omp_context *probe = outer->outer)
4916 enum gimple_code type = gimple_code (probe->stmt);
4917 tree cls;
4919 switch (type)
4921 case GIMPLE_OMP_FOR:
4922 cls = gimple_omp_for_clauses (probe->stmt);
4923 break;
4925 case GIMPLE_OMP_TARGET:
4926 if (gimple_omp_target_kind (probe->stmt)
4927 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4928 goto do_lookup;
4930 cls = gimple_omp_target_clauses (probe->stmt);
4931 break;
4933 default:
4934 goto do_lookup;
4937 outer = probe;
4938 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4939 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4940 && orig == OMP_CLAUSE_DECL (cls))
4942 incoming = outgoing = lookup_decl (orig, probe);
4943 goto has_outer_reduction;
4945 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4946 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4947 && orig == OMP_CLAUSE_DECL (cls))
4949 is_private = true;
4950 goto do_lookup;
4954 do_lookup:
4955 /* This is the outermost construct with this reduction,
4956 see if there's a mapping for it. */
4957 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4958 && maybe_lookup_field (orig, outer) && !is_private)
4960 ref_to_res = build_receiver_ref (orig, false, outer);
4961 if (omp_is_reference (orig))
4962 ref_to_res = build_simple_mem_ref (ref_to_res);
4964 tree type = TREE_TYPE (var);
4965 if (POINTER_TYPE_P (type))
4966 type = TREE_TYPE (type);
4968 outgoing = var;
4969 incoming = omp_reduction_init_op (loc, rcode, type);
4971 else
4973 /* Try to look at enclosing contexts for reduction var,
4974 use original if no mapping found. */
4975 tree t = NULL_TREE;
4976 omp_context *c = ctx->outer;
4977 while (c && !t)
4979 t = maybe_lookup_decl (orig, c);
4980 c = c->outer;
4982 incoming = outgoing = (t ? t : orig);
4985 has_outer_reduction:;
4988 if (!ref_to_res)
4989 ref_to_res = integer_zero_node;
4991 if (omp_is_reference (orig))
4993 tree type = TREE_TYPE (var);
4994 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4996 if (!inner)
4998 tree x = create_tmp_var (TREE_TYPE (type), id);
4999 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5002 v1 = create_tmp_var (type, id);
5003 v2 = create_tmp_var (type, id);
5004 v3 = create_tmp_var (type, id);
5006 gimplify_assign (v1, var, fork_seq);
5007 gimplify_assign (v2, var, fork_seq);
5008 gimplify_assign (v3, var, fork_seq);
5010 var = build_simple_mem_ref (var);
5011 v1 = build_simple_mem_ref (v1);
5012 v2 = build_simple_mem_ref (v2);
5013 v3 = build_simple_mem_ref (v3);
5014 outgoing = build_simple_mem_ref (outgoing);
5016 if (!TREE_CONSTANT (incoming))
5017 incoming = build_simple_mem_ref (incoming);
5019 else
5020 v1 = v2 = v3 = var;
5022 /* Determine position in reduction buffer, which may be used
5023 by target. The parser has ensured that this is not a
5024 variable-sized type. */
5025 fixed_size_mode mode
5026 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5027 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5028 offset = (offset + align - 1) & ~(align - 1);
5029 tree off = build_int_cst (sizetype, offset);
5030 offset += GET_MODE_SIZE (mode);
5032 if (!init_code)
5034 init_code = build_int_cst (integer_type_node,
5035 IFN_GOACC_REDUCTION_INIT);
5036 fini_code = build_int_cst (integer_type_node,
5037 IFN_GOACC_REDUCTION_FINI);
5038 setup_code = build_int_cst (integer_type_node,
5039 IFN_GOACC_REDUCTION_SETUP);
5040 teardown_code = build_int_cst (integer_type_node,
5041 IFN_GOACC_REDUCTION_TEARDOWN);
5044 tree setup_call
5045 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5046 TREE_TYPE (var), 6, setup_code,
5047 unshare_expr (ref_to_res),
5048 incoming, level, op, off);
5049 tree init_call
5050 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5051 TREE_TYPE (var), 6, init_code,
5052 unshare_expr (ref_to_res),
5053 v1, level, op, off);
5054 tree fini_call
5055 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5056 TREE_TYPE (var), 6, fini_code,
5057 unshare_expr (ref_to_res),
5058 v2, level, op, off);
5059 tree teardown_call
5060 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5061 TREE_TYPE (var), 6, teardown_code,
5062 ref_to_res, v3, level, op, off);
5064 gimplify_assign (v1, setup_call, &before_fork);
5065 gimplify_assign (v2, init_call, &after_fork);
5066 gimplify_assign (v3, fini_call, &before_join);
5067 gimplify_assign (outgoing, teardown_call, &after_join);
5070 /* Now stitch things together. */
5071 gimple_seq_add_seq (fork_seq, before_fork);
5072 if (fork)
5073 gimple_seq_add_stmt (fork_seq, fork);
5074 gimple_seq_add_seq (fork_seq, after_fork);
5076 gimple_seq_add_seq (join_seq, before_join);
5077 if (join)
5078 gimple_seq_add_stmt (join_seq, join);
5079 gimple_seq_add_seq (join_seq, after_join);
5082 /* Generate code to implement the REDUCTION clauses. */
5084 static void
5085 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5087 gimple_seq sub_seq = NULL;
5088 gimple *stmt;
5089 tree x, c;
5090 int count = 0;
5092 /* OpenACC loop reductions are handled elsewhere. */
5093 if (is_gimple_omp_oacc (ctx->stmt))
5094 return;
5096 /* SIMD reductions are handled in lower_rec_input_clauses. */
5097 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5098 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5099 return;
5101 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5102 update in that case, otherwise use a lock. */
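/* Editorial sketch: a single clause such as "reduction (+:s)" merges
   the private copy with one atomic update,

     #pragma omp atomic
     s = s + s$private;

   whereas several clauses (or array/UDR reductions) are merged inside
   a GOMP_atomic_start ()/GOMP_atomic_end () critical region.  */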
5103 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5104 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5106 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5107 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5109 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5110 count = -1;
5111 break;
5113 count++;
5116 if (count == 0)
5117 return;
5119 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5121 tree var, ref, new_var, orig_var;
5122 enum tree_code code;
5123 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5125 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5126 continue;
5128 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5129 orig_var = var = OMP_CLAUSE_DECL (c);
5130 if (TREE_CODE (var) == MEM_REF)
5132 var = TREE_OPERAND (var, 0);
5133 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5134 var = TREE_OPERAND (var, 0);
5135 if (TREE_CODE (var) == ADDR_EXPR)
5136 var = TREE_OPERAND (var, 0);
5137 else
5139 /* If this is a pointer- or reference-based array
5140 section, the var could be private in the outer
5141 context, e.g. on an orphaned loop construct. Pretend this
5142 is a private variable's outer reference. */
5143 ccode = OMP_CLAUSE_PRIVATE;
5144 if (TREE_CODE (var) == INDIRECT_REF)
5145 var = TREE_OPERAND (var, 0);
5147 orig_var = var;
5148 if (is_variable_sized (var))
5150 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5151 var = DECL_VALUE_EXPR (var);
5152 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5153 var = TREE_OPERAND (var, 0);
5154 gcc_assert (DECL_P (var));
5157 new_var = lookup_decl (var, ctx);
5158 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5159 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5160 ref = build_outer_var_ref (var, ctx, ccode);
5161 code = OMP_CLAUSE_REDUCTION_CODE (c);
5163 /* reduction(-:var) sums up the partial results, so it acts
5164 identically to reduction(+:var). */
5165 if (code == MINUS_EXPR)
5166 code = PLUS_EXPR;
5168 if (count == 1)
5170 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5172 addr = save_expr (addr);
5173 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5174 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5175 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5176 gimplify_and_add (x, stmt_seqp);
5177 return;
5179 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5181 tree d = OMP_CLAUSE_DECL (c);
5182 tree type = TREE_TYPE (d);
5183 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5184 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5185 tree ptype = build_pointer_type (TREE_TYPE (type));
5186 tree bias = TREE_OPERAND (d, 1);
5187 d = TREE_OPERAND (d, 0);
5188 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5190 tree b = TREE_OPERAND (d, 1);
5191 b = maybe_lookup_decl (b, ctx);
5192 if (b == NULL)
5194 b = TREE_OPERAND (d, 1);
5195 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5197 if (integer_zerop (bias))
5198 bias = b;
5199 else
5201 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5202 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5203 TREE_TYPE (b), b, bias);
5205 d = TREE_OPERAND (d, 0);
5207 /* For ref build_outer_var_ref already performs this, so
5208 only new_var needs a dereference. */
5209 if (TREE_CODE (d) == INDIRECT_REF)
5211 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5212 gcc_assert (omp_is_reference (var) && var == orig_var);
5214 else if (TREE_CODE (d) == ADDR_EXPR)
5216 if (orig_var == var)
5218 new_var = build_fold_addr_expr (new_var);
5219 ref = build_fold_addr_expr (ref);
5222 else
5224 gcc_assert (orig_var == var);
5225 if (omp_is_reference (var))
5226 ref = build_fold_addr_expr (ref);
5228 if (DECL_P (v))
5230 tree t = maybe_lookup_decl (v, ctx);
5231 if (t)
5232 v = t;
5233 else
5234 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5235 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5237 if (!integer_zerop (bias))
5239 bias = fold_convert_loc (clause_loc, sizetype, bias);
5240 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5241 TREE_TYPE (new_var), new_var,
5242 unshare_expr (bias));
5243 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5244 TREE_TYPE (ref), ref, bias);
5246 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5247 ref = fold_convert_loc (clause_loc, ptype, ref);
5248 tree m = create_tmp_var (ptype, NULL);
5249 gimplify_assign (m, new_var, stmt_seqp);
5250 new_var = m;
5251 m = create_tmp_var (ptype, NULL);
5252 gimplify_assign (m, ref, stmt_seqp);
5253 ref = m;
5254 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5255 tree body = create_artificial_label (UNKNOWN_LOCATION);
5256 tree end = create_artificial_label (UNKNOWN_LOCATION);
5257 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5258 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5259 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5260 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5262 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5263 tree decl_placeholder
5264 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5265 SET_DECL_VALUE_EXPR (placeholder, out);
5266 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5267 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5268 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5269 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5270 gimple_seq_add_seq (&sub_seq,
5271 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5272 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5273 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5274 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5276 else
5278 x = build2 (code, TREE_TYPE (out), out, priv);
5279 out = unshare_expr (out);
5280 gimplify_assign (out, x, &sub_seq);
5282 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5283 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5284 gimple_seq_add_stmt (&sub_seq, g);
5285 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5286 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5287 gimple_seq_add_stmt (&sub_seq, g);
5288 g = gimple_build_assign (i, PLUS_EXPR, i,
5289 build_int_cst (TREE_TYPE (i), 1));
5290 gimple_seq_add_stmt (&sub_seq, g);
5291 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5292 gimple_seq_add_stmt (&sub_seq, g);
5293 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5295 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5297 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5299 if (omp_is_reference (var)
5300 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5301 TREE_TYPE (ref)))
5302 ref = build_fold_addr_expr_loc (clause_loc, ref);
5303 SET_DECL_VALUE_EXPR (placeholder, ref);
5304 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5305 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5306 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5307 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5308 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5310 else
5312 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5313 ref = build_outer_var_ref (var, ctx);
5314 gimplify_assign (ref, x, &sub_seq);
5318 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5319 0);
5320 gimple_seq_add_stmt (stmt_seqp, stmt);
5322 gimple_seq_add_seq (stmt_seqp, sub_seq);
5324 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5325 0);
5326 gimple_seq_add_stmt (stmt_seqp, stmt);
5330 /* Generate code to implement the COPYPRIVATE clauses. */
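/* Editorial sketch for "#pragma omp single copyprivate (x)": the thread
   that executed the single body stores x (or &x, when passed by
   reference) into the broadcast record, and the other threads copy it
   back out, roughly

     .omp_copy_o.x = x;    // sender side, SLIST
     x = .omp_copy_i.x;    // receiver side, RLIST

   with illustrative record and field names.  */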
5332 static void
5333 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5334 omp_context *ctx)
5336 tree c;
5338 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5340 tree var, new_var, ref, x;
5341 bool by_ref;
5342 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5344 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5345 continue;
5347 var = OMP_CLAUSE_DECL (c);
5348 by_ref = use_pointer_for_field (var, NULL);
5350 ref = build_sender_ref (var, ctx);
5351 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5352 if (by_ref)
5354 x = build_fold_addr_expr_loc (clause_loc, new_var);
5355 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5357 gimplify_assign (ref, x, slist);
5359 ref = build_receiver_ref (var, false, ctx);
5360 if (by_ref)
5362 ref = fold_convert_loc (clause_loc,
5363 build_pointer_type (TREE_TYPE (new_var)),
5364 ref);
5365 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5367 if (omp_is_reference (var))
5369 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5370 ref = build_simple_mem_ref_loc (clause_loc, ref);
5371 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5373 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5374 gimplify_and_add (x, rlist);
5379 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5380 and REDUCTION from the sender (aka parent) side. */
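/* Editorial sketch: for
     #pragma omp parallel firstprivate (a) lastprivate (b)
   the parent roughly emits

     .omp_data_o.a = a;    // ILIST, before the region ("do_in")
     b = .omp_data_o.b;    // OLIST, after the region ("do_out")

   storing an address instead of a value when use_pointer_for_field.  */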
5382 static void
5383 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5384 omp_context *ctx)
5386 tree c, t;
5387 int ignored_looptemp = 0;
5388 bool is_taskloop = false;
5390 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5391 by GOMP_taskloop. */
5392 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5394 ignored_looptemp = 2;
5395 is_taskloop = true;
5398 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5400 tree val, ref, x, var;
5401 bool by_ref, do_in = false, do_out = false;
5402 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5404 switch (OMP_CLAUSE_CODE (c))
5406 case OMP_CLAUSE_PRIVATE:
5407 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5408 break;
5409 continue;
5410 case OMP_CLAUSE_FIRSTPRIVATE:
5411 case OMP_CLAUSE_COPYIN:
5412 case OMP_CLAUSE_LASTPRIVATE:
5413 case OMP_CLAUSE_REDUCTION:
5414 break;
5415 case OMP_CLAUSE_SHARED:
5416 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5417 break;
5418 continue;
5419 case OMP_CLAUSE__LOOPTEMP_:
5420 if (ignored_looptemp)
5422 ignored_looptemp--;
5423 continue;
5425 break;
5426 default:
5427 continue;
5430 val = OMP_CLAUSE_DECL (c);
5431 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5432 && TREE_CODE (val) == MEM_REF)
5434 val = TREE_OPERAND (val, 0);
5435 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5436 val = TREE_OPERAND (val, 0);
5437 if (TREE_CODE (val) == INDIRECT_REF
5438 || TREE_CODE (val) == ADDR_EXPR)
5439 val = TREE_OPERAND (val, 0);
5440 if (is_variable_sized (val))
5441 continue;
5444 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5445 outer taskloop region. */
5446 omp_context *ctx_for_o = ctx;
5447 if (is_taskloop
5448 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5449 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5450 ctx_for_o = ctx->outer;
5452 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5454 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5455 && is_global_var (var))
5456 continue;
5458 t = omp_member_access_dummy_var (var);
5459 if (t)
5461 var = DECL_VALUE_EXPR (var);
5462 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5463 if (o != t)
5464 var = unshare_and_remap (var, t, o);
5465 else
5466 var = unshare_expr (var);
5469 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5471 /* Handle taskloop firstprivate/lastprivate, where the
5472 lastprivate on GIMPLE_OMP_TASK is represented as
5473 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5474 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5475 x = omp_build_component_ref (ctx->sender_decl, f);
5476 if (use_pointer_for_field (val, ctx))
5477 var = build_fold_addr_expr (var);
5478 gimplify_assign (x, var, ilist);
5479 DECL_ABSTRACT_ORIGIN (f) = NULL;
5480 continue;
5483 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5484 || val == OMP_CLAUSE_DECL (c))
5485 && is_variable_sized (val))
5486 continue;
5487 by_ref = use_pointer_for_field (val, NULL);
5489 switch (OMP_CLAUSE_CODE (c))
5491 case OMP_CLAUSE_FIRSTPRIVATE:
5492 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5493 && !by_ref
5494 && is_task_ctx (ctx))
5495 TREE_NO_WARNING (var) = 1;
5496 do_in = true;
5497 break;
5499 case OMP_CLAUSE_PRIVATE:
5500 case OMP_CLAUSE_COPYIN:
5501 case OMP_CLAUSE__LOOPTEMP_:
5502 do_in = true;
5503 break;
5505 case OMP_CLAUSE_LASTPRIVATE:
5506 if (by_ref || omp_is_reference (val))
5508 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5509 continue;
5510 do_in = true;
5512 else
5514 do_out = true;
5515 if (lang_hooks.decls.omp_private_outer_ref (val))
5516 do_in = true;
5518 break;
5520 case OMP_CLAUSE_REDUCTION:
5521 do_in = true;
5522 if (val == OMP_CLAUSE_DECL (c))
5523 do_out = !(by_ref || omp_is_reference (val));
5524 else
5525 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5526 break;
5528 default:
5529 gcc_unreachable ();
5532 if (do_in)
5534 ref = build_sender_ref (val, ctx);
5535 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5536 gimplify_assign (ref, x, ilist);
5537 if (is_task_ctx (ctx))
5538 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5541 if (do_out)
5543 ref = build_sender_ref (val, ctx);
5544 gimplify_assign (var, ref, olist);
5549 /* Generate code to implement SHARED from the sender (aka parent)
5550 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5551 list things that got automatically shared. */
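/* Editorial sketch: for every implicitly shared VAR remapped in the
   child, the parent fills the communication record and, unless the
   variable is read-only or passed by reference, copies it back:

     .omp_data_o.v = v;    // or = &v for by-reference fields
     ...region...
     v = .omp_data_o.v;    // by-value fields only  */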
5553 static void
5554 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5556 tree var, ovar, nvar, t, f, x, record_type;
5558 if (ctx->record_type == NULL)
5559 return;
5561 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5562 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5564 ovar = DECL_ABSTRACT_ORIGIN (f);
5565 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5566 continue;
5568 nvar = maybe_lookup_decl (ovar, ctx);
5569 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5570 continue;
5572 /* If CTX is a nested parallel directive, find the immediately
5573 enclosing parallel or workshare construct that contains a
5574 mapping for OVAR. */
5575 var = lookup_decl_in_outer_ctx (ovar, ctx);
5577 t = omp_member_access_dummy_var (var);
5578 if (t)
5580 var = DECL_VALUE_EXPR (var);
5581 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5582 if (o != t)
5583 var = unshare_and_remap (var, t, o);
5584 else
5585 var = unshare_expr (var);
5588 if (use_pointer_for_field (ovar, ctx))
5590 x = build_sender_ref (ovar, ctx);
5591 var = build_fold_addr_expr (var);
5592 gimplify_assign (x, var, ilist);
5594 else
5596 x = build_sender_ref (ovar, ctx);
5597 gimplify_assign (x, var, ilist);
5599 if (!TREE_READONLY (var)
5600 /* We don't need to receive a new reference to a result
5601 or parm decl. In fact we may not store to it as we will
5602 invalidate any pending RSO (return slot optimization) and generate wrong gimple
5603 during inlining. */
5604 && !((TREE_CODE (var) == RESULT_DECL
5605 || TREE_CODE (var) == PARM_DECL)
5606 && DECL_BY_REFERENCE (var)))
5608 x = build_sender_ref (ovar, ctx);
5609 gimplify_assign (var, x, olist);
5615 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5616 other information that must be processed by the target compiler.
5617 Return the maximum number of dimensions the associated loop might
5618 be partitioned over. */
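/* An editorial sketch of the emitted marker, with LEVELS and TAG
   computed from the clauses below:

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   The target compiler later decodes TAG to pick the partitioning.  */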
5620 static unsigned
5621 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5622 gimple_seq *seq, omp_context *ctx)
5624 unsigned levels = 0;
5625 unsigned tag = 0;
5626 tree gang_static = NULL_TREE;
5627 auto_vec<tree, 5> args;
5629 args.quick_push (build_int_cst
5630 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5631 args.quick_push (ddvar);
5632 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5634 switch (OMP_CLAUSE_CODE (c))
5636 case OMP_CLAUSE_GANG:
5637 tag |= OLF_DIM_GANG;
5638 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5639 /* static:* is represented by -1, and we can ignore it, as
5640 scheduling is always static. */
5641 if (gang_static && integer_minus_onep (gang_static))
5642 gang_static = NULL_TREE;
5643 levels++;
5644 break;
5646 case OMP_CLAUSE_WORKER:
5647 tag |= OLF_DIM_WORKER;
5648 levels++;
5649 break;
5651 case OMP_CLAUSE_VECTOR:
5652 tag |= OLF_DIM_VECTOR;
5653 levels++;
5654 break;
5656 case OMP_CLAUSE_SEQ:
5657 tag |= OLF_SEQ;
5658 break;
5660 case OMP_CLAUSE_AUTO:
5661 tag |= OLF_AUTO;
5662 break;
5664 case OMP_CLAUSE_INDEPENDENT:
5665 tag |= OLF_INDEPENDENT;
5666 break;
5668 case OMP_CLAUSE_TILE:
5669 tag |= OLF_TILE;
5670 break;
5672 default:
5673 continue;
5677 if (gang_static)
5679 if (DECL_P (gang_static))
5680 gang_static = build_outer_var_ref (gang_static, ctx);
5681 tag |= OLF_GANG_STATIC;
5684 /* In a parallel region, loops are implicitly INDEPENDENT. */
5685 omp_context *tgt = enclosing_target_ctx (ctx);
5686 if (!tgt || is_oacc_parallel (tgt))
5687 tag |= OLF_INDEPENDENT;
5689 if (tag & OLF_TILE)
5690 /* Tiling could use all 3 levels. */
5691 levels = 3;
5692 else
5694 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5695 Ensure at least one level, or 2 for possible auto
5696 partitioning. */
5697 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5698 << OLF_DIM_BASE) | OLF_SEQ));
5700 if (levels < 1u + maybe_auto)
5701 levels = 1u + maybe_auto;
5704 args.quick_push (build_int_cst (integer_type_node, levels));
5705 args.quick_push (build_int_cst (integer_type_node, tag));
5706 if (gang_static)
5707 args.quick_push (gang_static);
5709 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5710 gimple_set_location (call, loc);
5711 gimple_set_lhs (call, ddvar);
5712 gimple_seq_add_stmt (seq, call);
5714 return levels;
5717 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW is the
5718 partitioning level of the enclosed region. */
5720 static void
5721 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5722 tree tofollow, gimple_seq *seq)
5724 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5725 : IFN_UNIQUE_OACC_TAIL_MARK);
5726 tree marker = build_int_cst (integer_type_node, marker_kind);
5727 int nargs = 2 + (tofollow != NULL_TREE);
5728 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5729 marker, ddvar, tofollow);
5730 gimple_set_location (call, loc);
5731 gimple_set_lhs (call, ddvar);
5732 gimple_seq_add_stmt (seq, call);
5735 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5736 the loop clauses, from which we extract reductions. Initialize
5737 HEAD and TAIL. */
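/* An editorial sketch of the resulting shape for a two-level loop
   (1 = outermost):

     HEAD: head-mark, fork(1), fork(2), reduction setup/init per level
     ...loop body...
     TAIL: reduction fini/teardown per level, join(2), join(1), tail-mark

   built one level per iteration, appending to HEAD and prepending to
   TAIL so the per-level sequences nest correctly.  */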
5739 static void
5740 lower_oacc_head_tail (location_t loc, tree clauses,
5741 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5743 bool inner = false;
5744 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5745 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5747 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5748 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5749 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5751 gcc_assert (count);
5752 for (unsigned done = 1; count; count--, done++)
5754 gimple_seq fork_seq = NULL;
5755 gimple_seq join_seq = NULL;
5757 tree place = build_int_cst (integer_type_node, -1);
5758 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5759 fork_kind, ddvar, place);
5760 gimple_set_location (fork, loc);
5761 gimple_set_lhs (fork, ddvar);
5763 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5764 join_kind, ddvar, place);
5765 gimple_set_location (join, loc);
5766 gimple_set_lhs (join, ddvar);
5768 /* Mark the beginning of this level sequence. */
5769 if (inner)
5770 lower_oacc_loop_marker (loc, ddvar, true,
5771 build_int_cst (integer_type_node, count),
5772 &fork_seq);
5773 lower_oacc_loop_marker (loc, ddvar, false,
5774 build_int_cst (integer_type_node, done),
5775 &join_seq);
5777 lower_oacc_reductions (loc, clauses, place, inner,
5778 fork, join, &fork_seq, &join_seq, ctx);
5780 /* Append this level to head. */
5781 gimple_seq_add_seq (head, fork_seq);
5782 /* Prepend it to tail. */
5783 gimple_seq_add_seq (&join_seq, *tail);
5784 *tail = join_seq;
5786 inner = true;
5789 /* Mark the end of the sequence. */
5790 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5791 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
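/* As a rough sketch (per-level loop markers elided), the sequences
   built above for a two-level gang/vector loop nest like

     HEAD: .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, 2, TAG);
           .data_dep = UNIQUE (OACC_FORK, .data_dep, -1);  // outer level
           .data_dep = UNIQUE (OACC_FORK, .data_dep, -1);  // inner level
           .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep);
     ... loop ...
     TAIL: .data_dep = UNIQUE (OACC_JOIN, .data_dep, -1);  // inner level
           .data_dep = UNIQUE (OACC_JOIN, .data_dep, -1);  // outer level
           .data_dep = UNIQUE (OACC_TAIL_MARK, .data_dep);

   with reduction setup/teardown interleaved at each level by
   lower_oacc_reductions.  */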
5794 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5795 catch handler and return it. This prevents programs from violating the
5796 structured block semantics with throws. */
5798 static gimple_seq
5799 maybe_catch_exception (gimple_seq body)
5801 gimple *g;
5802 tree decl;
5804 if (!flag_exceptions)
5805 return body;
5807 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5808 decl = lang_hooks.eh_protect_cleanup_actions ();
5809 else
5810 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5812 g = gimple_build_eh_must_not_throw (decl);
5813 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5814 GIMPLE_TRY_CATCH);
5816 return gimple_seq_alloc_with_stmt (g);
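/* Schematically, with exceptions enabled the lowered body becomes

     try
       {
         BODY;
       }
     catch
       {
         <<<eh_must_not_throw (terminate or __builtin_trap)>>>;
       }

   so an exception escaping BODY aborts the program instead of
   unwinding out of the structured block.  */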
5820 /* Routines to lower OMP directives into OMP-GIMPLE. */
5822 /* If CTX is a worksharing context inside a cancellable parallel
5823 region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
5824 and a conditional branch to the parallel's cancel_label to handle
5825 cancellation in the implicit barrier. */
5827 static void
5828 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5830 gimple *omp_return = gimple_seq_last_stmt (*body);
5831 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5832 if (gimple_omp_return_nowait_p (omp_return))
5833 return;
5834 if (ctx->outer
5835 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5836 && ctx->outer->cancellable)
5838 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5839 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5840 tree lhs = create_tmp_var (c_bool_type);
5841 gimple_omp_return_set_lhs (omp_return, lhs);
5842 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5843 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5844 fold_convert (c_bool_type,
5845 boolean_false_node),
5846 ctx->outer->cancel_label, fallthru_label);
5847 gimple_seq_add_stmt (body, g);
5848 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
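/* The emitted tail of the region then looks, schematically, like

     lhs = GIMPLE_OMP_RETURN;   // expanded later into a cancellable
                                // barrier such as GOMP_barrier_cancel
     if (lhs != 0)
       goto <parallel's cancel_label>;
     <fallthru_label>:

   so a barrier that observes cancellation branches to the enclosing
   parallel's cancellation handling instead of falling through.  */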
5852 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5853 CTX is the enclosing OMP context for the current statement. */
5855 static void
5856 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5858 tree block, control;
5859 gimple_stmt_iterator tgsi;
5860 gomp_sections *stmt;
5861 gimple *t;
5862 gbind *new_stmt, *bind;
5863 gimple_seq ilist, dlist, olist, new_body;
5865 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5867 push_gimplify_context ();
5869 dlist = NULL;
5870 ilist = NULL;
5871 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5872 &ilist, &dlist, ctx, NULL);
5874 new_body = gimple_omp_body (stmt);
5875 gimple_omp_set_body (stmt, NULL);
5876 tgsi = gsi_start (new_body);
5877 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5879 omp_context *sctx;
5880 gimple *sec_start;
5882 sec_start = gsi_stmt (tgsi);
5883 sctx = maybe_lookup_ctx (sec_start);
5884 gcc_assert (sctx);
5886 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5887 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5888 GSI_CONTINUE_LINKING);
5889 gimple_omp_set_body (sec_start, NULL);
5891 if (gsi_one_before_end_p (tgsi))
5893 gimple_seq l = NULL;
5894 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5895 &l, ctx);
5896 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5897 gimple_omp_section_set_last (sec_start);
5900 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5901 GSI_CONTINUE_LINKING);
5904 block = make_node (BLOCK);
5905 bind = gimple_build_bind (NULL, new_body, block);
5907 olist = NULL;
5908 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5910 block = make_node (BLOCK);
5911 new_stmt = gimple_build_bind (NULL, NULL, block);
5912 gsi_replace (gsi_p, new_stmt, true);
5914 pop_gimplify_context (new_stmt);
5915 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5916 BLOCK_VARS (block) = gimple_bind_vars (bind);
5917 if (BLOCK_VARS (block))
5918 TREE_USED (block) = 1;
5920 new_body = NULL;
5921 gimple_seq_add_seq (&new_body, ilist);
5922 gimple_seq_add_stmt (&new_body, stmt);
5923 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5924 gimple_seq_add_stmt (&new_body, bind);
5926 control = create_tmp_var (unsigned_type_node, ".section");
5927 t = gimple_build_omp_continue (control, control);
5928 gimple_omp_sections_set_control (stmt, control);
5929 gimple_seq_add_stmt (&new_body, t);
5931 gimple_seq_add_seq (&new_body, olist);
5932 if (ctx->cancellable)
5933 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5934 gimple_seq_add_seq (&new_body, dlist);
5936 new_body = maybe_catch_exception (new_body);
5938 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5939 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5940 t = gimple_build_omp_return (nowait);
5941 gimple_seq_add_stmt (&new_body, t);
5942 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5944 gimple_bind_set_body (new_stmt, new_body);
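/* After this rewrite the directive has, roughly, the shape

     <firstprivate/private setup>                  // ilist
     GIMPLE_OMP_SECTIONS <clauses, .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { SECTION1 ... SECTIONN + lastprivate }
     GIMPLE_OMP_CONTINUE (.section, .section)
     <reduction code>                              // olist
     <cancel_label:>                               // if cancellable
     <destructors>                                 // dlist
     GIMPLE_OMP_RETURN <nowait?>

   ready for pass_expand_omp to turn into the GOMP_sections* runtime
   calls.  */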
5948 /* A subroutine of lower_omp_single. Expand the simple form of
5949 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5951 if (GOMP_single_start ())
5952 BODY;
5953 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5955 FIXME. It may be better to delay expanding the logic of this until
5956 pass_expand_omp. The expanded logic may make the job of a
5957 synchronization analysis pass more difficult. */
5959 static void
5960 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5962 location_t loc = gimple_location (single_stmt);
5963 tree tlabel = create_artificial_label (loc);
5964 tree flabel = create_artificial_label (loc);
5965 gimple *call, *cond;
5966 tree lhs, decl;
5968 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5969 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5970 call = gimple_build_call (decl, 0);
5971 gimple_call_set_lhs (call, lhs);
5972 gimple_seq_add_stmt (pre_p, call);
5974 cond = gimple_build_cond (EQ_EXPR, lhs,
5975 fold_convert_loc (loc, TREE_TYPE (lhs),
5976 boolean_true_node),
5977 tlabel, flabel);
5978 gimple_seq_add_stmt (pre_p, cond);
5979 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5980 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5981 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5985 /* A subroutine of lower_omp_single. Expand the simple form of
5986 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5988 #pragma omp single copyprivate (a, b, c)
5990 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5993 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5995 BODY;
5996 copyout.a = a;
5997 copyout.b = b;
5998 copyout.c = c;
5999 GOMP_single_copy_end (&copyout);
6001 else
6003 a = copyout_p->a;
6004 b = copyout_p->b;
6005 c = copyout_p->c;
6007 GOMP_barrier ();
6010 FIXME. It may be better to delay expanding the logic of this until
6011 pass_expand_omp. The expanded logic may make the job of a
6012 synchronization analysis pass more difficult. */
6014 static void
6015 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6016 omp_context *ctx)
6018 tree ptr_type, t, l0, l1, l2, bfn_decl;
6019 gimple_seq copyin_seq;
6020 location_t loc = gimple_location (single_stmt);
6022 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6024 ptr_type = build_pointer_type (ctx->record_type);
6025 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6027 l0 = create_artificial_label (loc);
6028 l1 = create_artificial_label (loc);
6029 l2 = create_artificial_label (loc);
6031 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6032 t = build_call_expr_loc (loc, bfn_decl, 0);
6033 t = fold_convert_loc (loc, ptr_type, t);
6034 gimplify_assign (ctx->receiver_decl, t, pre_p);
6036 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6037 build_int_cst (ptr_type, 0));
6038 t = build3 (COND_EXPR, void_type_node, t,
6039 build_and_jump (&l0), build_and_jump (&l1));
6040 gimplify_and_add (t, pre_p);
6042 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6044 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6046 copyin_seq = NULL;
6047 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6048 &copyin_seq, ctx);
6050 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6051 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6052 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6053 gimplify_and_add (t, pre_p);
6055 t = build_and_jump (&l2);
6056 gimplify_and_add (t, pre_p);
6058 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6060 gimple_seq_add_seq (pre_p, copyin_seq);
6062 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6066 /* Expand code for an OpenMP single directive. */
6068 static void
6069 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6071 tree block;
6072 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6073 gbind *bind;
6074 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6076 push_gimplify_context ();
6078 block = make_node (BLOCK);
6079 bind = gimple_build_bind (NULL, NULL, block);
6080 gsi_replace (gsi_p, bind, true);
6081 bind_body = NULL;
6082 dlist = NULL;
6083 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6084 &bind_body, &dlist, ctx, NULL);
6085 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6087 gimple_seq_add_stmt (&bind_body, single_stmt);
6089 if (ctx->record_type)
6090 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6091 else
6092 lower_omp_single_simple (single_stmt, &bind_body);
6094 gimple_omp_set_body (single_stmt, NULL);
6096 gimple_seq_add_seq (&bind_body, dlist);
6098 bind_body = maybe_catch_exception (bind_body);
6100 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6101 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6102 gimple *g = gimple_build_omp_return (nowait);
6103 gimple_seq_add_stmt (&bind_body_tail, g);
6104 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6105 if (ctx->record_type)
6107 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6108 tree clobber = build_constructor (ctx->record_type, NULL);
6109 TREE_THIS_VOLATILE (clobber) = 1;
6110 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6111 clobber), GSI_SAME_STMT);
6113 gimple_seq_add_seq (&bind_body, bind_body_tail);
6114 gimple_bind_set_body (bind, bind_body);
6116 pop_gimplify_context (bind);
6118 gimple_bind_append_vars (bind, ctx->block_vars);
6119 BLOCK_VARS (block) = ctx->block_vars;
6120 if (BLOCK_VARS (block))
6121 TREE_USED (block) = 1;
6125 /* Expand code for an OpenMP master directive. */
6127 static void
6128 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6130 tree block, lab = NULL, x, bfn_decl;
6131 gimple *stmt = gsi_stmt (*gsi_p);
6132 gbind *bind;
6133 location_t loc = gimple_location (stmt);
6134 gimple_seq tseq;
6136 push_gimplify_context ();
6138 block = make_node (BLOCK);
6139 bind = gimple_build_bind (NULL, NULL, block);
6140 gsi_replace (gsi_p, bind, true);
6141 gimple_bind_add_stmt (bind, stmt);
6143 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6144 x = build_call_expr_loc (loc, bfn_decl, 0);
6145 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6146 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6147 tseq = NULL;
6148 gimplify_and_add (x, &tseq);
6149 gimple_bind_add_seq (bind, tseq);
6151 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6152 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6153 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6154 gimple_omp_set_body (stmt, NULL);
6156 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6158 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6160 pop_gimplify_context (bind);
6162 gimple_bind_append_vars (bind, ctx->block_vars);
6163 BLOCK_VARS (block) = ctx->block_vars;
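/* The lowered form is simply a thread-number test with no implied
   barrier on exit; schematically:

     if (omp_get_thread_num () != 0) goto lab;
     BODY;
    lab:
     GIMPLE_OMP_RETURN (nowait);  */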
6167 /* Expand code for an OpenMP taskgroup directive. */
6169 static void
6170 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6172 gimple *stmt = gsi_stmt (*gsi_p);
6173 gcall *x;
6174 gbind *bind;
6175 tree block = make_node (BLOCK);
6177 bind = gimple_build_bind (NULL, NULL, block);
6178 gsi_replace (gsi_p, bind, true);
6179 gimple_bind_add_stmt (bind, stmt);
6181 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6182 0);
6183 gimple_bind_add_stmt (bind, x);
6185 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6186 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6187 gimple_omp_set_body (stmt, NULL);
6189 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6191 gimple_bind_append_vars (bind, ctx->block_vars);
6192 BLOCK_VARS (block) = ctx->block_vars;
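/* For example, a construct such as

     #pragma omp taskgroup
       {
         #pragma omp task
         f ();
       }

   is lowered so that GOMP_taskgroup_start () precedes the body; the
   matching end of the region, which waits for all tasks generated in
   the group, is handled when the GIMPLE_OMP_RETURN is expanded
   later.  */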
6196 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6198 static void
6199 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6200 omp_context *ctx)
6202 struct omp_for_data fd;
6203 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6204 return;
6206 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6207 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6208 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6209 if (!fd.ordered)
6210 return;
6212 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6213 tree c = gimple_omp_ordered_clauses (ord_stmt);
6214 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6215 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6217 /* Merge depend clauses from multiple adjacent
6218 #pragma omp ordered depend(sink:...) constructs
6219 into one #pragma omp ordered depend(sink:...), so that
6220 we can optimize them together. */
6221 gimple_stmt_iterator gsi = *gsi_p;
6222 gsi_next (&gsi);
6223 while (!gsi_end_p (gsi))
6225 gimple *stmt = gsi_stmt (gsi);
6226 if (is_gimple_debug (stmt)
6227 || gimple_code (stmt) == GIMPLE_NOP)
6229 gsi_next (&gsi);
6230 continue;
6232 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6233 break;
6234 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6235 c = gimple_omp_ordered_clauses (ord_stmt2);
6236 if (c == NULL_TREE
6237 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6238 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6239 break;
6240 while (*list_p)
6241 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6242 *list_p = c;
6243 gsi_remove (&gsi, true);
6247 /* Canonicalize sink dependence clauses into one folded clause if
6248 possible.
6250 The basic algorithm is to create a sink vector whose first
6251 element is the GCD of all the first elements, and whose remaining
6252 elements are the minimum of the subsequent columns.
6254 We ignore dependence vectors whose first element is zero because
6255 such dependencies are known to be executed by the same thread.
6257 We take into account the direction of the loop, so a minimum
6258 becomes a maximum if the loop is iterating forwards. We also
6259 ignore sink clauses where the loop direction is unknown, or where
6260 the offsets are clearly invalid because they are not a multiple
6261 of the loop increment.
6263 For example:
6265 #pragma omp for ordered(2)
6266 for (i=0; i < N; ++i)
6267 for (j=0; j < M; ++j)
6269 #pragma omp ordered \
6270 depend(sink:i-8,j-2) \
6271 depend(sink:i,j-1) \ // Completely ignored because i+0.
6272 depend(sink:i-4,j-3) \
6273 depend(sink:i-6,j-4)
6274 #pragma omp ordered depend(source)
6277 Folded clause is:
6279 depend(sink:-gcd(8,4,6),-min(2,3,4))
6280 -or-
6281 depend(sink:-2,-2)
6282 */
6284 /* FIXME: Computing GCD's where the first element is zero is
6285 non-trivial in the presence of collapsed loops. Do this later. */
6286 if (fd.collapse > 1)
6287 return;
6289 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6291 /* wide_int is not a POD so it must be default-constructed. */
6292 for (unsigned i = 0; i != 2 * len - 1; ++i)
6293 new (static_cast<void*>(folded_deps + i)) wide_int ();
6295 tree folded_dep = NULL_TREE;
6296 /* TRUE if the first dimension's offset is negative. */
6297 bool neg_offset_p = false;
6299 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6300 unsigned int i;
6301 while ((c = *list_p) != NULL)
6303 bool remove = false;
6305 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6306 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6307 goto next_ordered_clause;
6309 tree vec;
6310 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6311 vec && TREE_CODE (vec) == TREE_LIST;
6312 vec = TREE_CHAIN (vec), ++i)
6314 gcc_assert (i < len);
6316 /* omp_extract_for_data has canonicalized the condition. */
6317 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6318 || fd.loops[i].cond_code == GT_EXPR);
6319 bool forward = fd.loops[i].cond_code == LT_EXPR;
6320 bool maybe_lexically_later = true;
6322 /* While the committee makes up its mind, bail if we have any
6323 non-constant steps. */
6324 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6325 goto lower_omp_ordered_ret;
6327 tree itype = TREE_TYPE (TREE_VALUE (vec));
6328 if (POINTER_TYPE_P (itype))
6329 itype = sizetype;
6330 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6331 TYPE_PRECISION (itype),
6332 TYPE_SIGN (itype));
6334 /* Ignore invalid offsets that are not multiples of the step. */
6335 if (!wi::multiple_of_p (wi::abs (offset),
6336 wi::abs (wi::to_wide (fd.loops[i].step)),
6337 UNSIGNED))
6339 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6340 "ignoring sink clause with offset that is not "
6341 "a multiple of the loop step");
6342 remove = true;
6343 goto next_ordered_clause;
6346 /* Calculate the first dimension. The first dimension of
6347 the folded dependency vector is the GCD of the first
6348 elements, while ignoring any first elements whose offset
6349 is 0. */
6350 if (i == 0)
6352 /* Ignore dependence vectors whose first dimension is 0. */
6353 if (offset == 0)
6355 remove = true;
6356 goto next_ordered_clause;
6358 else
6360 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6362 error_at (OMP_CLAUSE_LOCATION (c),
6363 "first offset must be in opposite direction "
6364 "of loop iterations");
6365 goto lower_omp_ordered_ret;
6367 if (forward)
6368 offset = -offset;
6369 neg_offset_p = forward;
6370 /* Initialize the first time around. */
6371 if (folded_dep == NULL_TREE)
6373 folded_dep = c;
6374 folded_deps[0] = offset;
6376 else
6377 folded_deps[0] = wi::gcd (folded_deps[0],
6378 offset, UNSIGNED);
6381 /* Calculate minimum for the remaining dimensions. */
6382 else
6384 folded_deps[len + i - 1] = offset;
6385 if (folded_dep == c)
6386 folded_deps[i] = offset;
6387 else if (maybe_lexically_later
6388 && !wi::eq_p (folded_deps[i], offset))
6390 if (forward ^ wi::gts_p (folded_deps[i], offset))
6392 unsigned int j;
6393 folded_dep = c;
6394 for (j = 1; j <= i; j++)
6395 folded_deps[j] = folded_deps[len + j - 1];
6397 else
6398 maybe_lexically_later = false;
6402 gcc_assert (i == len);
6404 remove = true;
6406 next_ordered_clause:
6407 if (remove)
6408 *list_p = OMP_CLAUSE_CHAIN (c);
6409 else
6410 list_p = &OMP_CLAUSE_CHAIN (c);
6413 if (folded_dep)
6415 if (neg_offset_p)
6416 folded_deps[0] = -folded_deps[0];
6418 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6419 if (POINTER_TYPE_P (itype))
6420 itype = sizetype;
6422 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6423 = wide_int_to_tree (itype, folded_deps[0]);
6424 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6425 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6428 lower_omp_ordered_ret:
6430 /* Ordered without clauses is #pragma omp ordered threads, while we
6431 want a nop instead if we remove all clauses. */
6432 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6433 gsi_replace (gsi_p, gimple_build_nop (), true);
6437 /* Expand code for an OpenMP ordered directive. */
6439 static void
6440 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6442 tree block;
6443 gimple *stmt = gsi_stmt (*gsi_p), *g;
6444 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6445 gcall *x;
6446 gbind *bind;
6447 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6448 OMP_CLAUSE_SIMD);
6449 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6450 loop. */
6451 bool maybe_simt
6452 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6453 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6454 OMP_CLAUSE_THREADS);
6456 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6457 OMP_CLAUSE_DEPEND))
6459 /* FIXME: This needs to be moved to the expansion, to verify various
6460 conditions only testable on a cfg with dominators computed; also,
6461 all the depend clauses to be merged might still need to be available
6462 for the runtime checks. */
6463 if (0)
6464 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6465 return;
6468 push_gimplify_context ();
6470 block = make_node (BLOCK);
6471 bind = gimple_build_bind (NULL, NULL, block);
6472 gsi_replace (gsi_p, bind, true);
6473 gimple_bind_add_stmt (bind, stmt);
6475 if (simd)
6477 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6478 build_int_cst (NULL_TREE, threads));
6479 cfun->has_simduid_loops = true;
6481 else
6482 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6483 0);
6484 gimple_bind_add_stmt (bind, x);
6486 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6487 if (maybe_simt)
6489 counter = create_tmp_var (integer_type_node);
6490 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6491 gimple_call_set_lhs (g, counter);
6492 gimple_bind_add_stmt (bind, g);
6494 body = create_artificial_label (UNKNOWN_LOCATION);
6495 test = create_artificial_label (UNKNOWN_LOCATION);
6496 gimple_bind_add_stmt (bind, gimple_build_label (body));
6498 tree simt_pred = create_tmp_var (integer_type_node);
6499 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6500 gimple_call_set_lhs (g, simt_pred);
6501 gimple_bind_add_stmt (bind, g);
6503 tree t = create_artificial_label (UNKNOWN_LOCATION);
6504 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6505 gimple_bind_add_stmt (bind, g);
6507 gimple_bind_add_stmt (bind, gimple_build_label (t));
6509 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6510 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6511 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6512 gimple_omp_set_body (stmt, NULL);
6514 if (maybe_simt)
6516 gimple_bind_add_stmt (bind, gimple_build_label (test));
6517 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6518 gimple_bind_add_stmt (bind, g);
6520 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6521 tree nonneg = create_tmp_var (integer_type_node);
6522 gimple_seq tseq = NULL;
6523 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6524 gimple_bind_add_seq (bind, tseq);
6526 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6527 gimple_call_set_lhs (g, nonneg);
6528 gimple_bind_add_stmt (bind, g);
6530 tree end = create_artificial_label (UNKNOWN_LOCATION);
6531 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6532 gimple_bind_add_stmt (bind, g);
6534 gimple_bind_add_stmt (bind, gimple_build_label (end));
6536 if (simd)
6537 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6538 build_int_cst (NULL_TREE, threads));
6539 else
6540 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6541 0);
6542 gimple_bind_add_stmt (bind, x);
6544 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6546 pop_gimplify_context (bind);
6548 gimple_bind_append_vars (bind, ctx->block_vars);
6549 BLOCK_VARS (block) = gimple_bind_vars (bind);
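/* In the plain (non-SIMD) case the lowering amounts to

     GOMP_ordered_start ();
     BODY;
     GOMP_ordered_end ();
     GIMPLE_OMP_RETURN;

   while the SIMD variants use the IFN_GOMP_SIMD_ORDERED_{START,END}
   internal functions instead, and the SIMT path above additionally
   serializes the lanes one at a time around the body.  */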
6553 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6554 substitution of a couple of function calls. But the NAMED case
6555 requires that languages coordinate a symbol name. It is therefore
6556 best put here in common code. */
6558 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6560 static void
6561 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6563 tree block;
6564 tree name, lock, unlock;
6565 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6566 gbind *bind;
6567 location_t loc = gimple_location (stmt);
6568 gimple_seq tbody;
6570 name = gimple_omp_critical_name (stmt);
6571 if (name)
6573 tree decl;
6575 if (!critical_name_mutexes)
6576 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6578 tree *n = critical_name_mutexes->get (name);
6579 if (n == NULL)
6581 char *new_str;
6583 decl = create_tmp_var_raw (ptr_type_node);
6585 new_str = ACONCAT ((".gomp_critical_user_",
6586 IDENTIFIER_POINTER (name), NULL));
6587 DECL_NAME (decl) = get_identifier (new_str);
6588 TREE_PUBLIC (decl) = 1;
6589 TREE_STATIC (decl) = 1;
6590 DECL_COMMON (decl) = 1;
6591 DECL_ARTIFICIAL (decl) = 1;
6592 DECL_IGNORED_P (decl) = 1;
6594 varpool_node::finalize_decl (decl);
6596 critical_name_mutexes->put (name, decl);
6598 else
6599 decl = *n;
6601 /* If '#pragma omp critical' is inside an offloaded region or
6602 inside a function marked as offloadable, the symbol must be
6603 marked as offloadable too. */
6604 omp_context *octx;
6605 if (cgraph_node::get (current_function_decl)->offloadable)
6606 varpool_node::get_create (decl)->offloadable = 1;
6607 else
6608 for (octx = ctx->outer; octx; octx = octx->outer)
6609 if (is_gimple_omp_offloaded (octx->stmt))
6611 varpool_node::get_create (decl)->offloadable = 1;
6612 break;
6615 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6616 lock = build_call_expr_loc (loc, lock, 1,
6617 build_fold_addr_expr_loc (loc, decl));
6619 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6620 unlock = build_call_expr_loc (loc, unlock, 1,
6621 build_fold_addr_expr_loc (loc, decl));
6623 else
6625 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6626 lock = build_call_expr_loc (loc, lock, 0);
6628 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6629 unlock = build_call_expr_loc (loc, unlock, 0);
6632 push_gimplify_context ();
6634 block = make_node (BLOCK);
6635 bind = gimple_build_bind (NULL, NULL, block);
6636 gsi_replace (gsi_p, bind, true);
6637 gimple_bind_add_stmt (bind, stmt);
6639 tbody = gimple_bind_body (bind);
6640 gimplify_and_add (lock, &tbody);
6641 gimple_bind_set_body (bind, tbody);
6643 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6644 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6645 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6646 gimple_omp_set_body (stmt, NULL);
6648 tbody = gimple_bind_body (bind);
6649 gimplify_and_add (unlock, &tbody);
6650 gimple_bind_set_body (bind, tbody);
6652 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6654 pop_gimplify_context (bind);
6655 gimple_bind_append_vars (bind, ctx->block_vars);
6656 BLOCK_VARS (block) = gimple_bind_vars (bind);
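/* For instance, a named construct

     #pragma omp critical (lock1)
     BODY;

   is lowered to, roughly,

     GOMP_critical_name_start (&.gomp_critical_user_lock1);
     BODY;
     GOMP_critical_name_end (&.gomp_critical_user_lock1);

   where .gomp_critical_user_lock1 is the common symbol created above,
   so every translation unit agrees on the mutex used for a given
   name.  */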
6659 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6660 for a lastprivate clause. Given a loop control predicate of (V
6661 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6662 is appended to *DLIST, and the iterator initialization is appended
6663 to *BODY_P. */
6665 static void
6666 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6667 gimple_seq *dlist, struct omp_context *ctx)
6669 tree clauses, cond, vinit;
6670 enum tree_code cond_code;
6671 gimple_seq stmts;
6673 cond_code = fd->loop.cond_code;
6674 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6676 /* When possible, use a strict equality expression. This can let
6677 optimizations such as VRP deduce the value and remove a copy. */
6678 if (tree_fits_shwi_p (fd->loop.step))
6680 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6681 if (step == 1 || step == -1)
6682 cond_code = EQ_EXPR;
6685 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6686 || gimple_omp_for_grid_phony (fd->for_stmt))
6687 cond = omp_grid_lastprivate_predicate (fd);
6688 else
6690 tree n2 = fd->loop.n2;
6691 if (fd->collapse > 1
6692 && TREE_CODE (n2) != INTEGER_CST
6693 && gimple_omp_for_combined_into_p (fd->for_stmt))
6695 struct omp_context *taskreg_ctx = NULL;
6696 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6698 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6699 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6700 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6702 if (gimple_omp_for_combined_into_p (gfor))
6704 gcc_assert (ctx->outer->outer
6705 && is_parallel_ctx (ctx->outer->outer));
6706 taskreg_ctx = ctx->outer->outer;
6708 else
6710 struct omp_for_data outer_fd;
6711 omp_extract_for_data (gfor, &outer_fd, NULL);
6712 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6715 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6716 taskreg_ctx = ctx->outer->outer;
6718 else if (is_taskreg_ctx (ctx->outer))
6719 taskreg_ctx = ctx->outer;
6720 if (taskreg_ctx)
6722 int i;
6723 tree taskreg_clauses
6724 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6725 tree innerc = omp_find_clause (taskreg_clauses,
6726 OMP_CLAUSE__LOOPTEMP_);
6727 gcc_assert (innerc);
6728 for (i = 0; i < fd->collapse; i++)
6730 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6731 OMP_CLAUSE__LOOPTEMP_);
6732 gcc_assert (innerc);
6734 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6735 OMP_CLAUSE__LOOPTEMP_);
6736 if (innerc)
6737 n2 = fold_convert (TREE_TYPE (n2),
6738 lookup_decl (OMP_CLAUSE_DECL (innerc),
6739 taskreg_ctx));
6742 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6745 clauses = gimple_omp_for_clauses (fd->for_stmt);
6746 stmts = NULL;
6747 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6748 if (!gimple_seq_empty_p (stmts))
6750 gimple_seq_add_seq (&stmts, *dlist);
6751 *dlist = stmts;
6753 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6754 vinit = fd->loop.n1;
6755 if (cond_code == EQ_EXPR
6756 && tree_fits_shwi_p (fd->loop.n2)
6757 && ! integer_zerop (fd->loop.n2))
6758 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6759 else
6760 vinit = unshare_expr (vinit);
6762 /* Initialize the iterator variable, so that threads that don't execute
6763 any iterations don't execute the lastprivate clauses by accident. */
6764 gimplify_assign (fd->loop.v, vinit, body_p);
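/* For example, given

     #pragma omp for lastprivate (x)
     for (V = N1; V < N2; V++) ...

   the unit step permits the strict test described above, so the
   copy-back emitted by lower_lastprivate_clauses is gated as

     if (V == N2) x = <private copy of x>;

   with V pre-initialized so that threads executing no iterations do
   not run the lastprivate code by accident.  */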
6769 /* Lower code for an OMP loop directive. */
6771 static void
6772 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6774 tree *rhs_p, block;
6775 struct omp_for_data fd, *fdp = NULL;
6776 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6777 gbind *new_stmt;
6778 gimple_seq omp_for_body, body, dlist;
6779 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6780 size_t i;
6782 push_gimplify_context ();
6784 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6786 block = make_node (BLOCK);
6787 new_stmt = gimple_build_bind (NULL, NULL, block);
6788 /* Replace at gsi right away, so that 'stmt' is no longer a member
6789 of a sequence, as we're going to add to a different
6790 one below. */
6791 gsi_replace (gsi_p, new_stmt, true);
6793 /* Move declaration of temporaries in the loop body before we make
6794 it go away. */
6795 omp_for_body = gimple_omp_body (stmt);
6796 if (!gimple_seq_empty_p (omp_for_body)
6797 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6799 gbind *inner_bind
6800 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6801 tree vars = gimple_bind_vars (inner_bind);
6802 gimple_bind_append_vars (new_stmt, vars);
6803 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6804 keep them on the inner_bind and its block. */
6805 gimple_bind_set_vars (inner_bind, NULL_TREE);
6806 if (gimple_bind_block (inner_bind))
6807 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6810 if (gimple_omp_for_combined_into_p (stmt))
6812 omp_extract_for_data (stmt, &fd, NULL);
6813 fdp = &fd;
6815 /* We need two temporaries with fd.loop.v type (istart/iend)
6816 and then (fd.collapse - 1) temporaries with the same
6817 type for count2 ... countN-1 vars if not constant. */
6818 size_t count = 2;
6819 tree type = fd.iter_type;
6820 if (fd.collapse > 1
6821 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6822 count += fd.collapse - 1;
6823 bool taskreg_for
6824 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6825 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6826 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6827 tree simtc = NULL;
6828 tree clauses = *pc;
6829 if (taskreg_for)
6830 outerc
6831 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6832 OMP_CLAUSE__LOOPTEMP_);
6833 if (ctx->simt_stmt)
6834 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6835 OMP_CLAUSE__LOOPTEMP_);
6836 for (i = 0; i < count; i++)
6838 tree temp;
6839 if (taskreg_for)
6841 gcc_assert (outerc);
6842 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6843 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6844 OMP_CLAUSE__LOOPTEMP_);
6846 else
6848 /* If there are 2 adjacent SIMD stmts, one with _simt_
6849 clause, another without, make sure they have the same
6850 decls in _looptemp_ clauses, because the outer stmt
6851 they are combined into will look up just one inner_stmt. */
6852 if (ctx->simt_stmt)
6853 temp = OMP_CLAUSE_DECL (simtc);
6854 else
6855 temp = create_tmp_var (type);
6856 insert_decl_map (&ctx->outer->cb, temp, temp);
6858 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6859 OMP_CLAUSE_DECL (*pc) = temp;
6860 pc = &OMP_CLAUSE_CHAIN (*pc);
6861 if (ctx->simt_stmt)
6862 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6863 OMP_CLAUSE__LOOPTEMP_);
6865 *pc = clauses;
6868 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6869 dlist = NULL;
6870 body = NULL;
6871 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6872 fdp);
6873 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6875 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6877 /* Lower the header expressions. At this point, we can assume that
6878 the header is of the form:
6880 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6882 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6883 using the .omp_data_s mapping, if needed. */
6884 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6886 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6887 if (!is_gimple_min_invariant (*rhs_p))
6888 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6889 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6890 recompute_tree_invariant_for_addr_expr (*rhs_p);
6892 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6893 if (!is_gimple_min_invariant (*rhs_p))
6894 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6895 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6896 recompute_tree_invariant_for_addr_expr (*rhs_p);
6898 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6899 if (!is_gimple_min_invariant (*rhs_p))
6900 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6903 /* Once lowered, extract the bounds and clauses. */
6904 omp_extract_for_data (stmt, &fd, NULL);
6906 if (is_gimple_omp_oacc (ctx->stmt)
6907 && !ctx_in_oacc_kernels_region (ctx))
6908 lower_oacc_head_tail (gimple_location (stmt),
6909 gimple_omp_for_clauses (stmt),
6910 &oacc_head, &oacc_tail, ctx);
6912 /* Add OpenACC partitioning and reduction markers just before the loop. */
6913 if (oacc_head)
6914 gimple_seq_add_seq (&body, oacc_head);
6916 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6918 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6919 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6920 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6921 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6923 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6924 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6925 OMP_CLAUSE_LINEAR_STEP (c)
6926 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6927 ctx);
6930 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6931 && gimple_omp_for_grid_phony (stmt));
6932 if (!phony_loop)
6933 gimple_seq_add_stmt (&body, stmt);
6934 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6936 if (!phony_loop)
6937 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6938 fd.loop.v));
6940 /* After the loop, add exit clauses. */
6941 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6943 if (ctx->cancellable)
6944 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6946 gimple_seq_add_seq (&body, dlist);
6948 body = maybe_catch_exception (body);
6950 if (!phony_loop)
6952 /* Region exit marker goes at the end of the loop body. */
6953 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6954 maybe_add_implicit_barrier_cancel (ctx, &body);
6957 /* Add OpenACC joining and reduction markers just after the loop. */
6958 if (oacc_tail)
6959 gimple_seq_add_seq (&body, oacc_tail);
6961 pop_gimplify_context (new_stmt);
6963 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6964 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6965 if (BLOCK_VARS (block))
6966 TREE_USED (block) = 1;
6968 gimple_bind_set_body (new_stmt, body);
6969 gimple_omp_set_body (stmt, NULL);
6970 gimple_omp_for_set_pre_body (stmt, NULL);
6973 /* Callback for walk_stmts. Check if the current statement only contains
6974 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6976 static tree
6977 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6978 bool *handled_ops_p,
6979 struct walk_stmt_info *wi)
6981 int *info = (int *) wi->info;
6982 gimple *stmt = gsi_stmt (*gsi_p);
6984 *handled_ops_p = true;
6985 switch (gimple_code (stmt))
6987 WALK_SUBSTMTS;
6989 case GIMPLE_DEBUG:
6990 break;
6991 case GIMPLE_OMP_FOR:
6992 case GIMPLE_OMP_SECTIONS:
6993 *info = *info == 0 ? 1 : -1;
6994 break;
6995 default:
6996 *info = -1;
6997 break;
6999 return NULL;
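/* Thus, for instance,

     #pragma omp parallel
     #pragma omp for
     for (...) ...

   leaves the counter at 1 and the parallel can be marked combined,
   whereas any additional statement (other than debug stmts) in the
   parallel body drives the counter to -1.  */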
7002 struct omp_taskcopy_context
7004 /* This field must be at the beginning, as we do "inheritance": Some
7005 callback functions for tree-inline.c (e.g., omp_copy_decl)
7006 receive a copy_body_data pointer that is up-casted to an
7007 omp_context pointer. */
7008 copy_body_data cb;
7009 omp_context *ctx;
7012 static tree
7013 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7015 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7017 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7018 return create_tmp_var (TREE_TYPE (var));
7020 return var;
7023 static tree
7024 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7026 tree name, new_fields = NULL, type, f;
7028 type = lang_hooks.types.make_type (RECORD_TYPE);
7029 name = DECL_NAME (TYPE_NAME (orig_type));
7030 name = build_decl (gimple_location (tcctx->ctx->stmt),
7031 TYPE_DECL, name, type);
7032 TYPE_NAME (type) = name;
7034 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7036 tree new_f = copy_node (f);
7037 DECL_CONTEXT (new_f) = type;
7038 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7039 TREE_CHAIN (new_f) = new_fields;
7040 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7041 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7042 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7043 &tcctx->cb, NULL);
7044 new_fields = new_f;
7045 tcctx->cb.decl_map->put (f, new_f);
7047 TYPE_FIELDS (type) = nreverse (new_fields);
7048 layout_type (type);
7049 return type;
7052 /* Create task copyfn. */
7054 static void
7055 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7057 struct function *child_cfun;
7058 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7059 tree record_type, srecord_type, bind, list;
7060 bool record_needs_remap = false, srecord_needs_remap = false;
7061 splay_tree_node n;
7062 struct omp_taskcopy_context tcctx;
7063 location_t loc = gimple_location (task_stmt);
7065 child_fn = gimple_omp_task_copy_fn (task_stmt);
7066 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7067 gcc_assert (child_cfun->cfg == NULL);
7068 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7070 /* Reset DECL_CONTEXT on function arguments. */
7071 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7072 DECL_CONTEXT (t) = child_fn;
7074 /* Populate the function. */
7075 push_gimplify_context ();
7076 push_cfun (child_cfun);
7078 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7079 TREE_SIDE_EFFECTS (bind) = 1;
7080 list = NULL;
7081 DECL_SAVED_TREE (child_fn) = bind;
7082 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7084 /* Remap src and dst argument types if needed. */
7085 record_type = ctx->record_type;
7086 srecord_type = ctx->srecord_type;
7087 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7088 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7090 record_needs_remap = true;
7091 break;
7093 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7094 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7096 srecord_needs_remap = true;
7097 break;
7100 if (record_needs_remap || srecord_needs_remap)
7102 memset (&tcctx, '\0', sizeof (tcctx));
7103 tcctx.cb.src_fn = ctx->cb.src_fn;
7104 tcctx.cb.dst_fn = child_fn;
7105 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7106 gcc_checking_assert (tcctx.cb.src_node);
7107 tcctx.cb.dst_node = tcctx.cb.src_node;
7108 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7109 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7110 tcctx.cb.eh_lp_nr = 0;
7111 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7112 tcctx.cb.decl_map = new hash_map<tree, tree>;
7113 tcctx.ctx = ctx;
7115 if (record_needs_remap)
7116 record_type = task_copyfn_remap_type (&tcctx, record_type);
7117 if (srecord_needs_remap)
7118 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7120 else
7121 tcctx.cb.decl_map = NULL;
7123 arg = DECL_ARGUMENTS (child_fn);
7124 TREE_TYPE (arg) = build_pointer_type (record_type);
7125 sarg = DECL_CHAIN (arg);
7126 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7128 /* First pass: initialize temporaries used in record_type and srecord_type
7129 sizes and field offsets. */
7130 if (tcctx.cb.decl_map)
7131 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7132 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7134 tree *p;
7136 decl = OMP_CLAUSE_DECL (c);
7137 p = tcctx.cb.decl_map->get (decl);
7138 if (p == NULL)
7139 continue;
7140 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7141 sf = (tree) n->value;
7142 sf = *tcctx.cb.decl_map->get (sf);
7143 src = build_simple_mem_ref_loc (loc, sarg);
7144 src = omp_build_component_ref (src, sf);
7145 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7146 append_to_statement_list (t, &list);
7149 /* Second pass: copy shared var pointers and copy construct non-VLA
7150 firstprivate vars. */
7151 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7152 switch (OMP_CLAUSE_CODE (c))
7154 splay_tree_key key;
7155 case OMP_CLAUSE_SHARED:
7156 decl = OMP_CLAUSE_DECL (c);
7157 key = (splay_tree_key) decl;
7158 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7159 key = (splay_tree_key) &DECL_UID (decl);
7160 n = splay_tree_lookup (ctx->field_map, key);
7161 if (n == NULL)
7162 break;
7163 f = (tree) n->value;
7164 if (tcctx.cb.decl_map)
7165 f = *tcctx.cb.decl_map->get (f);
7166 n = splay_tree_lookup (ctx->sfield_map, key);
7167 sf = (tree) n->value;
7168 if (tcctx.cb.decl_map)
7169 sf = *tcctx.cb.decl_map->get (sf);
7170 src = build_simple_mem_ref_loc (loc, sarg);
7171 src = omp_build_component_ref (src, sf);
7172 dst = build_simple_mem_ref_loc (loc, arg);
7173 dst = omp_build_component_ref (dst, f);
7174 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7175 append_to_statement_list (t, &list);
7176 break;
7177 case OMP_CLAUSE_FIRSTPRIVATE:
7178 decl = OMP_CLAUSE_DECL (c);
7179 if (is_variable_sized (decl))
7180 break;
7181 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7182 if (n == NULL)
7183 break;
7184 f = (tree) n->value;
7185 if (tcctx.cb.decl_map)
7186 f = *tcctx.cb.decl_map->get (f);
7187 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7188 if (n != NULL)
7190 sf = (tree) n->value;
7191 if (tcctx.cb.decl_map)
7192 sf = *tcctx.cb.decl_map->get (sf);
7193 src = build_simple_mem_ref_loc (loc, sarg);
7194 src = omp_build_component_ref (src, sf);
7195 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7196 src = build_simple_mem_ref_loc (loc, src);
7198 else
7199 src = decl;
7200 dst = build_simple_mem_ref_loc (loc, arg);
7201 dst = omp_build_component_ref (dst, f);
7202 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7203 append_to_statement_list (t, &list);
7204 break;
7205 case OMP_CLAUSE_PRIVATE:
7206 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7207 break;
7208 decl = OMP_CLAUSE_DECL (c);
7209 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7210 f = (tree) n->value;
7211 if (tcctx.cb.decl_map)
7212 f = *tcctx.cb.decl_map->get (f);
7213 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7214 if (n != NULL)
7216 sf = (tree) n->value;
7217 if (tcctx.cb.decl_map)
7218 sf = *tcctx.cb.decl_map->get (sf);
7219 src = build_simple_mem_ref_loc (loc, sarg);
7220 src = omp_build_component_ref (src, sf);
7221 if (use_pointer_for_field (decl, NULL))
7222 src = build_simple_mem_ref_loc (loc, src);
7224 else
7225 src = decl;
7226 dst = build_simple_mem_ref_loc (loc, arg);
7227 dst = omp_build_component_ref (dst, f);
7228 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7229 append_to_statement_list (t, &list);
7230 break;
7231 default:
7232 break;
7235 /* Last pass: handle VLA firstprivates. */
7236 if (tcctx.cb.decl_map)
7237 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7238 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7240 tree ind, ptr, df;
7242 decl = OMP_CLAUSE_DECL (c);
7243 if (!is_variable_sized (decl))
7244 continue;
7245 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7246 if (n == NULL)
7247 continue;
7248 f = (tree) n->value;
7249 f = *tcctx.cb.decl_map->get (f);
7250 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7251 ind = DECL_VALUE_EXPR (decl);
7252 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7253 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7254 n = splay_tree_lookup (ctx->sfield_map,
7255 (splay_tree_key) TREE_OPERAND (ind, 0));
7256 sf = (tree) n->value;
7257 sf = *tcctx.cb.decl_map->get (sf);
7258 src = build_simple_mem_ref_loc (loc, sarg);
7259 src = omp_build_component_ref (src, sf);
7260 src = build_simple_mem_ref_loc (loc, src);
7261 dst = build_simple_mem_ref_loc (loc, arg);
7262 dst = omp_build_component_ref (dst, f);
7263 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7264 append_to_statement_list (t, &list);
7265 n = splay_tree_lookup (ctx->field_map,
7266 (splay_tree_key) TREE_OPERAND (ind, 0));
7267 df = (tree) n->value;
7268 df = *tcctx.cb.decl_map->get (df);
7269 ptr = build_simple_mem_ref_loc (loc, arg);
7270 ptr = omp_build_component_ref (ptr, df);
7271 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7272 build_fold_addr_expr_loc (loc, dst));
7273 append_to_statement_list (t, &list);
7276 t = build1 (RETURN_EXPR, void_type_node, NULL);
7277 append_to_statement_list (t, &list);
7279 if (tcctx.cb.decl_map)
7280 delete tcctx.cb.decl_map;
7281 pop_gimplify_context (NULL);
7282 BIND_EXPR_BODY (bind) = list;
7283 pop_cfun ();
7286 static void
7287 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7289 tree c, clauses;
7290 gimple *g;
7291 size_t n_in = 0, n_out = 0, idx = 2, i;
7293 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7294 gcc_assert (clauses);
7295 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7296 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7297 switch (OMP_CLAUSE_DEPEND_KIND (c))
7299 case OMP_CLAUSE_DEPEND_IN:
7300 n_in++;
7301 break;
7302 case OMP_CLAUSE_DEPEND_OUT:
7303 case OMP_CLAUSE_DEPEND_INOUT:
7304 n_out++;
7305 break;
7306 case OMP_CLAUSE_DEPEND_SOURCE:
7307 case OMP_CLAUSE_DEPEND_SINK:
7308 /* FALLTHRU */
7309 default:
7310 gcc_unreachable ();
7312 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7313 tree array = create_tmp_var (type);
7314 TREE_ADDRESSABLE (array) = 1;
7315 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7316 NULL_TREE);
7317 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7318 gimple_seq_add_stmt (iseq, g);
7319 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7320 NULL_TREE);
7321 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7322 gimple_seq_add_stmt (iseq, g);
7323 for (i = 0; i < 2; i++)
7325 if ((i ? n_in : n_out) == 0)
7326 continue;
7327 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7328 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7329 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7331 tree t = OMP_CLAUSE_DECL (c);
7332 t = fold_convert (ptr_type_node, t);
7333 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7334 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7335 NULL_TREE, NULL_TREE);
7336 g = gimple_build_assign (r, t);
7337 gimple_seq_add_stmt (iseq, g);
7340 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7341 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7342 OMP_CLAUSE_CHAIN (c) = *pclauses;
7343 *pclauses = c;
7344 tree clobber = build_constructor (type, NULL);
7345 TREE_THIS_VOLATILE (clobber) = 1;
7346 g = gimple_build_assign (array, clobber);
7347 gimple_seq_add_stmt (oseq, g);
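/* As an example of the array layout built above, for

     depend(out: a) depend(in: b, c)

   the emitted initialization is, in effect,

     depend[0] = 3;    // total number of depend addresses
     depend[1] = 1;    // of which the first one is out/inout
     depend[2] = &a;   // out/inout addresses come first
     depend[3] = &b;   // then the in addresses
     depend[4] = &c;

   which is the vector layout GOMP_task and friends expect.  */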
7350 /* Lower the OpenMP parallel or task directive in the current statement
7351 in GSI_P. CTX holds context information for the directive. */
7353 static void
7354 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7356 tree clauses;
7357 tree child_fn, t;
7358 gimple *stmt = gsi_stmt (*gsi_p);
7359 gbind *par_bind, *bind, *dep_bind = NULL;
7360 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7361 location_t loc = gimple_location (stmt);
7363 clauses = gimple_omp_taskreg_clauses (stmt);
7364 par_bind
7365 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7366 par_body = gimple_bind_body (par_bind);
7367 child_fn = ctx->cb.dst_fn;
7368 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7369 && !gimple_omp_parallel_combined_p (stmt))
7371 struct walk_stmt_info wi;
7372 int ws_num = 0;
7374 memset (&wi, 0, sizeof (wi));
7375 wi.info = &ws_num;
7376 wi.val_only = true;
7377 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7378 if (ws_num == 1)
7379 gimple_omp_parallel_set_combined_p (stmt, true);
7381 gimple_seq dep_ilist = NULL;
7382 gimple_seq dep_olist = NULL;
7383 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7384 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7386 push_gimplify_context ();
7387 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7388 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7389 &dep_ilist, &dep_olist);
7392 if (ctx->srecord_type)
7393 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7395 push_gimplify_context ();
7397 par_olist = NULL;
7398 par_ilist = NULL;
7399 par_rlist = NULL;
7400 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7401 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7402 if (phony_construct && ctx->record_type)
7404 gcc_checking_assert (!ctx->receiver_decl);
7405 ctx->receiver_decl = create_tmp_var
7406 (build_reference_type (ctx->record_type), ".omp_rec");
7408 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7409 lower_omp (&par_body, ctx);
7410 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7411 lower_reduction_clauses (clauses, &par_rlist, ctx);
7413 /* Declare all the variables created by mapping and the variables
7414 declared in the scope of the parallel body. */
7415 record_vars_into (ctx->block_vars, child_fn);
7416 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7418 if (ctx->record_type)
7420 ctx->sender_decl
7421 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7422 : ctx->record_type, ".omp_data_o");
7423 DECL_NAMELESS (ctx->sender_decl) = 1;
7424 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7425 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7428 olist = NULL;
7429 ilist = NULL;
7430 lower_send_clauses (clauses, &ilist, &olist, ctx);
7431 lower_send_shared_vars (&ilist, &olist, ctx);
7433 if (ctx->record_type)
7435 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7436 TREE_THIS_VOLATILE (clobber) = 1;
7437 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7438 clobber));
7441 /* Once all the expansions are done, sequence all the different
7442 fragments inside gimple_omp_body. */
7444 new_body = NULL;
7446 if (ctx->record_type)
7448 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7449 /* fixup_child_record_type might have changed receiver_decl's type. */
7450 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7451 gimple_seq_add_stmt (&new_body,
7452 gimple_build_assign (ctx->receiver_decl, t));
7455 gimple_seq_add_seq (&new_body, par_ilist);
7456 gimple_seq_add_seq (&new_body, par_body);
7457 gimple_seq_add_seq (&new_body, par_rlist);
7458 if (ctx->cancellable)
7459 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7460 gimple_seq_add_seq (&new_body, par_olist);
7461 new_body = maybe_catch_exception (new_body);
7462 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7463 gimple_seq_add_stmt (&new_body,
7464 gimple_build_omp_continue (integer_zero_node,
7465 integer_zero_node));
7466 if (!phony_construct)
7468 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7469 gimple_omp_set_body (stmt, new_body);
7472 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7473 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7474 gimple_bind_add_seq (bind, ilist);
7475 if (!phony_construct)
7476 gimple_bind_add_stmt (bind, stmt);
7477 else
7478 gimple_bind_add_seq (bind, new_body);
7479 gimple_bind_add_seq (bind, olist);
7481 pop_gimplify_context (NULL);
7483 if (dep_bind)
7485 gimple_bind_add_seq (dep_bind, dep_ilist);
7486 gimple_bind_add_stmt (dep_bind, bind);
7487 gimple_bind_add_seq (dep_bind, dep_olist);
7488 pop_gimplify_context (dep_bind);
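/* To summarize the marshalling set up above: the encountering thread
   fills in the local .omp_data_o record via the send clauses, e.g.

     .omp_data_o.i = i;
     GIMPLE_OMP_PARALLEL <child fn, &.omp_data_o>

   and the child body starts from

     .omp_data_i = &.omp_data_o;
     ... uses of .omp_data_i->i ...

   with .omp_data_o clobbered once the region is over.  */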
7492 /* Lower the GIMPLE_OMP_TARGET in the current statement
7493 in GSI_P. CTX holds context information for the directive. */
7495 static void
7496 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7498 tree clauses;
7499 tree child_fn, t, c;
7500 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7501 gbind *tgt_bind, *bind, *dep_bind = NULL;
7502 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7503 location_t loc = gimple_location (stmt);
7504 bool offloaded, data_region;
7505 unsigned int map_cnt = 0;
7507 offloaded = is_gimple_omp_offloaded (stmt);
7508 switch (gimple_omp_target_kind (stmt))
7510 case GF_OMP_TARGET_KIND_REGION:
7511 case GF_OMP_TARGET_KIND_UPDATE:
7512 case GF_OMP_TARGET_KIND_ENTER_DATA:
7513 case GF_OMP_TARGET_KIND_EXIT_DATA:
7514 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7515 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7516 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7517 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7518 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7519 data_region = false;
7520 break;
7521 case GF_OMP_TARGET_KIND_DATA:
7522 case GF_OMP_TARGET_KIND_OACC_DATA:
7523 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7524 data_region = true;
7525 break;
7526 default:
7527 gcc_unreachable ();
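/* Of the kinds above, only the region-forming ones (GF_OMP_TARGET_KIND_REGION
   and the OpenACC parallel/kernels kinds) count as offloaded per
   is_gimple_omp_offloaded; the update and enter/exit data style kinds are
   standalone directives for which both OFFLOADED and DATA_REGION stay
   false.  */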
7530 clauses = gimple_omp_target_clauses (stmt);
7532 gimple_seq dep_ilist = NULL;
7533 gimple_seq dep_olist = NULL;
7534 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7536 push_gimplify_context ();
7537 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7538 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7539 &dep_ilist, &dep_olist);
7542 tgt_bind = NULL;
7543 tgt_body = NULL;
7544 if (offloaded)
7546 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7547 tgt_body = gimple_bind_body (tgt_bind);
7549 else if (data_region)
7550 tgt_body = gimple_omp_body (stmt);
7551 child_fn = ctx->cb.dst_fn;
7553 push_gimplify_context ();
7554 fplist = NULL;
7556 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7557 switch (OMP_CLAUSE_CODE (c))
7559 tree var, x;
7561 default:
7562 break;
7563 case OMP_CLAUSE_MAP:
7564 #if CHECKING_P
7565 /* First check what we're prepared to handle in the following. */
7566 switch (OMP_CLAUSE_MAP_KIND (c))
7568 case GOMP_MAP_ALLOC:
7569 case GOMP_MAP_TO:
7570 case GOMP_MAP_FROM:
7571 case GOMP_MAP_TOFROM:
7572 case GOMP_MAP_POINTER:
7573 case GOMP_MAP_TO_PSET:
7574 case GOMP_MAP_DELETE:
7575 case GOMP_MAP_RELEASE:
7576 case GOMP_MAP_ALWAYS_TO:
7577 case GOMP_MAP_ALWAYS_FROM:
7578 case GOMP_MAP_ALWAYS_TOFROM:
7579 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7580 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7581 case GOMP_MAP_STRUCT:
7582 case GOMP_MAP_ALWAYS_POINTER:
7583 break;
7584 case GOMP_MAP_FORCE_ALLOC:
7585 case GOMP_MAP_FORCE_TO:
7586 case GOMP_MAP_FORCE_FROM:
7587 case GOMP_MAP_FORCE_TOFROM:
7588 case GOMP_MAP_FORCE_PRESENT:
7589 case GOMP_MAP_FORCE_DEVICEPTR:
7590 case GOMP_MAP_DEVICE_RESIDENT:
7591 case GOMP_MAP_LINK:
7592 gcc_assert (is_gimple_omp_oacc (stmt));
7593 break;
7594 default:
7595 gcc_unreachable ();
7597 #endif
7598 /* FALLTHRU */
7599 case OMP_CLAUSE_TO:
7600 case OMP_CLAUSE_FROM:
7601 oacc_firstprivate:
7602 var = OMP_CLAUSE_DECL (c);
7603 if (!DECL_P (var))
7605 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7606 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7607 && (OMP_CLAUSE_MAP_KIND (c)
7608 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7609 map_cnt++;
7610 continue;
7613 if (DECL_SIZE (var)
7614 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7616 tree var2 = DECL_VALUE_EXPR (var);
7617 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7618 var2 = TREE_OPERAND (var2, 0);
7619 gcc_assert (DECL_P (var2));
7620 var = var2;
7623 if (offloaded
7624 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7625 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7626 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7628 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7630 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7631 && varpool_node::get_create (var)->offloadable)
7632 continue;
7634 tree type = build_pointer_type (TREE_TYPE (var));
7635 tree new_var = lookup_decl (var, ctx);
7636 x = create_tmp_var_raw (type, get_name (new_var));
7637 gimple_add_tmp_var (x);
7638 x = build_simple_mem_ref (x);
7639 SET_DECL_VALUE_EXPR (new_var, x);
7640 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7642 continue;
7645 if (!maybe_lookup_field (var, ctx))
7646 continue;
7648 /* Don't remap oacc parallel reduction variables, because the
7649 intermediate result must be local to each gang. */
7650 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7651 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7653 x = build_receiver_ref (var, true, ctx);
7654 tree new_var = lookup_decl (var, ctx);
7656 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7657 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7658 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7659 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7660 x = build_simple_mem_ref (x);
7661 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7663 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7664 if (omp_is_reference (new_var))
7666 /* Create a local object to hold the instance
7667 value. */
7668 tree type = TREE_TYPE (TREE_TYPE (new_var));
7669 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7670 tree inst = create_tmp_var (type, id);
7671 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7672 x = build_fold_addr_expr (inst);
7674 gimplify_assign (new_var, x, &fplist);
7676 else if (DECL_P (new_var))
7678 SET_DECL_VALUE_EXPR (new_var, x);
7679 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7681 else
7682 gcc_unreachable ();
7684 map_cnt++;
7685 break;
7687 case OMP_CLAUSE_FIRSTPRIVATE:
7688 if (is_oacc_parallel (ctx))
7689 goto oacc_firstprivate;
7690 map_cnt++;
7691 var = OMP_CLAUSE_DECL (c);
7692 if (!omp_is_reference (var)
7693 && !is_gimple_reg_type (TREE_TYPE (var)))
7695 tree new_var = lookup_decl (var, ctx);
7696 if (is_variable_sized (var))
7698 tree pvar = DECL_VALUE_EXPR (var);
7699 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7700 pvar = TREE_OPERAND (pvar, 0);
7701 gcc_assert (DECL_P (pvar));
7702 tree new_pvar = lookup_decl (pvar, ctx);
7703 x = build_fold_indirect_ref (new_pvar);
7704 TREE_THIS_NOTRAP (x) = 1;
7706 else
7707 x = build_receiver_ref (var, true, ctx);
7708 SET_DECL_VALUE_EXPR (new_var, x);
7709 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7711 break;
7713 case OMP_CLAUSE_PRIVATE:
7714 if (is_gimple_omp_oacc (ctx->stmt))
7715 break;
7716 var = OMP_CLAUSE_DECL (c);
7717 if (is_variable_sized (var))
7719 tree new_var = lookup_decl (var, ctx);
7720 tree pvar = DECL_VALUE_EXPR (var);
7721 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7722 pvar = TREE_OPERAND (pvar, 0);
7723 gcc_assert (DECL_P (pvar));
7724 tree new_pvar = lookup_decl (pvar, ctx);
7725 x = build_fold_indirect_ref (new_pvar);
7726 TREE_THIS_NOTRAP (x) = 1;
7727 SET_DECL_VALUE_EXPR (new_var, x);
7728 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7730 break;
7732 case OMP_CLAUSE_USE_DEVICE_PTR:
7733 case OMP_CLAUSE_IS_DEVICE_PTR:
7734 var = OMP_CLAUSE_DECL (c);
7735 map_cnt++;
7736 if (is_variable_sized (var))
7738 tree new_var = lookup_decl (var, ctx);
7739 tree pvar = DECL_VALUE_EXPR (var);
7740 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7741 pvar = TREE_OPERAND (pvar, 0);
7742 gcc_assert (DECL_P (pvar));
7743 tree new_pvar = lookup_decl (pvar, ctx);
7744 x = build_fold_indirect_ref (new_pvar);
7745 TREE_THIS_NOTRAP (x) = 1;
7746 SET_DECL_VALUE_EXPR (new_var, x);
7747 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7749 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7751 tree new_var = lookup_decl (var, ctx);
7752 tree type = build_pointer_type (TREE_TYPE (var));
7753 x = create_tmp_var_raw (type, get_name (new_var));
7754 gimple_add_tmp_var (x);
7755 x = build_simple_mem_ref (x);
7756 SET_DECL_VALUE_EXPR (new_var, x);
7757 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7759 else
7761 tree new_var = lookup_decl (var, ctx);
7762 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7763 gimple_add_tmp_var (x);
7764 SET_DECL_VALUE_EXPR (new_var, x);
7765 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7767 break;
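/* At this point MAP_CNT is the number of run-time mapping slots the clause
   walk above decided to allocate.  The sizes and kinds arrays built below
   get one element per slot, and the second clause walk checks that it
   filled exactly that many (gcc_assert (map_idx == map_cnt)).  */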
7770 if (offloaded)
7772 target_nesting_level++;
7773 lower_omp (&tgt_body, ctx);
7774 target_nesting_level--;
7776 else if (data_region)
7777 lower_omp (&tgt_body, ctx);
7779 if (offloaded)
7781 /* Declare all the variables created by mapping and the variables
7782 declared in the scope of the target body. */
7783 record_vars_into (ctx->block_vars, child_fn);
7784 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7787 olist = NULL;
7788 ilist = NULL;
7789 if (ctx->record_type)
7791 ctx->sender_decl
7792 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7793 DECL_NAMELESS (ctx->sender_decl) = 1;
7794 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7795 t = make_tree_vec (3);
7796 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7797 TREE_VEC_ELT (t, 1)
7798 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7799 ".omp_data_sizes");
7800 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7801 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7802 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7803 tree tkind_type = short_unsigned_type_node;
7804 int talign_shift = 8;
7805 TREE_VEC_ELT (t, 2)
7806 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7807 ".omp_data_kinds");
7808 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7809 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7810 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7811 gimple_omp_target_set_data_arg (stmt, t);
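/* To illustrate the layout (a sketch only; the kinds encoding is computed
   further below): for "#pragma omp target map(tofrom: a)" with a 4-byte
   int "a", the TREE_VEC installed as the data arg conceptually holds

       .omp_data_arr   = { &a }
       .omp_data_sizes = { 4 }
       .omp_data_kinds = { GOMP_MAP_TOFROM | (ceil_log2 (align) << 8) }

   one parallel entry per mapped object, consumed at run time by libgomp's
   target entry points.  */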
7813 vec<constructor_elt, va_gc> *vsize;
7814 vec<constructor_elt, va_gc> *vkind;
7815 vec_alloc (vsize, map_cnt);
7816 vec_alloc (vkind, map_cnt);
7817 unsigned int map_idx = 0;
7819 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7820 switch (OMP_CLAUSE_CODE (c))
7822 tree ovar, nc, s, purpose, var, x, type;
7823 unsigned int talign;
7825 default:
7826 break;
7828 case OMP_CLAUSE_MAP:
7829 case OMP_CLAUSE_TO:
7830 case OMP_CLAUSE_FROM:
7831 oacc_firstprivate_map:
7832 nc = c;
7833 ovar = OMP_CLAUSE_DECL (c);
7834 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7835 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7836 || (OMP_CLAUSE_MAP_KIND (c)
7837 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7838 break;
7839 if (!DECL_P (ovar))
7841 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7842 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7844 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7845 == get_base_address (ovar));
7846 nc = OMP_CLAUSE_CHAIN (c);
7847 ovar = OMP_CLAUSE_DECL (nc);
7849 else
7851 tree x = build_sender_ref (ovar, ctx);
7852 tree v
7853 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7854 gimplify_assign (x, v, &ilist);
7855 nc = NULL_TREE;
7858 else
7860 if (DECL_SIZE (ovar)
7861 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7863 tree ovar2 = DECL_VALUE_EXPR (ovar);
7864 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7865 ovar2 = TREE_OPERAND (ovar2, 0);
7866 gcc_assert (DECL_P (ovar2));
7867 ovar = ovar2;
7869 if (!maybe_lookup_field (ovar, ctx))
7870 continue;
7873 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7874 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7875 talign = DECL_ALIGN_UNIT (ovar);
7876 if (nc)
7878 var = lookup_decl_in_outer_ctx (ovar, ctx);
7879 x = build_sender_ref (ovar, ctx);
7881 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7882 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7883 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7884 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7886 gcc_assert (offloaded);
7887 tree avar
7888 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7889 mark_addressable (avar);
7890 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7891 talign = DECL_ALIGN_UNIT (avar);
7892 avar = build_fold_addr_expr (avar);
7893 gimplify_assign (x, avar, &ilist);
7895 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7897 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7898 if (!omp_is_reference (var))
7900 if (is_gimple_reg (var)
7901 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7902 TREE_NO_WARNING (var) = 1;
7903 var = build_fold_addr_expr (var);
7905 else
7906 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7907 gimplify_assign (x, var, &ilist);
7909 else if (is_gimple_reg (var))
7911 gcc_assert (offloaded);
7912 tree avar = create_tmp_var (TREE_TYPE (var));
7913 mark_addressable (avar);
7914 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7915 if (GOMP_MAP_COPY_TO_P (map_kind)
7916 || map_kind == GOMP_MAP_POINTER
7917 || map_kind == GOMP_MAP_TO_PSET
7918 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7920 /* If we need to initialize a temporary
7921 with VAR because it is not addressable, and
7922 the variable hasn't been initialized yet, then
7923 we'll get a warning for the store to avar.
7924 Don't warn in that case; the mapping might
7925 be implicit. */
7926 TREE_NO_WARNING (var) = 1;
7927 gimplify_assign (avar, var, &ilist);
7929 avar = build_fold_addr_expr (avar);
7930 gimplify_assign (x, avar, &ilist);
7931 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7932 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7933 && !TYPE_READONLY (TREE_TYPE (var)))
7935 x = unshare_expr (x);
7936 x = build_simple_mem_ref (x);
7937 gimplify_assign (var, x, &olist);
7940 else
7942 var = build_fold_addr_expr (var);
7943 gimplify_assign (x, var, &ilist);
7946 s = NULL_TREE;
7947 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7949 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7950 s = TREE_TYPE (ovar);
7951 if (TREE_CODE (s) == REFERENCE_TYPE)
7952 s = TREE_TYPE (s);
7953 s = TYPE_SIZE_UNIT (s);
7955 else
7956 s = OMP_CLAUSE_SIZE (c);
7957 if (s == NULL_TREE)
7958 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7959 s = fold_convert (size_type_node, s);
7960 purpose = size_int (map_idx++);
7961 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7962 if (TREE_CODE (s) != INTEGER_CST)
7963 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7965 unsigned HOST_WIDE_INT tkind, tkind_zero;
7966 switch (OMP_CLAUSE_CODE (c))
7968 case OMP_CLAUSE_MAP:
7969 tkind = OMP_CLAUSE_MAP_KIND (c);
7970 tkind_zero = tkind;
7971 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7972 switch (tkind)
7974 case GOMP_MAP_ALLOC:
7975 case GOMP_MAP_TO:
7976 case GOMP_MAP_FROM:
7977 case GOMP_MAP_TOFROM:
7978 case GOMP_MAP_ALWAYS_TO:
7979 case GOMP_MAP_ALWAYS_FROM:
7980 case GOMP_MAP_ALWAYS_TOFROM:
7981 case GOMP_MAP_RELEASE:
7982 case GOMP_MAP_FORCE_TO:
7983 case GOMP_MAP_FORCE_FROM:
7984 case GOMP_MAP_FORCE_TOFROM:
7985 case GOMP_MAP_FORCE_PRESENT:
7986 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7987 break;
7988 case GOMP_MAP_DELETE:
7989 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7990 default:
7991 break;
7993 if (tkind_zero != tkind)
7995 if (integer_zerop (s))
7996 tkind = tkind_zero;
7997 else if (integer_nonzerop (s))
7998 tkind_zero = tkind;
8000 break;
8001 case OMP_CLAUSE_FIRSTPRIVATE:
8002 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8003 tkind = GOMP_MAP_TO;
8004 tkind_zero = tkind;
8005 break;
8006 case OMP_CLAUSE_TO:
8007 tkind = GOMP_MAP_TO;
8008 tkind_zero = tkind;
8009 break;
8010 case OMP_CLAUSE_FROM:
8011 tkind = GOMP_MAP_FROM;
8012 tkind_zero = tkind;
8013 break;
8014 default:
8015 gcc_unreachable ();
8017 gcc_checking_assert (tkind
8018 < (HOST_WIDE_INT_C (1U) << talign_shift));
8019 gcc_checking_assert (tkind_zero
8020 < (HOST_WIDE_INT_C (1U) << talign_shift));
8021 talign = ceil_log2 (talign);
8022 tkind |= talign << talign_shift;
8023 tkind_zero |= talign << talign_shift;
8024 gcc_checking_assert (tkind
8025 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8026 gcc_checking_assert (tkind_zero
8027 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8028 if (tkind == tkind_zero)
8029 x = build_int_cstu (tkind_type, tkind);
8030 else
8032 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8033 x = build3 (COND_EXPR, tkind_type,
8034 fold_build2 (EQ_EXPR, boolean_type_node,
8035 unshare_expr (s), size_zero_node),
8036 build_int_cstu (tkind_type, tkind_zero),
8037 build_int_cstu (tkind_type, tkind));
8039 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8040 if (nc && nc != c)
8041 c = nc;
8042 break;
8044 case OMP_CLAUSE_FIRSTPRIVATE:
8045 if (is_oacc_parallel (ctx))
8046 goto oacc_firstprivate_map;
8047 ovar = OMP_CLAUSE_DECL (c);
8048 if (omp_is_reference (ovar))
8049 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8050 else
8051 talign = DECL_ALIGN_UNIT (ovar);
8052 var = lookup_decl_in_outer_ctx (ovar, ctx);
8053 x = build_sender_ref (ovar, ctx);
8054 tkind = GOMP_MAP_FIRSTPRIVATE;
8055 type = TREE_TYPE (ovar);
8056 if (omp_is_reference (ovar))
8057 type = TREE_TYPE (type);
8058 if ((INTEGRAL_TYPE_P (type)
8059 && TYPE_PRECISION (type) <= POINTER_SIZE)
8060 || TREE_CODE (type) == POINTER_TYPE)
8062 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8063 tree t = var;
8064 if (omp_is_reference (var))
8065 t = build_simple_mem_ref (var);
8066 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8067 TREE_NO_WARNING (var) = 1;
8068 if (TREE_CODE (type) != POINTER_TYPE)
8069 t = fold_convert (pointer_sized_int_node, t);
8070 t = fold_convert (TREE_TYPE (x), t);
8071 gimplify_assign (x, t, &ilist);
8073 else if (omp_is_reference (var))
8074 gimplify_assign (x, var, &ilist);
8075 else if (is_gimple_reg (var))
8077 tree avar = create_tmp_var (TREE_TYPE (var));
8078 mark_addressable (avar);
8079 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8080 TREE_NO_WARNING (var) = 1;
8081 gimplify_assign (avar, var, &ilist);
8082 avar = build_fold_addr_expr (avar);
8083 gimplify_assign (x, avar, &ilist);
8085 else
8087 var = build_fold_addr_expr (var);
8088 gimplify_assign (x, var, &ilist);
8090 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8091 s = size_int (0);
8092 else if (omp_is_reference (ovar))
8093 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8094 else
8095 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8096 s = fold_convert (size_type_node, s);
8097 purpose = size_int (map_idx++);
8098 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8099 if (TREE_CODE (s) != INTEGER_CST)
8100 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8102 gcc_checking_assert (tkind
8103 < (HOST_WIDE_INT_C (1U) << talign_shift));
8104 talign = ceil_log2 (talign);
8105 tkind |= talign << talign_shift;
8106 gcc_checking_assert (tkind
8107 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8108 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8109 build_int_cstu (tkind_type, tkind));
8110 break;
8112 case OMP_CLAUSE_USE_DEVICE_PTR:
8113 case OMP_CLAUSE_IS_DEVICE_PTR:
8114 ovar = OMP_CLAUSE_DECL (c);
8115 var = lookup_decl_in_outer_ctx (ovar, ctx);
8116 x = build_sender_ref (ovar, ctx);
8117 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8118 tkind = GOMP_MAP_USE_DEVICE_PTR;
8119 else
8120 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8121 type = TREE_TYPE (ovar);
8122 if (TREE_CODE (type) == ARRAY_TYPE)
8123 var = build_fold_addr_expr (var);
8124 else
8126 if (omp_is_reference (ovar))
8128 type = TREE_TYPE (type);
8129 if (TREE_CODE (type) != ARRAY_TYPE)
8130 var = build_simple_mem_ref (var);
8131 var = fold_convert (TREE_TYPE (x), var);
8134 gimplify_assign (x, var, &ilist);
8135 s = size_int (0);
8136 purpose = size_int (map_idx++);
8137 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8138 gcc_checking_assert (tkind
8139 < (HOST_WIDE_INT_C (1U) << talign_shift));
8140 gcc_checking_assert (tkind
8141 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8142 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8143 build_int_cstu (tkind_type, tkind));
8144 break;
8147 gcc_assert (map_idx == map_cnt);
8149 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8150 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8151 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8152 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8153 for (int i = 1; i <= 2; i++)
8154 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8156 gimple_seq initlist = NULL;
8157 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8158 TREE_VEC_ELT (t, i)),
8159 &initlist, true, NULL_TREE);
8160 gimple_seq_add_seq (&ilist, initlist);
8162 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8163 NULL);
8164 TREE_THIS_VOLATILE (clobber) = 1;
8165 gimple_seq_add_stmt (&olist,
8166 gimple_build_assign (TREE_VEC_ELT (t, i),
8167 clobber));
8170 tree clobber = build_constructor (ctx->record_type, NULL);
8171 TREE_THIS_VOLATILE (clobber) = 1;
8172 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8173 clobber));
8176 /* Once all the expansions are done, sequence all the different
8177 fragments inside gimple_omp_body. */
8179 new_body = NULL;
8181 if (offloaded
8182 && ctx->record_type)
8184 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8185 /* fixup_child_record_type might have changed receiver_decl's type. */
8186 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8187 gimple_seq_add_stmt (&new_body,
8188 gimple_build_assign (ctx->receiver_decl, t));
8190 gimple_seq_add_seq (&new_body, fplist);
8192 if (offloaded || data_region)
8194 tree prev = NULL_TREE;
8195 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8196 switch (OMP_CLAUSE_CODE (c))
8198 tree var, x;
8199 default:
8200 break;
8201 case OMP_CLAUSE_FIRSTPRIVATE:
8202 if (is_gimple_omp_oacc (ctx->stmt))
8203 break;
8204 var = OMP_CLAUSE_DECL (c);
8205 if (omp_is_reference (var)
8206 || is_gimple_reg_type (TREE_TYPE (var)))
8208 tree new_var = lookup_decl (var, ctx);
8209 tree type;
8210 type = TREE_TYPE (var);
8211 if (omp_is_reference (var))
8212 type = TREE_TYPE (type);
8213 if ((INTEGRAL_TYPE_P (type)
8214 && TYPE_PRECISION (type) <= POINTER_SIZE)
8215 || TREE_CODE (type) == POINTER_TYPE)
8217 x = build_receiver_ref (var, false, ctx);
8218 if (TREE_CODE (type) != POINTER_TYPE)
8219 x = fold_convert (pointer_sized_int_node, x);
8220 x = fold_convert (type, x);
8221 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8222 fb_rvalue);
8223 if (omp_is_reference (var))
8225 tree v = create_tmp_var_raw (type, get_name (var));
8226 gimple_add_tmp_var (v);
8227 TREE_ADDRESSABLE (v) = 1;
8228 gimple_seq_add_stmt (&new_body,
8229 gimple_build_assign (v, x));
8230 x = build_fold_addr_expr (v);
8232 gimple_seq_add_stmt (&new_body,
8233 gimple_build_assign (new_var, x));
8235 else
8237 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8238 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8239 fb_rvalue);
8240 gimple_seq_add_stmt (&new_body,
8241 gimple_build_assign (new_var, x));
8244 else if (is_variable_sized (var))
8246 tree pvar = DECL_VALUE_EXPR (var);
8247 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8248 pvar = TREE_OPERAND (pvar, 0);
8249 gcc_assert (DECL_P (pvar));
8250 tree new_var = lookup_decl (pvar, ctx);
8251 x = build_receiver_ref (var, false, ctx);
8252 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8253 gimple_seq_add_stmt (&new_body,
8254 gimple_build_assign (new_var, x));
8256 break;
8257 case OMP_CLAUSE_PRIVATE:
8258 if (is_gimple_omp_oacc (ctx->stmt))
8259 break;
8260 var = OMP_CLAUSE_DECL (c);
8261 if (omp_is_reference (var))
8263 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8264 tree new_var = lookup_decl (var, ctx);
8265 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8266 if (TREE_CONSTANT (x))
8268 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8269 get_name (var));
8270 gimple_add_tmp_var (x);
8271 TREE_ADDRESSABLE (x) = 1;
8272 x = build_fold_addr_expr_loc (clause_loc, x);
8274 else
8275 break;
8277 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8278 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8279 gimple_seq_add_stmt (&new_body,
8280 gimple_build_assign (new_var, x));
8282 break;
8283 case OMP_CLAUSE_USE_DEVICE_PTR:
8284 case OMP_CLAUSE_IS_DEVICE_PTR:
8285 var = OMP_CLAUSE_DECL (c);
8286 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8287 x = build_sender_ref (var, ctx);
8288 else
8289 x = build_receiver_ref (var, false, ctx);
8290 if (is_variable_sized (var))
8292 tree pvar = DECL_VALUE_EXPR (var);
8293 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8294 pvar = TREE_OPERAND (pvar, 0);
8295 gcc_assert (DECL_P (pvar));
8296 tree new_var = lookup_decl (pvar, ctx);
8297 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8298 gimple_seq_add_stmt (&new_body,
8299 gimple_build_assign (new_var, x));
8301 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8303 tree new_var = lookup_decl (var, ctx);
8304 new_var = DECL_VALUE_EXPR (new_var);
8305 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8306 new_var = TREE_OPERAND (new_var, 0);
8307 gcc_assert (DECL_P (new_var));
8308 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8309 gimple_seq_add_stmt (&new_body,
8310 gimple_build_assign (new_var, x));
8312 else
8314 tree type = TREE_TYPE (var);
8315 tree new_var = lookup_decl (var, ctx);
8316 if (omp_is_reference (var))
8318 type = TREE_TYPE (type);
8319 if (TREE_CODE (type) != ARRAY_TYPE)
8321 tree v = create_tmp_var_raw (type, get_name (var));
8322 gimple_add_tmp_var (v);
8323 TREE_ADDRESSABLE (v) = 1;
8324 x = fold_convert (type, x);
8325 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8326 fb_rvalue);
8327 gimple_seq_add_stmt (&new_body,
8328 gimple_build_assign (v, x));
8329 x = build_fold_addr_expr (v);
8332 new_var = DECL_VALUE_EXPR (new_var);
8333 x = fold_convert (TREE_TYPE (new_var), x);
8334 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8335 gimple_seq_add_stmt (&new_body,
8336 gimple_build_assign (new_var, x));
8338 break;
8340 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8341 so that the firstprivate vars which (when needed) hold the
8342 OMP_CLAUSE_SIZE bias have already been handled. Similarly for
8343 OMP_CLAUSE_PRIVATE on VLAs or references to VLAs. */
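/* E.g. for "#pragma omp target map(tofrom: p[0:n])" with "int *p", the
   pointer "p" itself travels as a GOMP_MAP_FIRSTPRIVATE_POINTER clause
   chained after the array-section map; below it is reconstructed from the
   section's receiver ref plus the negated bias recorded in
   OMP_CLAUSE_SIZE.  */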
8344 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8345 switch (OMP_CLAUSE_CODE (c))
8347 tree var;
8348 default:
8349 break;
8350 case OMP_CLAUSE_MAP:
8351 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8352 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8354 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8355 poly_int64 offset = 0;
8356 gcc_assert (prev);
8357 var = OMP_CLAUSE_DECL (c);
8358 if (DECL_P (var)
8359 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8360 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8361 ctx))
8362 && varpool_node::get_create (var)->offloadable)
8363 break;
8364 if (TREE_CODE (var) == INDIRECT_REF
8365 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8366 var = TREE_OPERAND (var, 0);
8367 if (TREE_CODE (var) == COMPONENT_REF)
8369 var = get_addr_base_and_unit_offset (var, &offset);
8370 gcc_assert (var != NULL_TREE && DECL_P (var));
8372 else if (DECL_SIZE (var)
8373 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8375 tree var2 = DECL_VALUE_EXPR (var);
8376 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8377 var2 = TREE_OPERAND (var2, 0);
8378 gcc_assert (DECL_P (var2));
8379 var = var2;
8381 tree new_var = lookup_decl (var, ctx), x;
8382 tree type = TREE_TYPE (new_var);
8383 bool is_ref;
8384 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8385 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8386 == COMPONENT_REF))
8388 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8389 is_ref = true;
8390 new_var = build2 (MEM_REF, type,
8391 build_fold_addr_expr (new_var),
8392 build_int_cst (build_pointer_type (type),
8393 offset));
8395 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8397 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8398 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8399 new_var = build2 (MEM_REF, type,
8400 build_fold_addr_expr (new_var),
8401 build_int_cst (build_pointer_type (type),
8402 offset));
8404 else
8405 is_ref = omp_is_reference (var);
8406 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8407 is_ref = false;
8408 bool ref_to_array = false;
8409 if (is_ref)
8411 type = TREE_TYPE (type);
8412 if (TREE_CODE (type) == ARRAY_TYPE)
8414 type = build_pointer_type (type);
8415 ref_to_array = true;
8418 else if (TREE_CODE (type) == ARRAY_TYPE)
8420 tree decl2 = DECL_VALUE_EXPR (new_var);
8421 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8422 decl2 = TREE_OPERAND (decl2, 0);
8423 gcc_assert (DECL_P (decl2));
8424 new_var = decl2;
8425 type = TREE_TYPE (new_var);
8427 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8428 x = fold_convert_loc (clause_loc, type, x);
8429 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8431 tree bias = OMP_CLAUSE_SIZE (c);
8432 if (DECL_P (bias))
8433 bias = lookup_decl (bias, ctx);
8434 bias = fold_convert_loc (clause_loc, sizetype, bias);
8435 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8436 bias);
8437 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8438 TREE_TYPE (x), x, bias);
8440 if (ref_to_array)
8441 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8442 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8443 if (is_ref && !ref_to_array)
8445 tree t = create_tmp_var_raw (type, get_name (var));
8446 gimple_add_tmp_var (t);
8447 TREE_ADDRESSABLE (t) = 1;
8448 gimple_seq_add_stmt (&new_body,
8449 gimple_build_assign (t, x));
8450 x = build_fold_addr_expr_loc (clause_loc, t);
8452 gimple_seq_add_stmt (&new_body,
8453 gimple_build_assign (new_var, x));
8454 prev = NULL_TREE;
8456 else if (OMP_CLAUSE_CHAIN (c)
8457 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8458 == OMP_CLAUSE_MAP
8459 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8460 == GOMP_MAP_FIRSTPRIVATE_POINTER
8461 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8462 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8463 prev = c;
8464 break;
8465 case OMP_CLAUSE_PRIVATE:
8466 var = OMP_CLAUSE_DECL (c);
8467 if (is_variable_sized (var))
8469 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8470 tree new_var = lookup_decl (var, ctx);
8471 tree pvar = DECL_VALUE_EXPR (var);
8472 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8473 pvar = TREE_OPERAND (pvar, 0);
8474 gcc_assert (DECL_P (pvar));
8475 tree new_pvar = lookup_decl (pvar, ctx);
8476 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8477 tree al = size_int (DECL_ALIGN (var));
8478 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8479 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8480 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8481 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8482 gimple_seq_add_stmt (&new_body,
8483 gimple_build_assign (new_pvar, x));
8485 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8487 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8488 tree new_var = lookup_decl (var, ctx);
8489 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8490 if (TREE_CONSTANT (x))
8491 break;
8492 else
8494 tree atmp
8495 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8496 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8497 tree al = size_int (TYPE_ALIGN (rtype));
8498 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8501 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8502 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8503 gimple_seq_add_stmt (&new_body,
8504 gimple_build_assign (new_var, x));
8506 break;
8509 gimple_seq fork_seq = NULL;
8510 gimple_seq join_seq = NULL;
8512 if (is_oacc_parallel (ctx))
8514 /* If there are reductions on the offloaded region itself, treat
8515 them as a dummy GANG loop. */
8516 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8518 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8519 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8522 gimple_seq_add_seq (&new_body, fork_seq);
8523 gimple_seq_add_seq (&new_body, tgt_body);
8524 gimple_seq_add_seq (&new_body, join_seq);
8526 if (offloaded)
8527 new_body = maybe_catch_exception (new_body);
8529 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8530 gimple_omp_set_body (stmt, new_body);
8533 bind = gimple_build_bind (NULL, NULL,
8534 tgt_bind ? gimple_bind_block (tgt_bind)
8535 : NULL_TREE);
8536 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8537 gimple_bind_add_seq (bind, ilist);
8538 gimple_bind_add_stmt (bind, stmt);
8539 gimple_bind_add_seq (bind, olist);
8541 pop_gimplify_context (NULL);
8543 if (dep_bind)
8545 gimple_bind_add_seq (dep_bind, dep_ilist);
8546 gimple_bind_add_stmt (dep_bind, bind);
8547 gimple_bind_add_seq (dep_bind, dep_olist);
8548 pop_gimplify_context (dep_bind);
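/* Worked example (a sketch; the exact sequence depends on the clauses):
   for

       int n = 32;
       #pragma omp target firstprivate (n)
         use (n);

   "n" fits in a pointer, so it is sent as GOMP_MAP_FIRSTPRIVATE_INT with
   the value stored directly in the pointer slot,

       .omp_data_arr[0] = (void *) (uintptr_t) n;
       .omp_data_sizes[0] = 0;

   and on the receiver side the pointer is converted back to an int
   instead of being dereferenced.  */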
8552 /* Expand code for an OpenMP teams directive. */
8554 static void
8555 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8557 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8558 push_gimplify_context ();
8560 tree block = make_node (BLOCK);
8561 gbind *bind = gimple_build_bind (NULL, NULL, block);
8562 gsi_replace (gsi_p, bind, true);
8563 gimple_seq bind_body = NULL;
8564 gimple_seq dlist = NULL;
8565 gimple_seq olist = NULL;
8567 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8568 OMP_CLAUSE_NUM_TEAMS);
8569 if (num_teams == NULL_TREE)
8570 num_teams = build_int_cst (unsigned_type_node, 0);
8571 else
8573 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8574 num_teams = fold_convert (unsigned_type_node, num_teams);
8575 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8577 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8578 OMP_CLAUSE_THREAD_LIMIT);
8579 if (thread_limit == NULL_TREE)
8580 thread_limit = build_int_cst (unsigned_type_node, 0);
8581 else
8583 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8584 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8585 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8586 fb_rvalue);
8589 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8590 &bind_body, &dlist, ctx, NULL);
8591 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8592 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8593 if (!gimple_omp_teams_grid_phony (teams_stmt))
8595 gimple_seq_add_stmt (&bind_body, teams_stmt);
8596 location_t loc = gimple_location (teams_stmt);
8597 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8598 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8599 gimple_set_location (call, loc);
8600 gimple_seq_add_stmt (&bind_body, call);
8603 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8604 gimple_omp_set_body (teams_stmt, NULL);
8605 gimple_seq_add_seq (&bind_body, olist);
8606 gimple_seq_add_seq (&bind_body, dlist);
8607 if (!gimple_omp_teams_grid_phony (teams_stmt))
8608 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8609 gimple_bind_set_body (bind, bind_body);
8611 pop_gimplify_context (bind);
8613 gimple_bind_append_vars (bind, ctx->block_vars);
8614 BLOCK_VARS (block) = ctx->block_vars;
8615 if (BLOCK_VARS (block))
8616 TREE_USED (block) = 1;
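/* For illustration: "#pragma omp teams num_teams (4) thread_limit (64)"
   is lowered to

       GOMP_teams (4, 64);

   followed by the teams body; an omitted clause contributes 0, which
   leaves the choice to the runtime.  */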
8619 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8621 static void
8622 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8624 gimple *stmt = gsi_stmt (*gsi_p);
8625 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8626 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8627 gimple_build_omp_return (false));
8631 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8632 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8633 of OMP context, but with task_shared_vars set. */
8635 static tree
8636 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8637 void *data)
8639 tree t = *tp;
8641 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8642 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8643 return t;
8645 if (task_shared_vars
8646 && DECL_P (t)
8647 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8648 return t;
8650 /* If a global variable has been privatized, TREE_CONSTANT on
8651 ADDR_EXPR might be wrong. */
8652 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8653 recompute_tree_invariant_for_addr_expr (t);
8655 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8656 return NULL_TREE;
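/* For instance, once a variable has been given a DECL_VALUE_EXPR (say "x"
   now stands for *(.omp_data_i->x)), a statement mentioning "x" directly
   may no longer be valid GIMPLE after the substitution, so it must be
   regimplified; the walk above flags such operands.  */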
8659 /* Data to be communicated between lower_omp_regimplify_operands and
8660 lower_omp_regimplify_operands_p. */
8662 struct lower_omp_regimplify_operands_data
8664 omp_context *ctx;
8665 vec<tree> *decls;
8668 /* Helper function for lower_omp_regimplify_operands. Find
8669 omp_member_access_dummy_var vars and adjust temporarily their
8670 DECL_VALUE_EXPRs if needed. */
8672 static tree
8673 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8674 void *data)
8676 tree t = omp_member_access_dummy_var (*tp);
8677 if (t)
8679 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8680 lower_omp_regimplify_operands_data *ldata
8681 = (lower_omp_regimplify_operands_data *) wi->info;
8682 tree o = maybe_lookup_decl (t, ldata->ctx);
8683 if (o != t)
8685 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8686 ldata->decls->safe_push (*tp);
8687 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8688 SET_DECL_VALUE_EXPR (*tp, v);
8691 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8692 return NULL_TREE;
8695 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8696 of omp_member_access_dummy_var vars during regimplification. */
8698 static void
8699 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8700 gimple_stmt_iterator *gsi_p)
8702 auto_vec<tree, 10> decls;
8703 if (ctx)
8705 struct walk_stmt_info wi;
8706 memset (&wi, '\0', sizeof (wi));
8707 struct lower_omp_regimplify_operands_data data;
8708 data.ctx = ctx;
8709 data.decls = &decls;
8710 wi.info = &data;
8711 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8713 gimple_regimplify_operands (stmt, gsi_p);
8714 while (!decls.is_empty ())
8716 tree t = decls.pop ();
8717 tree v = decls.pop ();
8718 SET_DECL_VALUE_EXPR (t, v);
8722 static void
8723 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8725 gimple *stmt = gsi_stmt (*gsi_p);
8726 struct walk_stmt_info wi;
8727 gcall *call_stmt;
8729 if (gimple_has_location (stmt))
8730 input_location = gimple_location (stmt);
8732 if (task_shared_vars)
8733 memset (&wi, '\0', sizeof (wi));
8735 /* If we have issued syntax errors, avoid doing any heavy lifting.
8736 Just replace the OMP directives with a NOP to avoid
8737 confusing RTL expansion. */
8738 if (seen_error () && is_gimple_omp (stmt))
8740 gsi_replace (gsi_p, gimple_build_nop (), true);
8741 return;
8744 switch (gimple_code (stmt))
8746 case GIMPLE_COND:
8748 gcond *cond_stmt = as_a <gcond *> (stmt);
8749 if ((ctx || task_shared_vars)
8750 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8751 lower_omp_regimplify_p,
8752 ctx ? NULL : &wi, NULL)
8753 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8754 lower_omp_regimplify_p,
8755 ctx ? NULL : &wi, NULL)))
8756 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8758 break;
8759 case GIMPLE_CATCH:
8760 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8761 break;
8762 case GIMPLE_EH_FILTER:
8763 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8764 break;
8765 case GIMPLE_TRY:
8766 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8767 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8768 break;
8769 case GIMPLE_TRANSACTION:
8770 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8771 ctx);
8772 break;
8773 case GIMPLE_BIND:
8774 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8775 break;
8776 case GIMPLE_OMP_PARALLEL:
8777 case GIMPLE_OMP_TASK:
8778 ctx = maybe_lookup_ctx (stmt);
8779 gcc_assert (ctx);
8780 if (ctx->cancellable)
8781 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8782 lower_omp_taskreg (gsi_p, ctx);
8783 break;
8784 case GIMPLE_OMP_FOR:
8785 ctx = maybe_lookup_ctx (stmt);
8786 gcc_assert (ctx);
8787 if (ctx->cancellable)
8788 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8789 lower_omp_for (gsi_p, ctx);
8790 break;
8791 case GIMPLE_OMP_SECTIONS:
8792 ctx = maybe_lookup_ctx (stmt);
8793 gcc_assert (ctx);
8794 if (ctx->cancellable)
8795 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8796 lower_omp_sections (gsi_p, ctx);
8797 break;
8798 case GIMPLE_OMP_SINGLE:
8799 ctx = maybe_lookup_ctx (stmt);
8800 gcc_assert (ctx);
8801 lower_omp_single (gsi_p, ctx);
8802 break;
8803 case GIMPLE_OMP_MASTER:
8804 ctx = maybe_lookup_ctx (stmt);
8805 gcc_assert (ctx);
8806 lower_omp_master (gsi_p, ctx);
8807 break;
8808 case GIMPLE_OMP_TASKGROUP:
8809 ctx = maybe_lookup_ctx (stmt);
8810 gcc_assert (ctx);
8811 lower_omp_taskgroup (gsi_p, ctx);
8812 break;
8813 case GIMPLE_OMP_ORDERED:
8814 ctx = maybe_lookup_ctx (stmt);
8815 gcc_assert (ctx);
8816 lower_omp_ordered (gsi_p, ctx);
8817 break;
8818 case GIMPLE_OMP_CRITICAL:
8819 ctx = maybe_lookup_ctx (stmt);
8820 gcc_assert (ctx);
8821 lower_omp_critical (gsi_p, ctx);
8822 break;
8823 case GIMPLE_OMP_ATOMIC_LOAD:
8824 if ((ctx || task_shared_vars)
8825 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8826 as_a <gomp_atomic_load *> (stmt)),
8827 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8828 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8829 break;
8830 case GIMPLE_OMP_TARGET:
8831 ctx = maybe_lookup_ctx (stmt);
8832 gcc_assert (ctx);
8833 lower_omp_target (gsi_p, ctx);
8834 break;
8835 case GIMPLE_OMP_TEAMS:
8836 ctx = maybe_lookup_ctx (stmt);
8837 gcc_assert (ctx);
8838 lower_omp_teams (gsi_p, ctx);
8839 break;
8840 case GIMPLE_OMP_GRID_BODY:
8841 ctx = maybe_lookup_ctx (stmt);
8842 gcc_assert (ctx);
8843 lower_omp_grid_body (gsi_p, ctx);
8844 break;
8845 case GIMPLE_CALL:
8846 tree fndecl;
8847 call_stmt = as_a <gcall *> (stmt);
8848 fndecl = gimple_call_fndecl (call_stmt);
8849 if (fndecl
8850 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8851 switch (DECL_FUNCTION_CODE (fndecl))
8853 case BUILT_IN_GOMP_BARRIER:
8854 if (ctx == NULL)
8855 break;
8856 /* FALLTHRU */
8857 case BUILT_IN_GOMP_CANCEL:
8858 case BUILT_IN_GOMP_CANCELLATION_POINT:
8859 omp_context *cctx;
8860 cctx = ctx;
8861 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8862 cctx = cctx->outer;
8863 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8864 if (!cctx->cancellable)
8866 if (DECL_FUNCTION_CODE (fndecl)
8867 == BUILT_IN_GOMP_CANCELLATION_POINT)
8869 stmt = gimple_build_nop ();
8870 gsi_replace (gsi_p, stmt, false);
8872 break;
8874 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8876 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8877 gimple_call_set_fndecl (call_stmt, fndecl);
8878 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8880 tree lhs;
8881 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8882 gimple_call_set_lhs (call_stmt, lhs);
8883 tree fallthru_label;
8884 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8885 gimple *g;
8886 g = gimple_build_label (fallthru_label);
8887 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8888 g = gimple_build_cond (NE_EXPR, lhs,
8889 fold_convert (TREE_TYPE (lhs),
8890 boolean_false_node),
8891 cctx->cancel_label, fallthru_label);
8892 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8893 break;
8894 default:
8895 break;
8897 /* FALLTHRU */
8898 default:
8899 if ((ctx || task_shared_vars)
8900 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8901 ctx ? NULL : &wi))
8903 /* Just remove clobbers. This should happen only if we have
8904 "privatized" local addressable variables in SIMD regions;
8905 the clobber isn't needed in that case, and gimplifying the address
8906 of the ARRAY_REF into a pointer and creating a MEM_REF based
8907 clobber would create worse code than we get with the clobber
8908 dropped. */
8909 if (gimple_clobber_p (stmt))
8911 gsi_replace (gsi_p, gimple_build_nop (), true);
8912 break;
8914 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8916 break;
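/* Note on the cancellation rewrite in the GIMPLE_CALL case above: inside
   a cancellable region,

       GOMP_barrier ();

   becomes roughly

       D.1 = GOMP_barrier_cancel ();
       if (D.1 != 0) goto <cancel_label>; else goto <fallthru>;
       <fallthru>:

   so that a barrier observing a pending cancellation branches to the
   region's cancel label ("D.1" standing for the compiler temporary).  */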
8920 static void
8921 lower_omp (gimple_seq *body, omp_context *ctx)
8923 location_t saved_location = input_location;
8924 gimple_stmt_iterator gsi;
8925 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8926 lower_omp_1 (&gsi, ctx);
8927 /* During gimplification, we haven't folded statements inside offloading
8928 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8929 if (target_nesting_level || taskreg_nesting_level)
8930 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8931 fold_stmt (&gsi);
8932 input_location = saved_location;
8935 /* Main entry point. */
8937 static unsigned int
8938 execute_lower_omp (void)
8940 gimple_seq body;
8941 int i;
8942 omp_context *ctx;
8944 /* This pass always runs, to provide PROP_gimple_lomp.
8945 But often, there is nothing to do. */
8946 if (flag_openacc == 0 && flag_openmp == 0
8947 && flag_openmp_simd == 0)
8948 return 0;
8950 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8951 delete_omp_context);
8953 body = gimple_body (current_function_decl);
8955 if (hsa_gen_requested_p ())
8956 omp_grid_gridify_all_targets (&body);
8958 scan_omp (&body, NULL);
8959 gcc_assert (taskreg_nesting_level == 0);
8960 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8961 finish_taskreg_scan (ctx);
8962 taskreg_contexts.release ();
8964 if (all_contexts->root)
8966 if (task_shared_vars)
8967 push_gimplify_context ();
8968 lower_omp (&body, NULL);
8969 if (task_shared_vars)
8970 pop_gimplify_context (NULL);
8973 if (all_contexts)
8975 splay_tree_delete (all_contexts);
8976 all_contexts = NULL;
8978 BITMAP_FREE (task_shared_vars);
8979 return 0;
8982 namespace {
8984 const pass_data pass_data_lower_omp =
8986 GIMPLE_PASS, /* type */
8987 "omplower", /* name */
8988 OPTGROUP_OMP, /* optinfo_flags */
8989 TV_NONE, /* tv_id */
8990 PROP_gimple_any, /* properties_required */
8991 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8992 0, /* properties_destroyed */
8993 0, /* todo_flags_start */
8994 0, /* todo_flags_finish */
8997 class pass_lower_omp : public gimple_opt_pass
8999 public:
9000 pass_lower_omp (gcc::context *ctxt)
9001 : gimple_opt_pass (pass_data_lower_omp, ctxt)
9004 /* opt_pass methods: */
9005 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9007 }; // class pass_lower_omp
9009 } // anon namespace
9011 gimple_opt_pass *
9012 make_pass_lower_omp (gcc::context *ctxt)
9014 return new pass_lower_omp (ctxt);
9017 /* The following is a utility to diagnose structured block violations.
9018 It is not part of the "omplower" pass, as that's invoked too late. It
9019 should be invoked by the respective front ends after gimplification. */
9021 static splay_tree all_labels;
9023 /* Check for mismatched contexts and generate an error if needed. Return
9024 true if an error is detected. */
9026 static bool
9027 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9028 gimple *branch_ctx, gimple *label_ctx)
9030 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9031 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9033 if (label_ctx == branch_ctx)
9034 return false;
9036 const char* kind = NULL;
9038 if (flag_openacc)
9040 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9041 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9043 gcc_checking_assert (kind == NULL);
9044 kind = "OpenACC";
9047 if (kind == NULL)
9049 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9050 kind = "OpenMP";
9053 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9054 so we could traverse it and issue a correct "exit" or "enter" error
9055 message upon a structured block violation.
9057 We built that context by tree_cons'ing a list, but there is
9058 no easy counterpart in gimple tuples. It seems like far too much work
9059 for issuing exit/enter error messages. If someone really misses the
9060 distinct error message... patches welcome. */
9062 #if 0
9063 /* Try to avoid confusing the user by producing an error message
9064 with correct "exit" or "enter" verbiage. We prefer "exit"
9065 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9066 if (branch_ctx == NULL)
9067 exit_p = false;
9068 else
9070 while (label_ctx)
9072 if (TREE_VALUE (label_ctx) == branch_ctx)
9074 exit_p = false;
9075 break;
9077 label_ctx = TREE_CHAIN (label_ctx);
9081 if (exit_p)
9082 error ("invalid exit from %s structured block", kind);
9083 else
9084 error ("invalid entry to %s structured block", kind);
9085 #endif
9087 /* If it's obvious we have an invalid entry, be specific about the error. */
9088 if (branch_ctx == NULL)
9089 error ("invalid entry to %s structured block", kind);
9090 else
9092 /* Otherwise, be vague and lazy, but efficient. */
9093 error ("invalid branch to/from %s structured block", kind);
9096 gsi_replace (gsi_p, gimple_build_nop (), false);
9097 return true;
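/* For example, with -fopenmp the jump in

       goto l;
       #pragma omp parallel
         { l:; }

   arrives here with BRANCH_CTX == NULL and LABEL_CTX set to the parallel
   statement, yielding "invalid entry to OpenMP structured block".  */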
9100 /* Pass 1: Create a minimal tree of structured blocks, and record
9101 where each label is found. */
9103 static tree
9104 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9105 struct walk_stmt_info *wi)
9107 gimple *context = (gimple *) wi->info;
9108 gimple *inner_context;
9109 gimple *stmt = gsi_stmt (*gsi_p);
9111 *handled_ops_p = true;
9113 switch (gimple_code (stmt))
9115 WALK_SUBSTMTS;
9117 case GIMPLE_OMP_PARALLEL:
9118 case GIMPLE_OMP_TASK:
9119 case GIMPLE_OMP_SECTIONS:
9120 case GIMPLE_OMP_SINGLE:
9121 case GIMPLE_OMP_SECTION:
9122 case GIMPLE_OMP_MASTER:
9123 case GIMPLE_OMP_ORDERED:
9124 case GIMPLE_OMP_CRITICAL:
9125 case GIMPLE_OMP_TARGET:
9126 case GIMPLE_OMP_TEAMS:
9127 case GIMPLE_OMP_TASKGROUP:
9128 /* The minimal context here is just the current OMP construct. */
9129 inner_context = stmt;
9130 wi->info = inner_context;
9131 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9132 wi->info = context;
9133 break;
9135 case GIMPLE_OMP_FOR:
9136 inner_context = stmt;
9137 wi->info = inner_context;
9138 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9139 walk them. */
9140 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9141 diagnose_sb_1, NULL, wi);
9142 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9143 wi->info = context;
9144 break;
9146 case GIMPLE_LABEL:
9147 splay_tree_insert (all_labels,
9148 (splay_tree_key) gimple_label_label (
9149 as_a <glabel *> (stmt)),
9150 (splay_tree_value) context);
9151 break;
9153 default:
9154 break;
9157 return NULL_TREE;
9160 /* Pass 2: Check each branch and see if its context differs from that of
9161 the destination label's context. */
9163 static tree
9164 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9165 struct walk_stmt_info *wi)
9167 gimple *context = (gimple *) wi->info;
9168 splay_tree_node n;
9169 gimple *stmt = gsi_stmt (*gsi_p);
9171 *handled_ops_p = true;
9173 switch (gimple_code (stmt))
9175 WALK_SUBSTMTS;
9177 case GIMPLE_OMP_PARALLEL:
9178 case GIMPLE_OMP_TASK:
9179 case GIMPLE_OMP_SECTIONS:
9180 case GIMPLE_OMP_SINGLE:
9181 case GIMPLE_OMP_SECTION:
9182 case GIMPLE_OMP_MASTER:
9183 case GIMPLE_OMP_ORDERED:
9184 case GIMPLE_OMP_CRITICAL:
9185 case GIMPLE_OMP_TARGET:
9186 case GIMPLE_OMP_TEAMS:
9187 case GIMPLE_OMP_TASKGROUP:
9188 wi->info = stmt;
9189 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9190 wi->info = context;
9191 break;
9193 case GIMPLE_OMP_FOR:
9194 wi->info = stmt;
9195 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9196 walk them. */
9197 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9198 diagnose_sb_2, NULL, wi);
9199 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9200 wi->info = context;
9201 break;
9203 case GIMPLE_COND:
9205 gcond *cond_stmt = as_a <gcond *> (stmt);
9206 tree lab = gimple_cond_true_label (cond_stmt);
9207 if (lab)
9209 n = splay_tree_lookup (all_labels,
9210 (splay_tree_key) lab);
9211 diagnose_sb_0 (gsi_p, context,
9212 n ? (gimple *) n->value : NULL);
9214 lab = gimple_cond_false_label (cond_stmt);
9215 if (lab)
9217 n = splay_tree_lookup (all_labels,
9218 (splay_tree_key) lab);
9219 diagnose_sb_0 (gsi_p, context,
9220 n ? (gimple *) n->value : NULL);
9223 break;
9225 case GIMPLE_GOTO:
9227 tree lab = gimple_goto_dest (stmt);
9228 if (TREE_CODE (lab) != LABEL_DECL)
9229 break;
9231 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9232 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9234 break;
9236 case GIMPLE_SWITCH:
9238 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9239 unsigned int i;
9240 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9242 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9243 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9244 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9245 break;
9248 break;
9250 case GIMPLE_RETURN:
9251 diagnose_sb_0 (gsi_p, context, NULL);
9252 break;
9254 default:
9255 break;
9258 return NULL_TREE;
9261 static unsigned int
9262 diagnose_omp_structured_block_errors (void)
9264 struct walk_stmt_info wi;
9265 gimple_seq body = gimple_body (current_function_decl);
9267 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9269 memset (&wi, 0, sizeof (wi));
9270 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9272 memset (&wi, 0, sizeof (wi));
9273 wi.want_locations = true;
9274 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9276 gimple_set_body (current_function_decl, body);
9278 splay_tree_delete (all_labels);
9279 all_labels = NULL;
9281 return 0;
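/* Splitting the check into the two walks above handles forward branches
   for free: pass 1 records every label's innermost OMP context before
   pass 2 looks at any branch, so the label's position relative to the
   branch does not matter.  */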
9284 namespace {
9286 const pass_data pass_data_diagnose_omp_blocks =
9288 GIMPLE_PASS, /* type */
9289 "*diagnose_omp_blocks", /* name */
9290 OPTGROUP_OMP, /* optinfo_flags */
9291 TV_NONE, /* tv_id */
9292 PROP_gimple_any, /* properties_required */
9293 0, /* properties_provided */
9294 0, /* properties_destroyed */
9295 0, /* todo_flags_start */
9296 0, /* todo_flags_finish */
9299 class pass_diagnose_omp_blocks : public gimple_opt_pass
9301 public:
9302 pass_diagnose_omp_blocks (gcc::context *ctxt)
9303 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9306 /* opt_pass methods: */
9307 virtual bool gate (function *)
9309 return flag_openacc || flag_openmp || flag_openmp_simd;
9311 virtual unsigned int execute (function *)
9313 return diagnose_omp_structured_block_errors ();
9316 }; // class pass_diagnose_omp_blocks
9318 } // anon namespace
9320 gimple_opt_pass *
9321 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9323 return new pass_diagnose_omp_blocks (ctxt);
9327 #include "gt-omp-low.h"