PR target/82145
[official-gcc.git] / gcc / omp-low.c
blob8ed8f7c90f2dd5c2262e3a234710d03aad401a6f
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2017 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  /* The construct statement (GIMPLE_OMP_*) this context was created for.  */
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
/* Splay tree of all omp contexts in the current function, keyed by the
   originating GIMPLE statement (see new_omp_context).  */
static splay_tree all_contexts;
/* Current nesting depth inside parallel/task regions while scanning.  */
static int taskreg_nesting_level;
/* Current nesting depth inside target regions while scanning.  */
static int target_nesting_level;
/* DECL_UIDs of variables that were made addressable only because a task
   needs to take their address (set in use_pointer_for_field).  */
static bitmap task_shared_vars;
/* Contexts collected during scanning; presumably processed in a later
   batch pass — confirm at the use sites (outside this view).  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
/* Shared case labels for walk_gimple_stmt callbacks: for these wrapper
   statements, tell the walker we did not handle the operands so it
   descends into their sub-statements.  */
#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;
148 /* Return true if CTX corresponds to an oacc parallel region. */
150 static bool
151 is_oacc_parallel (omp_context *ctx)
153 enum gimple_code outer_type = gimple_code (ctx->stmt);
154 return ((outer_type == GIMPLE_OMP_TARGET)
155 && (gimple_omp_target_kind (ctx->stmt)
156 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
159 /* Return true if CTX corresponds to an oacc kernels region. */
161 static bool
162 is_oacc_kernels (omp_context *ctx)
164 enum gimple_code outer_type = gimple_code (ctx->stmt);
165 return ((outer_type == GIMPLE_OMP_TARGET)
166 && (gimple_omp_target_kind (ctx->stmt)
167 == GF_OMP_TARGET_KIND_OACC_KERNELS));
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  /* The dummy is an artificial, ignored VAR_DECL whose DECL_VALUE_EXPR
     the frontend asks us to disregard.  Anything else is not it.  */
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  /* Strip component refs, dereferences, conversions and pointer
     arithmetic until we bottom out at the base object.  */
  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	/* Only an artificial pointer parameter of the current function
	   (i.e. "this") qualifies.  */
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
209 /* Helper for unshare_and_remap, called through walk_tree. */
211 static tree
212 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
214 tree *pair = (tree *) data;
215 if (*tp == pair[0])
217 *tp = unshare_expr (pair[1]);
218 *walk_subtrees = 0;
220 else if (IS_TYPE_OR_DECL_P (*tp))
221 *walk_subtrees = 0;
222 return NULL_TREE;
225 /* Return unshare_expr (X) with all occurrences of FROM
226 replaced with TO. */
228 static tree
229 unshare_and_remap (tree x, tree from, tree to)
231 tree pair[2] = { from, to };
232 x = unshare_expr (x);
233 walk_tree (&x, unshare_and_remap_1, pair, NULL);
234 return x;
237 /* Convenience function for calling scan_omp_1_op on tree operands. */
239 static inline tree
240 scan_omp_op (tree *tp, omp_context *ctx)
242 struct walk_stmt_info wi;
244 memset (&wi, 0, sizeof (wi));
245 wi.info = ctx;
246 wi.want_locations = true;
248 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
/* Forward declarations for the lowering machinery defined later in
   this file.  */
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
255 /* Return true if CTX is for an omp parallel. */
257 static inline bool
258 is_parallel_ctx (omp_context *ctx)
260 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
264 /* Return true if CTX is for an omp task. */
266 static inline bool
267 is_task_ctx (omp_context *ctx)
269 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
273 /* Return true if CTX is for an omp taskloop. */
275 static inline bool
276 is_taskloop_ctx (omp_context *ctx)
278 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
279 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
283 /* Return true if CTX is for an omp parallel or omp task. */
285 static inline bool
286 is_taskreg_ctx (omp_context *ctx)
288 return is_parallel_ctx (ctx) || is_task_ctx (ctx);
291 /* Return true if EXPR is variable sized. */
293 static inline bool
294 is_variable_sized (const_tree expr)
296 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
299 /* Lookup variables. The "maybe" form
300 allows for the variable form to not have been entered, otherwise we
301 assert that the variable must have been entered. */
303 static inline tree
304 lookup_decl (tree var, omp_context *ctx)
306 tree *n = ctx->cb.decl_map->get (var);
307 return *n;
310 static inline tree
311 maybe_lookup_decl (const_tree var, omp_context *ctx)
313 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
314 return n ? *n : NULL_TREE;
317 static inline tree
318 lookup_field (tree var, omp_context *ctx)
320 splay_tree_node n;
321 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
322 return (tree) n->value;
325 static inline tree
326 lookup_sfield (splay_tree_key key, omp_context *ctx)
328 splay_tree_node n;
329 n = splay_tree_lookup (ctx->sfield_map
330 ? ctx->sfield_map : ctx->field_map, key);
331 return (tree) n->value;
334 static inline tree
335 lookup_sfield (tree var, omp_context *ctx)
337 return lookup_sfield ((splay_tree_key) var, ctx);
340 static inline tree
341 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
343 splay_tree_node n;
344 n = splay_tree_lookup (ctx->field_map, key);
345 return n ? (tree) n->value : NULL_TREE;
348 static inline tree
349 maybe_lookup_field (tree var, omp_context *ctx)
351 return maybe_lookup_field ((splay_tree_key) var, ctx);
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  As a side effect,
   when a task must take DECL's address, mark the outer decl
   addressable and record its DECL_UID in task_shared_vars.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics are always passed by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is explicitly shared on the outer
		 parallel/task.  */
	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
450 /* Construct a new automatic decl similar to VAR. */
452 static tree
453 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
455 tree copy = copy_var_decl (var, name, type);
457 DECL_CONTEXT (copy) = current_function_decl;
458 DECL_CHAIN (copy) = ctx->block_vars;
459 /* If VAR is listed in task_shared_vars, it means it wasn't
460 originally addressable and is just because task needs to take
461 it's address. But we don't need to take address of privatizations
462 from that var. */
463 if (TREE_ADDRESSABLE (var)
464 && task_shared_vars
465 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
466 TREE_ADDRESSABLE (copy) = 0;
467 ctx->block_vars = copy;
469 return copy;
472 static tree
473 omp_copy_decl_1 (tree var, omp_context *ctx)
475 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
478 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
479 as appropriate. */
480 static tree
481 omp_build_component_ref (tree obj, tree field)
483 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
484 if (TREE_THIS_VOLATILE (field))
485 TREE_THIS_VOLATILE (ret) |= 1;
486 if (TREE_READONLY (field))
487 TREE_READONLY (ret) |= 1;
488 return ret;
/* Build tree nodes to access the field for VAR on the receiver side.
   BY_REF means the field holds a pointer that must be dereferenced.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  /* *receiver_decl cannot trap: the runtime always passes a valid
     record pointer.  */
  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, when given, is the clause kind on whose
   behalf the reference is built (affects private/lastprivate handling).  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable-sized vars live behind a pointer held in their
	 DECL_VALUE_EXPR; recurse on that pointer and dereference.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      /* Taskloop fields are keyed by &DECL_UID rather than the decl
	 itself (see install_var_field's mask & 8 case).  */
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  /* Skip the artificial grid-body context.  */
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummies, substitute the outer "this" into
	 an unshared copy of the value expr.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
626 /* Build tree nodes to access the field for VAR on the sender side. */
628 static tree
629 build_sender_ref (splay_tree_key key, omp_context *ctx)
631 tree field = lookup_sfield (key, ctx);
632 return omp_build_component_ref (ctx->sender_decl, field);
635 static tree
636 build_sender_ref (tree var, omp_context *ctx)
638 return build_sender_ref ((splay_tree_key) var, ctx);
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  MASK is a
   bit set: 1 = install in field_map/record_type, 2 = install in
   sfield_map/srecord_type, 4 = double-indirect array field,
   8 = key by &DECL_UID instead of the decl.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  /* Mirror the field into the task srecord as well.  */
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily create the srecord type, mirroring every field
	     already installed in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
745 static tree
746 install_var_local (tree var, omp_context *ctx)
748 tree new_var = omp_copy_decl_1 (var, ctx);
749 insert_decl_map (&ctx->cb, var, new_var);
750 return new_var;
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      /* Remap the value expr into the new context before installing it.  */
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable-sized decl: remap both size trees, falling back to
	 the remapped type's sizes if remapping failed.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the omp_context (copy_body_data is its first member).  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Nonlocal/forced labels must not be duplicated.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward to the innermost enclosing parallel/task context,
     returning any mapping found on the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* No mapping and not global: the caller must treat this as an error.  */
  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.
   The new context is registered in ALL_CONTEXTS keyed by STMT.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Inherit the copy_body_data from the enclosing context.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: initialize copy_body_data from scratch for
	 an intra-function remap.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  /* Each context gets its own decl map (freed in delete_omp_context).  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn: gimplify its body, wrap it in EH handling if
   needed, and register the function with the callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  /* Gimplify inside the child function's cfun.  */
  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* EH wrapping happened; re-wrap the result in a fresh bind.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy a omp_context data structures.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  /* Task contexts still own an unfinished copyfn; finish it now.  */
  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      /* Build a fresh record, copying each field with its type and
	 size trees remapped into the child context.  */
      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
994 /* Instantiate decls as necessary in CTX to satisfy the data sharing
995 specified by CLAUSES. If BASE_POINTERS_RESTRICT, install var field with
996 restrict. */
998 static void
999 scan_sharing_clauses (tree clauses, omp_context *ctx,
1000 bool base_pointers_restrict = false)
1002 tree c, decl;
1003 bool scan_array_reductions = false;
1005 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1007 bool by_ref;
1009 switch (OMP_CLAUSE_CODE (c))
1011 case OMP_CLAUSE_PRIVATE:
1012 decl = OMP_CLAUSE_DECL (c);
1013 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1014 goto do_private;
1015 else if (!is_variable_sized (decl))
1016 install_var_local (decl, ctx);
1017 break;
1019 case OMP_CLAUSE_SHARED:
1020 decl = OMP_CLAUSE_DECL (c);
1021 /* Ignore shared directives in teams construct. */
1022 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1024 /* Global variables don't need to be copied,
1025 the receiver side will use them directly. */
1026 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1027 if (is_global_var (odecl))
1028 break;
1029 insert_decl_map (&ctx->cb, decl, odecl);
1030 break;
1032 gcc_assert (is_taskreg_ctx (ctx));
1033 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1034 || !is_variable_sized (decl));
1035 /* Global variables don't need to be copied,
1036 the receiver side will use them directly. */
1037 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1038 break;
1039 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1041 use_pointer_for_field (decl, ctx);
1042 break;
1044 by_ref = use_pointer_for_field (decl, NULL);
1045 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1046 || TREE_ADDRESSABLE (decl)
1047 || by_ref
1048 || omp_is_reference (decl))
1050 by_ref = use_pointer_for_field (decl, ctx);
1051 install_var_field (decl, by_ref, 3, ctx);
1052 install_var_local (decl, ctx);
1053 break;
1055 /* We don't need to copy const scalar vars back. */
1056 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1057 goto do_private;
1059 case OMP_CLAUSE_REDUCTION:
1060 decl = OMP_CLAUSE_DECL (c);
1061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1062 && TREE_CODE (decl) == MEM_REF)
1064 tree t = TREE_OPERAND (decl, 0);
1065 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1066 t = TREE_OPERAND (t, 0);
1067 if (TREE_CODE (t) == INDIRECT_REF
1068 || TREE_CODE (t) == ADDR_EXPR)
1069 t = TREE_OPERAND (t, 0);
1070 install_var_local (t, ctx);
1071 if (is_taskreg_ctx (ctx)
1072 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1073 && !is_variable_sized (t))
1075 by_ref = use_pointer_for_field (t, ctx);
1076 install_var_field (t, by_ref, 3, ctx);
1078 break;
1080 goto do_private;
1082 case OMP_CLAUSE_LASTPRIVATE:
1083 /* Let the corresponding firstprivate clause create
1084 the variable. */
1085 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1086 break;
1087 /* FALLTHRU */
1089 case OMP_CLAUSE_FIRSTPRIVATE:
1090 case OMP_CLAUSE_LINEAR:
1091 decl = OMP_CLAUSE_DECL (c);
1092 do_private:
1093 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1094 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1095 && is_gimple_omp_offloaded (ctx->stmt))
1097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1098 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1099 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1100 install_var_field (decl, true, 3, ctx);
1101 else
1102 install_var_field (decl, false, 3, ctx);
1104 if (is_variable_sized (decl))
1106 if (is_task_ctx (ctx))
1107 install_var_field (decl, false, 1, ctx);
1108 break;
1110 else if (is_taskreg_ctx (ctx))
1112 bool global
1113 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1114 by_ref = use_pointer_for_field (decl, NULL);
1116 if (is_task_ctx (ctx)
1117 && (global || by_ref || omp_is_reference (decl)))
1119 install_var_field (decl, false, 1, ctx);
1120 if (!global)
1121 install_var_field (decl, by_ref, 2, ctx);
1123 else if (!global)
1124 install_var_field (decl, by_ref, 3, ctx);
1126 install_var_local (decl, ctx);
1127 break;
1129 case OMP_CLAUSE_USE_DEVICE_PTR:
1130 decl = OMP_CLAUSE_DECL (c);
1131 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1132 install_var_field (decl, true, 3, ctx);
1133 else
1134 install_var_field (decl, false, 3, ctx);
1135 if (DECL_SIZE (decl)
1136 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1138 tree decl2 = DECL_VALUE_EXPR (decl);
1139 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1140 decl2 = TREE_OPERAND (decl2, 0);
1141 gcc_assert (DECL_P (decl2));
1142 install_var_local (decl2, ctx);
1144 install_var_local (decl, ctx);
1145 break;
1147 case OMP_CLAUSE_IS_DEVICE_PTR:
1148 decl = OMP_CLAUSE_DECL (c);
1149 goto do_private;
1151 case OMP_CLAUSE__LOOPTEMP_:
1152 gcc_assert (is_taskreg_ctx (ctx));
1153 decl = OMP_CLAUSE_DECL (c);
1154 install_var_field (decl, false, 3, ctx);
1155 install_var_local (decl, ctx);
1156 break;
1158 case OMP_CLAUSE_COPYPRIVATE:
1159 case OMP_CLAUSE_COPYIN:
1160 decl = OMP_CLAUSE_DECL (c);
1161 by_ref = use_pointer_for_field (decl, NULL);
1162 install_var_field (decl, by_ref, 3, ctx);
1163 break;
1165 case OMP_CLAUSE_FINAL:
1166 case OMP_CLAUSE_IF:
1167 case OMP_CLAUSE_NUM_THREADS:
1168 case OMP_CLAUSE_NUM_TEAMS:
1169 case OMP_CLAUSE_THREAD_LIMIT:
1170 case OMP_CLAUSE_DEVICE:
1171 case OMP_CLAUSE_SCHEDULE:
1172 case OMP_CLAUSE_DIST_SCHEDULE:
1173 case OMP_CLAUSE_DEPEND:
1174 case OMP_CLAUSE_PRIORITY:
1175 case OMP_CLAUSE_GRAINSIZE:
1176 case OMP_CLAUSE_NUM_TASKS:
1177 case OMP_CLAUSE__CILK_FOR_COUNT_:
1178 case OMP_CLAUSE_NUM_GANGS:
1179 case OMP_CLAUSE_NUM_WORKERS:
1180 case OMP_CLAUSE_VECTOR_LENGTH:
1181 if (ctx->outer)
1182 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1183 break;
1185 case OMP_CLAUSE_TO:
1186 case OMP_CLAUSE_FROM:
1187 case OMP_CLAUSE_MAP:
1188 if (ctx->outer)
1189 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1190 decl = OMP_CLAUSE_DECL (c);
1191 /* Global variables with "omp declare target" attribute
1192 don't need to be copied, the receiver side will use them
1193 directly. However, global variables with "omp declare target link"
1194 attribute need to be copied. */
1195 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1196 && DECL_P (decl)
1197 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1198 && (OMP_CLAUSE_MAP_KIND (c)
1199 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1200 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1201 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1202 && varpool_node::get_create (decl)->offloadable
1203 && !lookup_attribute ("omp declare target link",
1204 DECL_ATTRIBUTES (decl)))
1205 break;
1206 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1207 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1209 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1210 not offloaded; there is nothing to map for those. */
1211 if (!is_gimple_omp_offloaded (ctx->stmt)
1212 && !POINTER_TYPE_P (TREE_TYPE (decl))
1213 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1214 break;
1216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1217 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1218 || (OMP_CLAUSE_MAP_KIND (c)
1219 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1221 if (TREE_CODE (decl) == COMPONENT_REF
1222 || (TREE_CODE (decl) == INDIRECT_REF
1223 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1224 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1225 == REFERENCE_TYPE)))
1226 break;
1227 if (DECL_SIZE (decl)
1228 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1230 tree decl2 = DECL_VALUE_EXPR (decl);
1231 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1232 decl2 = TREE_OPERAND (decl2, 0);
1233 gcc_assert (DECL_P (decl2));
1234 install_var_local (decl2, ctx);
1236 install_var_local (decl, ctx);
1237 break;
1239 if (DECL_P (decl))
1241 if (DECL_SIZE (decl)
1242 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1244 tree decl2 = DECL_VALUE_EXPR (decl);
1245 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1246 decl2 = TREE_OPERAND (decl2, 0);
1247 gcc_assert (DECL_P (decl2));
1248 install_var_field (decl2, true, 3, ctx);
1249 install_var_local (decl2, ctx);
1250 install_var_local (decl, ctx);
1252 else
1254 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1255 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1256 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1257 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1258 install_var_field (decl, true, 7, ctx);
1259 else
1260 install_var_field (decl, true, 3, ctx,
1261 base_pointers_restrict);
1262 if (is_gimple_omp_offloaded (ctx->stmt)
1263 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1264 install_var_local (decl, ctx);
1267 else
1269 tree base = get_base_address (decl);
1270 tree nc = OMP_CLAUSE_CHAIN (c);
1271 if (DECL_P (base)
1272 && nc != NULL_TREE
1273 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1274 && OMP_CLAUSE_DECL (nc) == base
1275 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1276 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1278 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1279 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1281 else
1283 if (ctx->outer)
1285 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1286 decl = OMP_CLAUSE_DECL (c);
1288 gcc_assert (!splay_tree_lookup (ctx->field_map,
1289 (splay_tree_key) decl));
1290 tree field
1291 = build_decl (OMP_CLAUSE_LOCATION (c),
1292 FIELD_DECL, NULL_TREE, ptr_type_node);
1293 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1294 insert_field_into_struct (ctx->record_type, field);
1295 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1296 (splay_tree_value) field);
1299 break;
1301 case OMP_CLAUSE__GRIDDIM_:
1302 if (ctx->outer)
1304 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1305 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1307 break;
1309 case OMP_CLAUSE_NOWAIT:
1310 case OMP_CLAUSE_ORDERED:
1311 case OMP_CLAUSE_COLLAPSE:
1312 case OMP_CLAUSE_UNTIED:
1313 case OMP_CLAUSE_MERGEABLE:
1314 case OMP_CLAUSE_PROC_BIND:
1315 case OMP_CLAUSE_SAFELEN:
1316 case OMP_CLAUSE_SIMDLEN:
1317 case OMP_CLAUSE_THREADS:
1318 case OMP_CLAUSE_SIMD:
1319 case OMP_CLAUSE_NOGROUP:
1320 case OMP_CLAUSE_DEFAULTMAP:
1321 case OMP_CLAUSE_ASYNC:
1322 case OMP_CLAUSE_WAIT:
1323 case OMP_CLAUSE_GANG:
1324 case OMP_CLAUSE_WORKER:
1325 case OMP_CLAUSE_VECTOR:
1326 case OMP_CLAUSE_INDEPENDENT:
1327 case OMP_CLAUSE_AUTO:
1328 case OMP_CLAUSE_SEQ:
1329 case OMP_CLAUSE_TILE:
1330 case OMP_CLAUSE__SIMT_:
1331 case OMP_CLAUSE_DEFAULT:
1332 break;
1334 case OMP_CLAUSE_ALIGNED:
1335 decl = OMP_CLAUSE_DECL (c);
1336 if (is_global_var (decl)
1337 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1338 install_var_local (decl, ctx);
1339 break;
1341 case OMP_CLAUSE__CACHE_:
1342 default:
1343 gcc_unreachable ();
1347 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1349 switch (OMP_CLAUSE_CODE (c))
1351 case OMP_CLAUSE_LASTPRIVATE:
1352 /* Let the corresponding firstprivate clause create
1353 the variable. */
1354 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1355 scan_array_reductions = true;
1356 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1357 break;
1358 /* FALLTHRU */
1360 case OMP_CLAUSE_FIRSTPRIVATE:
1361 case OMP_CLAUSE_PRIVATE:
1362 case OMP_CLAUSE_LINEAR:
1363 case OMP_CLAUSE_IS_DEVICE_PTR:
1364 decl = OMP_CLAUSE_DECL (c);
1365 if (is_variable_sized (decl))
1367 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1368 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1369 && is_gimple_omp_offloaded (ctx->stmt))
1371 tree decl2 = DECL_VALUE_EXPR (decl);
1372 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1373 decl2 = TREE_OPERAND (decl2, 0);
1374 gcc_assert (DECL_P (decl2));
1375 install_var_local (decl2, ctx);
1376 fixup_remapped_decl (decl2, ctx, false);
1378 install_var_local (decl, ctx);
1380 fixup_remapped_decl (decl, ctx,
1381 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1382 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1383 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1384 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1385 scan_array_reductions = true;
1386 break;
1388 case OMP_CLAUSE_REDUCTION:
1389 decl = OMP_CLAUSE_DECL (c);
1390 if (TREE_CODE (decl) != MEM_REF)
1392 if (is_variable_sized (decl))
1393 install_var_local (decl, ctx);
1394 fixup_remapped_decl (decl, ctx, false);
1396 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1397 scan_array_reductions = true;
1398 break;
1400 case OMP_CLAUSE_SHARED:
1401 /* Ignore shared directives in teams construct. */
1402 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1403 break;
1404 decl = OMP_CLAUSE_DECL (c);
1405 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1406 break;
1407 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1409 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1410 ctx->outer)))
1411 break;
1412 bool by_ref = use_pointer_for_field (decl, ctx);
1413 install_var_field (decl, by_ref, 11, ctx);
1414 break;
1416 fixup_remapped_decl (decl, ctx, false);
1417 break;
1419 case OMP_CLAUSE_MAP:
1420 if (!is_gimple_omp_offloaded (ctx->stmt))
1421 break;
1422 decl = OMP_CLAUSE_DECL (c);
1423 if (DECL_P (decl)
1424 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1425 && (OMP_CLAUSE_MAP_KIND (c)
1426 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1427 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1428 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1429 && varpool_node::get_create (decl)->offloadable)
1430 break;
1431 if (DECL_P (decl))
1433 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1434 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1435 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1436 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1438 tree new_decl = lookup_decl (decl, ctx);
1439 TREE_TYPE (new_decl)
1440 = remap_type (TREE_TYPE (decl), &ctx->cb);
1442 else if (DECL_SIZE (decl)
1443 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1445 tree decl2 = DECL_VALUE_EXPR (decl);
1446 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1447 decl2 = TREE_OPERAND (decl2, 0);
1448 gcc_assert (DECL_P (decl2));
1449 fixup_remapped_decl (decl2, ctx, false);
1450 fixup_remapped_decl (decl, ctx, true);
1452 else
1453 fixup_remapped_decl (decl, ctx, false);
1455 break;
1457 case OMP_CLAUSE_COPYPRIVATE:
1458 case OMP_CLAUSE_COPYIN:
1459 case OMP_CLAUSE_DEFAULT:
1460 case OMP_CLAUSE_IF:
1461 case OMP_CLAUSE_NUM_THREADS:
1462 case OMP_CLAUSE_NUM_TEAMS:
1463 case OMP_CLAUSE_THREAD_LIMIT:
1464 case OMP_CLAUSE_DEVICE:
1465 case OMP_CLAUSE_SCHEDULE:
1466 case OMP_CLAUSE_DIST_SCHEDULE:
1467 case OMP_CLAUSE_NOWAIT:
1468 case OMP_CLAUSE_ORDERED:
1469 case OMP_CLAUSE_COLLAPSE:
1470 case OMP_CLAUSE_UNTIED:
1471 case OMP_CLAUSE_FINAL:
1472 case OMP_CLAUSE_MERGEABLE:
1473 case OMP_CLAUSE_PROC_BIND:
1474 case OMP_CLAUSE_SAFELEN:
1475 case OMP_CLAUSE_SIMDLEN:
1476 case OMP_CLAUSE_ALIGNED:
1477 case OMP_CLAUSE_DEPEND:
1478 case OMP_CLAUSE__LOOPTEMP_:
1479 case OMP_CLAUSE_TO:
1480 case OMP_CLAUSE_FROM:
1481 case OMP_CLAUSE_PRIORITY:
1482 case OMP_CLAUSE_GRAINSIZE:
1483 case OMP_CLAUSE_NUM_TASKS:
1484 case OMP_CLAUSE_THREADS:
1485 case OMP_CLAUSE_SIMD:
1486 case OMP_CLAUSE_NOGROUP:
1487 case OMP_CLAUSE_DEFAULTMAP:
1488 case OMP_CLAUSE_USE_DEVICE_PTR:
1489 case OMP_CLAUSE__CILK_FOR_COUNT_:
1490 case OMP_CLAUSE_ASYNC:
1491 case OMP_CLAUSE_WAIT:
1492 case OMP_CLAUSE_NUM_GANGS:
1493 case OMP_CLAUSE_NUM_WORKERS:
1494 case OMP_CLAUSE_VECTOR_LENGTH:
1495 case OMP_CLAUSE_GANG:
1496 case OMP_CLAUSE_WORKER:
1497 case OMP_CLAUSE_VECTOR:
1498 case OMP_CLAUSE_INDEPENDENT:
1499 case OMP_CLAUSE_AUTO:
1500 case OMP_CLAUSE_SEQ:
1501 case OMP_CLAUSE_TILE:
1502 case OMP_CLAUSE__GRIDDIM_:
1503 case OMP_CLAUSE__SIMT_:
1504 break;
1506 case OMP_CLAUSE__CACHE_:
1507 default:
1508 gcc_unreachable ();
1512 gcc_checking_assert (!scan_array_reductions
1513 || !is_gimple_omp_oacc (ctx->stmt));
1514 if (scan_array_reductions)
1516 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1517 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1518 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1520 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1521 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1523 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1524 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1525 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1526 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1527 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1528 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1532 /* Create a new name for omp child function. Returns an identifier. If
1533 IS_CILK_FOR is true then the suffix for the child function is
1534 "_cilk_for_fn." */
1536 static tree
1537 create_omp_child_function_name (bool task_copy, bool is_cilk_for)
1539 if (is_cilk_for)
1540 return clone_function_name (current_function_decl, "_cilk_for_fn");
1541 return clone_function_name (current_function_decl,
1542 task_copy ? "_omp_cpyfn" : "_omp_fn");
1545 /* Returns the type of the induction variable for the child function for
1546 _Cilk_for and the types for _high and _low variables based on TYPE. */
1548 static tree
1549 cilk_for_check_loop_diff_type (tree type)
1551 if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
1553 if (TYPE_UNSIGNED (type))
1554 return uint32_type_node;
1555 else
1556 return integer_type_node;
1558 else
1560 if (TYPE_UNSIGNED (type))
1561 return uint64_type_node;
1562 else
1563 return long_long_integer_type_node;
1567 /* Return true if CTX may belong to offloaded code: either if current function
1568 is offloaded, or any enclosing context corresponds to a target region. */
1570 static bool
1571 omp_maybe_offloaded_ctx (omp_context *ctx)
1573 if (cgraph_node::get (current_function_decl)->offloadable)
1574 return true;
1575 for (; ctx; ctx = ctx->outer)
1576 if (is_gimple_omp_offloaded (ctx->stmt))
1577 return true;
1578 return false;
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  If TASK_COPY, build the task copy function
   (void (*) (void *, void *)) instead of the outlined body function and
   record it on the task statement; otherwise record the new decl as
   CTX's destination function.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  /* Look for a _Cilk_for count clause; only present on GIMPLE_OMP_PARALLEL
     when Cilk Plus is enabled.  Its presence changes both the function
     name suffix and the signature (extra __low/__high parameters).  */
  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    /* Task copy functions take source and destination data pointers.  */
    type = build_function_type_list (void_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      /* _Cilk_for child: data pointer plus __low/__high loop bounds whose
	 type is derived from the count clause's operand type.  */
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    /* Ordinary outlined region: single data pointer argument.  */
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  /* OpenACC regions never use task copy functions.  */
  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  /* Mark the child as an internal, non-inlinable, file-local function.  */
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      /* The child may run on an accelerator; register it for offloading.  */
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Tag the decl so later passes know whether it is the actual target
	 region entry point or merely code reachable from one.  */
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set the by Cilk runtime when it calls this
     function.  Arguments are prepended, so build __high first, then
     __low, then the data pointer, leaving the data pointer first.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* The incoming data block parameter, ".omp_data_i".  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  if (cilk_for_count)
    DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions get a second, writable ".omp_data_o"
	 destination parameter (prepended, so it comes first).  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1719 /* Callback for walk_gimple_seq. Check if combined parallel
1720 contains gimple_omp_for_combined_into_p OMP_FOR. */
1722 tree
1723 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1724 bool *handled_ops_p,
1725 struct walk_stmt_info *wi)
1727 gimple *stmt = gsi_stmt (*gsi_p);
1729 *handled_ops_p = true;
1730 switch (gimple_code (stmt))
1732 WALK_SUBSTMTS;
1734 case GIMPLE_OMP_FOR:
1735 if (gimple_omp_for_combined_into_p (stmt)
1736 && gimple_omp_for_kind (stmt)
1737 == *(const enum gf_mask *) (wi->info))
1739 wi->info = stmt;
1740 return integer_zero_node;
1742 break;
1743 default:
1744 break;
1746 return NULL;
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  MSK selects the
   kind of inner combined GIMPLE_OMP_FOR to look for inside STMT's body;
   if one is found, create the loop temporaries the expansion of that
   combined construct will need and chain them onto STMT's clauses.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  /* omp_find_combined_for replaces wi.info with the matching statement;
     if it still points at &msk, no combined loop was found.  */
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  /* Each temporary is mapped to itself in the outer context and
	     prepended to STMT's clause chain as an OMP_CLAUSE__LOOPTEMP_.  */
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
/* Scan an OpenMP parallel directive.  Builds the omp_context for the
   region, the record type describing the shared-data block, and (unless
   grid-phony) the child function decl, then scans clauses and body.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* For combined parallel-for, pre-create the loop temporaries the
     inner loop's expansion will need.  */
  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* ".omp_data_s" is the record holding the data shared with the child;
     its fields are added later by scan_sharing_clauses.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If nothing ended up shared, drop the record type entirely.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  Like scan_omp_parallel, but also
   handles the sender-side record type (".omp_data_a") and the task
   copy function when firstprivate copying is required.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* Taskloop needs loop temporaries for the combined inner loop.  */
  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* scan_sharing_clauses may have created a sender record; if so, name
     it and build the task copy function that fills it.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Empty data block: drop the record and tell the runtime the task
     argument has size 0 and alignment 1.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1911 /* Helper function for finish_taskreg_scan, called through walk_tree.
1912 If maybe_lookup_decl_in_outer_context returns non-NULL for some
1913 tree, replace it in the expression. */
1915 static tree
1916 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1918 if (VAR_P (*tp))
1920 omp_context *ctx = (omp_context *) data;
1921 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1922 if (t != *tp)
1924 if (DECL_HAS_VALUE_EXPR_P (t))
1925 t = unshare_expr (DECL_VALUE_EXPR (t));
1926 *tp = t;
1928 *walk_subtrees = 0;
1930 else if (IS_TYPE_OR_DECL_P (*tp))
1931 *walk_subtrees = 0;
1932 return NULL_TREE;
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer to the decl's type: nothing to fix.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Turn the by-value field into a by-reference one and reset
	       qualifiers/alignment that no longer apply.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		/* Keep the sender-side field in sync.  */
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Unlink the VLA field and append it to the VLA_FIELDS
	       list (Q always points at the list's tail chain).  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  These are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  /* Unlink F1 and F2 wherever they are, then reinsert them at
	     the head of the field chain, F1 before F2.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Same reordering for the sender-side record.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the data block's size and alignment on the task stmt for
	 the runtime.  A non-constant size (VLAs) must be remapped into
	 the enclosing context's decls first.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2066 /* Find the enclosing offload context. */
2068 static omp_context *
2069 enclosing_target_ctx (omp_context *ctx)
2071 for (; ctx; ctx = ctx->outer)
2072 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2073 break;
2075 return ctx;
2078 /* Return true if ctx is part of an oacc kernels region. */
2080 static bool
2081 ctx_in_oacc_kernels_region (omp_context *ctx)
2083 for (;ctx != NULL; ctx = ctx->outer)
2085 gimple *stmt = ctx->stmt;
2086 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2087 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2088 return true;
2091 return false;
/* Check the parallelism clauses inside a kernels regions.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.
   Returns the union of gang/worker/vector dimension masks used by STMT
   and all enclosing loops.  Diagnostics are only emitted at the
   outermost (STMT != NULL) invocation; recursive calls with STMT == NULL
   merely accumulate the outer masks.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      /* Recursive call: collect this context's mask without diagnosing.  */
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      /* seq excludes everything else; auto excludes explicit g/w/v;
	 and a loop may not reuse a parallelism level of its parent.  */
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
/* Scan a GIMPLE_OMP_FOR.  Creates a new omp_context for STMT, performs
   OpenACC-specific clause validation/stripping, then scans the clauses,
   pre-body, loop-control operands and body.  Returns the new context.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* In an OpenACC parallel region (or an orphaned loop), gang/worker/
	 vector clauses may not carry an argument.  */
      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  /* Classic unlink-in-place walk over the clause chain.  */
	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* The pre-body must be scanned before the loop-control operands so
     that decls it introduces are already remapped.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   Replaces STMT in GSI with a GIMPLE_BIND of the shape

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1:  <copy of STMT with an added _simt_ clause>
	    goto lab3;
     lab2:  <original STMT>
     lab3:

   and scans both loop variants, linking the SIMT copy into the
   original's context via simt_stmt.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  /* The bind both replaces STMT in the enclosing sequence and owns the
     temporary holding the IFN_GOMP_USE_SIMT result.  */
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* Deep-copy the loop for the SIMT branch and mark it with an
     artificial _simt_ clause chained in front of its clause list.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  /* The original loop becomes the non-SIMT branch.  */
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2277 /* Scan an OpenMP sections directive. */
2279 static void
2280 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2282 omp_context *ctx;
2284 ctx = new_omp_context (stmt, outer_ctx);
2285 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2286 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2289 /* Scan an OpenMP single directive. */
2291 static void
2292 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2294 omp_context *ctx;
2295 tree name;
2297 ctx = new_omp_context (stmt, outer_ctx);
2298 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2299 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2300 name = create_tmp_var_name (".omp_copy_s");
2301 name = build_decl (gimple_location (stmt),
2302 TYPE_DECL, name, ctx->record_type);
2303 TYPE_NAME (ctx->record_type) = name;
2305 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2306 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2308 if (TYPE_FIELDS (ctx->record_type) == NULL)
2309 ctx->record_type = NULL;
2310 else
2311 layout_type (ctx->record_type);
2314 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2315 used in the corresponding offloaded function are restrict. */
2317 static bool
2318 omp_target_base_pointers_restrict_p (tree clauses)
2320 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2321 used by OpenACC. */
2322 if (flag_openacc == 0)
2323 return false;
2325 /* I. Basic example:
2327 void foo (void)
2329 unsigned int a[2], b[2];
2331 #pragma acc kernels \
2332 copyout (a) \
2333 copyout (b)
2335 a[0] = 0;
2336 b[0] = 1;
2340 After gimplification, we have:
2342 #pragma omp target oacc_kernels \
2343 map(force_from:a [len: 8]) \
2344 map(force_from:b [len: 8])
2346 a[0] = 0;
2347 b[0] = 1;
2350 Because both mappings have the force prefix, we know that they will be
2351 allocated when calling the corresponding offloaded function, which means we
2352 can mark the base pointers for a and b in the offloaded function as
2353 restrict. */
2355 tree c;
2356 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2358 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2359 return false;
2361 switch (OMP_CLAUSE_MAP_KIND (c))
2363 case GOMP_MAP_FORCE_ALLOC:
2364 case GOMP_MAP_FORCE_TO:
2365 case GOMP_MAP_FORCE_FROM:
2366 case GOMP_MAP_FORCE_TOFROM:
2367 break;
2368 default:
2369 return false;
2373 return true;
/* Scan a GIMPLE_OMP_TARGET.  Creates a context with an ".omp_data_t"
   record type to hold the mapped data; for offloaded regions it also
   creates the child function and may mark its base pointers restrict.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  bool base_pointers_restrict = false;
  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);

      base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
      if (base_pointers_restrict
	  && dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Base pointers in offloaded function are restrict\n");
    }

  scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were chained in reverse order by scan_sharing_clauses;
	 restore declaration order before laying out the record.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All fields are expected to share one alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
2433 /* Scan an OpenMP teams directive. */
2435 static void
2436 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2438 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2439 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2440 scan_omp (gimple_omp_body_ptr (stmt), ctx);
/* Check nesting restrictions.  STMT is an OMP directive (or one of the
   GOMP builtin calls checked by scan_omp_1_stmt) and CTX is the context
   of the innermost enclosing OMP construct, or NULL.  Emits a diagnostic
   and returns false when STMT is illegally nested; returns true when the
   nesting is acceptable.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
    /* GRID_BODY is an artificial construct, nesting rules will be checked in
       the original copy of its contents.  */
    return true;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      /* Inside a simd region, only "ordered simd [threads]" is allowed.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	{
	  c = NULL_TREE;
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  /* "ordered simd threads" additionally requires a
		     combined "for simd" parent.  */
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<for simd%> region");
		      return false;
		    }
		  return true;
		}
	    }
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
		    " may not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
		   && (gimple_omp_for_kind (stmt)
		       != GF_OMP_FOR_KIND_GRID_LOOP)))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%> or %<parallel%> regions are "
			"allowed to be strictly nested inside %<teams%> "
			"region");
	      return false;
	    }
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  /* An OpenACC loop must be lexically inside another OpenACC
	     loop or an OpenACC compute construct, or appear in an
	     OpenACC routine.  */
	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* Only GOMP_cancel / GOMP_cancellation_point calls reach here
	 (see scan_omp_1_stmt), so gimple_call_fndecl is non-NULL.  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "#pragma omp cancel"
	      : "#pragma omp cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  /* The first call argument encodes the construct being
	     cancelled: 1 parallel, 2 for, 4 sections, 8 taskgroup.  */
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "#pragma omp parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "#pragma omp for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "#pragma omp sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      /* Inside a SECTION, the SECTIONS context is the
			 immediate parent.  */
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
		bad = "#pragma omp task";
	      else
		{
		  /* Walk outwards looking for the enclosing taskgroup;
		     hitting a parallel/teams/target boundary first means
		     there is none.  */
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      /* Work-sharing constructs (and barriers) may not be closely nested
	 inside other work-sharing constructs or task regions.  */
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<critical%>, %<ordered%>, "
			  "%<master%>, explicit %<task%> or %<taskloop%> "
			  "region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, explicit %<task%> or %<taskloop%> "
		      "region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink:...) are only valid on ordered.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED) == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* Same-named critical regions may not nest.  */
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
		= dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL
	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
	{
	  error_at (gimple_location (stmt),
		    "%<teams%> construct not closely nested inside of "
		    "%<target%> construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		warning_at (gimple_location (stmt), 0,
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  Remaps decls and types through the
   context's copy_body callback data (ctx->cb).  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its remapped counterpart.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* If the expression's type was remapped, update it.
		 INTEGER_CST nodes may be shared, so rebuild the constant
		 in the new type instead of mutating TREE_TYPE.  */
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, t);
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3088 /* Return true if FNDECL is a setjmp or a longjmp. */
3090 static bool
3091 setjmp_or_longjmp_p (const_tree fndecl)
3093 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3094 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3095 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3096 return true;
3098 tree declname = DECL_NAME (fndecl);
3099 if (!declname)
3100 return false;
3101 const char *name = IDENTIFIER_POINTER (declname);
3102 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  Validates nesting restrictions
   (replacing illegally-nested directives with a GIMPLE_NOP) and then
   dispatches to the per-construct scanning routine.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp are not valid inside a simd construct.  */
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  /* GOMP builtins that act like directives get the same nesting
	     checks as explicit OMP statements.  */
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      /* Drop the offending statement; an error has been emitted.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A simd loop in a (maybe) offloaded region is duplicated into a
	 SIMT and a SIMD variant when the target supports SIMT.  */
      if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      /* These need no data-sharing handling, only a fresh context.  */
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	if (ctx)
	  /* Bind-local vars are recorded as identity mappings so later
	     remapping leaves them alone.  */
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3235 /* Scan all the statements starting at the current statement. CTX
3236 contains context information about the OMP directives and
3237 clauses found during the scan. */
3239 static void
3240 scan_omp (gimple_seq *body_p, omp_context *ctx)
3242 location_t saved_location;
3243 struct walk_stmt_info wi;
3245 memset (&wi, 0, sizeof (wi));
3246 wi.info = ctx;
3247 wi.want_locations = true;
3249 saved_location = input_location;
3250 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3251 input_location = saved_location;
3254 /* Re-gimplification and code generation routines. */
3256 /* If a context was created for STMT when it was scanned, return it. */
3258 static omp_context *
3259 maybe_lookup_ctx (gimple *stmt)
3261 splay_tree_node n;
3262 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3263 return n ? (omp_context *) n->value : NULL;
3267 /* Find the mapping for DECL in CTX or the immediately enclosing
3268 context that has a mapping for DECL.
3270 If CTX is a nested parallel directive, we may have to use the decl
3271 mappings created in CTX's parent context. Suppose that we have the
3272 following parallel nesting (variable UIDs showed for clarity):
3274 iD.1562 = 0;
3275 #omp parallel shared(iD.1562) -> outer parallel
3276 iD.1562 = iD.1562 + 1;
3278 #omp parallel shared (iD.1562) -> inner parallel
3279 iD.1562 = iD.1562 - 1;
3281 Each parallel structure will create a distinct .omp_data_s structure
3282 for copying iD.1562 in/out of the directive:
3284 outer parallel .omp_data_s.1.i -> iD.1562
3285 inner parallel .omp_data_s.2.i -> iD.1562
3287 A shared variable mapping will produce a copy-out operation before
3288 the parallel directive and a copy-in operation after it. So, in
3289 this case we would have:
3291 iD.1562 = 0;
3292 .omp_data_o.1.i = iD.1562;
3293 #omp parallel shared(iD.1562) -> outer parallel
3294 .omp_data_i.1 = &.omp_data_o.1
3295 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3297 .omp_data_o.2.i = iD.1562; -> **
3298 #omp parallel shared(iD.1562) -> inner parallel
3299 .omp_data_i.2 = &.omp_data_o.2
3300 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3303 ** This is a problem. The symbol iD.1562 cannot be referenced
3304 inside the body of the outer parallel region. But since we are
3305 emitting this copy operation while expanding the inner parallel
3306 directive, we need to access the CTX structure of the outer
3307 parallel directive to get the correct mapping:
3309 .omp_data_o.2.i = .omp_data_i.1->i
3311 Since there may be other workshare or parallel directives enclosing
3312 the parallel directive, it may be necessary to walk up the context
3313 parent chain. This is not a problem in general because nested
3314 parallelism happens only rarely. */
3316 static tree
3317 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3319 tree t;
3320 omp_context *up;
3322 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3323 t = maybe_lookup_decl (decl, up);
3325 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3327 return t ? t : decl;
3331 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3332 in outer contexts. */
3334 static tree
3335 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3337 tree t = NULL;
3338 omp_context *up;
3340 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3341 t = maybe_lookup_decl (decl, up);
3343 return t ? t : decl;
3347 /* Construct the initialization value for reduction operation OP. */
3349 tree
3350 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3352 switch (op)
3354 case PLUS_EXPR:
3355 case MINUS_EXPR:
3356 case BIT_IOR_EXPR:
3357 case BIT_XOR_EXPR:
3358 case TRUTH_OR_EXPR:
3359 case TRUTH_ORIF_EXPR:
3360 case TRUTH_XOR_EXPR:
3361 case NE_EXPR:
3362 return build_zero_cst (type);
3364 case MULT_EXPR:
3365 case TRUTH_AND_EXPR:
3366 case TRUTH_ANDIF_EXPR:
3367 case EQ_EXPR:
3368 return fold_convert_loc (loc, type, integer_one_node);
3370 case BIT_AND_EXPR:
3371 return fold_convert_loc (loc, type, integer_minus_one_node);
3373 case MAX_EXPR:
3374 if (SCALAR_FLOAT_TYPE_P (type))
3376 REAL_VALUE_TYPE max, min;
3377 if (HONOR_INFINITIES (type))
3379 real_inf (&max);
3380 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3382 else
3383 real_maxval (&min, 1, TYPE_MODE (type));
3384 return build_real (type, min);
3386 else if (POINTER_TYPE_P (type))
3388 wide_int min
3389 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3390 return wide_int_to_tree (type, min);
3392 else
3394 gcc_assert (INTEGRAL_TYPE_P (type));
3395 return TYPE_MIN_VALUE (type);
3398 case MIN_EXPR:
3399 if (SCALAR_FLOAT_TYPE_P (type))
3401 REAL_VALUE_TYPE max;
3402 if (HONOR_INFINITIES (type))
3403 real_inf (&max);
3404 else
3405 real_maxval (&max, 0, TYPE_MODE (type));
3406 return build_real (type, max);
3408 else if (POINTER_TYPE_P (type))
3410 wide_int max
3411 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3412 return wide_int_to_tree (type, max);
3414 else
3416 gcc_assert (INTEGRAL_TYPE_P (type));
3417 return TYPE_MAX_VALUE (type);
3420 default:
3421 gcc_unreachable ();
3425 /* Construct the initialization value for reduction CLAUSE. */
3427 tree
3428 omp_reduction_init (tree clause, tree type)
3430 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3431 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3434 /* Return alignment to be assumed for var in CLAUSE, which should be
3435 OMP_CLAUSE_ALIGNED. */
3437 static tree
3438 omp_clause_aligned_alignment (tree clause)
3440 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3441 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3443 /* Otherwise return implementation defined alignment. */
3444 unsigned int al = 1;
3445 opt_scalar_mode mode_iter;
3446 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3447 if (vs)
3448 vs = 1 << floor_log2 (vs);
3449 static enum mode_class classes[]
3450 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3451 for (int i = 0; i < 4; i += 2)
3452 /* The for loop above dictates that we only walk through scalar classes. */
3453 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3455 scalar_mode mode = mode_iter.require ();
3456 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3457 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3458 continue;
3459 while (vs
3460 && GET_MODE_SIZE (vmode) < vs
3461 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3462 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3464 tree type = lang_hooks.types.type_for_mode (mode, 1);
3465 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3466 continue;
3467 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3468 / GET_MODE_SIZE (mode));
3469 if (TYPE_MODE (type) != vmode)
3470 continue;
3471 if (TYPE_ALIGN_UNIT (type) > al)
3472 al = TYPE_ALIGN_UNIT (type);
3474 return build_int_cst (integer_type_node, al);
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

struct omplow_simd_context {
  tree idx;			/* Iteration index into the per-lane array.  */
  tree lane;			/* Lane used for lastprivate-style stores.  */
  vec<tree, va_heap> simt_eargs; /* Extra args for the SIMT entry IFN.  */
  gimple_seq simt_dlist;	/* Destructor-side statements for SIMT vars.  */
  int max_vf;			/* Max vectorization factor; 0 = not yet
				   computed, 1 = privatization disabled.  */
  bool is_simt;			/* True for SIMT (e.g. offloaded) loops.  */
};
3490 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3491 privatization. */
3493 static bool
3494 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3495 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3497 if (sctx->max_vf == 0)
3499 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3500 if (sctx->max_vf > 1)
3502 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3503 OMP_CLAUSE_SAFELEN);
3504 if (c
3505 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3506 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3507 sctx->max_vf = 1;
3508 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3509 sctx->max_vf) == -1)
3510 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3512 if (sctx->max_vf > 1)
3514 sctx->idx = create_tmp_var (unsigned_type_node);
3515 sctx->lane = create_tmp_var (unsigned_type_node);
3518 if (sctx->max_vf == 1)
3519 return false;
3521 if (sctx->is_simt)
3523 if (is_gimple_reg (new_var))
3525 ivar = lvar = new_var;
3526 return true;
3528 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3529 ivar = lvar = create_tmp_var (type);
3530 TREE_ADDRESSABLE (ivar) = 1;
3531 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3532 NULL, DECL_ATTRIBUTES (ivar));
3533 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3534 tree clobber = build_constructor (type, NULL);
3535 TREE_THIS_VOLATILE (clobber) = 1;
3536 gimple *g = gimple_build_assign (ivar, clobber);
3537 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3539 else
3541 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3542 tree avar = create_tmp_var_raw (atype);
3543 if (TREE_ADDRESSABLE (new_var))
3544 TREE_ADDRESSABLE (avar) = 1;
3545 DECL_ATTRIBUTES (avar)
3546 = tree_cons (get_identifier ("omp simd array"), NULL,
3547 DECL_ATTRIBUTES (avar));
3548 gimple_add_tmp_var (avar);
3549 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3550 NULL_TREE, NULL_TREE);
3551 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3552 NULL_TREE, NULL_TREE);
3554 if (DECL_P (new_var))
3556 SET_DECL_VALUE_EXPR (new_var, lvar);
3557 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3559 return true;
3562 /* Helper function of lower_rec_input_clauses. For a reference
3563 in simd reduction, add an underlying variable it will reference. */
3565 static void
3566 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3568 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3569 if (TREE_CONSTANT (z))
3571 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3572 get_name (new_vard));
3573 gimple_add_tmp_var (z);
3574 TREE_ADDRESSABLE (z) = 1;
3575 z = build_fold_addr_expr_loc (loc, z);
3576 gimplify_assign (new_vard, z, ilist);
3580 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3581 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3582 private variables. Initialization statements go in ILIST, while calls
3583 to destructors go in DLIST. */
3585 static void
3586 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3587 omp_context *ctx, struct omp_for_data *fd)
3589 tree c, dtor, copyin_seq, x, ptr;
3590 bool copyin_by_ref = false;
3591 bool lastprivate_firstprivate = false;
3592 bool reduction_omp_orig_ref = false;
3593 int pass;
3594 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3595 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3596 omplow_simd_context sctx = omplow_simd_context ();
3597 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3598 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3599 gimple_seq llist[3] = { };
3601 copyin_seq = NULL;
3602 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3604 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3605 with data sharing clauses referencing variable sized vars. That
3606 is unnecessarily hard to support and very unlikely to result in
3607 vectorized code anyway. */
3608 if (is_simd)
3609 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3610 switch (OMP_CLAUSE_CODE (c))
3612 case OMP_CLAUSE_LINEAR:
3613 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3614 sctx.max_vf = 1;
3615 /* FALLTHRU */
3616 case OMP_CLAUSE_PRIVATE:
3617 case OMP_CLAUSE_FIRSTPRIVATE:
3618 case OMP_CLAUSE_LASTPRIVATE:
3619 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3620 sctx.max_vf = 1;
3621 break;
3622 case OMP_CLAUSE_REDUCTION:
3623 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3624 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3625 sctx.max_vf = 1;
3626 break;
3627 default:
3628 continue;
3631 /* Add a placeholder for simduid. */
3632 if (sctx.is_simt && sctx.max_vf != 1)
3633 sctx.simt_eargs.safe_push (NULL_TREE);
3635 /* Do all the fixed sized types in the first pass, and the variable sized
3636 types in the second pass. This makes sure that the scalar arguments to
3637 the variable sized types are processed before we use them in the
3638 variable sized operations. */
3639 for (pass = 0; pass < 2; ++pass)
3641 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3643 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3644 tree var, new_var;
3645 bool by_ref;
3646 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3648 switch (c_kind)
3650 case OMP_CLAUSE_PRIVATE:
3651 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3652 continue;
3653 break;
3654 case OMP_CLAUSE_SHARED:
3655 /* Ignore shared directives in teams construct. */
3656 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3657 continue;
3658 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3660 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3661 || is_global_var (OMP_CLAUSE_DECL (c)));
3662 continue;
3664 case OMP_CLAUSE_FIRSTPRIVATE:
3665 case OMP_CLAUSE_COPYIN:
3666 break;
3667 case OMP_CLAUSE_LINEAR:
3668 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3669 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3670 lastprivate_firstprivate = true;
3671 break;
3672 case OMP_CLAUSE_REDUCTION:
3673 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3674 reduction_omp_orig_ref = true;
3675 break;
3676 case OMP_CLAUSE__LOOPTEMP_:
3677 /* Handle _looptemp_ clauses only on parallel/task. */
3678 if (fd)
3679 continue;
3680 break;
3681 case OMP_CLAUSE_LASTPRIVATE:
3682 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3684 lastprivate_firstprivate = true;
3685 if (pass != 0 || is_taskloop_ctx (ctx))
3686 continue;
3688 /* Even without corresponding firstprivate, if
3689 decl is Fortran allocatable, it needs outer var
3690 reference. */
3691 else if (pass == 0
3692 && lang_hooks.decls.omp_private_outer_ref
3693 (OMP_CLAUSE_DECL (c)))
3694 lastprivate_firstprivate = true;
3695 break;
3696 case OMP_CLAUSE_ALIGNED:
3697 if (pass == 0)
3698 continue;
3699 var = OMP_CLAUSE_DECL (c);
3700 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3701 && !is_global_var (var))
3703 new_var = maybe_lookup_decl (var, ctx);
3704 if (new_var == NULL_TREE)
3705 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3706 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3707 tree alarg = omp_clause_aligned_alignment (c);
3708 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3709 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3710 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3711 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3712 gimplify_and_add (x, ilist);
3714 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3715 && is_global_var (var))
3717 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3718 new_var = lookup_decl (var, ctx);
3719 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3720 t = build_fold_addr_expr_loc (clause_loc, t);
3721 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3722 tree alarg = omp_clause_aligned_alignment (c);
3723 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3724 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3725 t = fold_convert_loc (clause_loc, ptype, t);
3726 x = create_tmp_var (ptype);
3727 t = build2 (MODIFY_EXPR, ptype, x, t);
3728 gimplify_and_add (t, ilist);
3729 t = build_simple_mem_ref_loc (clause_loc, x);
3730 SET_DECL_VALUE_EXPR (new_var, t);
3731 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3733 continue;
3734 default:
3735 continue;
3738 new_var = var = OMP_CLAUSE_DECL (c);
3739 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3741 var = TREE_OPERAND (var, 0);
3742 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3743 var = TREE_OPERAND (var, 0);
3744 if (TREE_CODE (var) == INDIRECT_REF
3745 || TREE_CODE (var) == ADDR_EXPR)
3746 var = TREE_OPERAND (var, 0);
3747 if (is_variable_sized (var))
3749 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3750 var = DECL_VALUE_EXPR (var);
3751 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3752 var = TREE_OPERAND (var, 0);
3753 gcc_assert (DECL_P (var));
3755 new_var = var;
3757 if (c_kind != OMP_CLAUSE_COPYIN)
3758 new_var = lookup_decl (var, ctx);
3760 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3762 if (pass != 0)
3763 continue;
3765 /* C/C++ array section reductions. */
3766 else if (c_kind == OMP_CLAUSE_REDUCTION
3767 && var != OMP_CLAUSE_DECL (c))
3769 if (pass == 0)
3770 continue;
3772 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3773 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3774 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3776 tree b = TREE_OPERAND (orig_var, 1);
3777 b = maybe_lookup_decl (b, ctx);
3778 if (b == NULL)
3780 b = TREE_OPERAND (orig_var, 1);
3781 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3783 if (integer_zerop (bias))
3784 bias = b;
3785 else
3787 bias = fold_convert_loc (clause_loc,
3788 TREE_TYPE (b), bias);
3789 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3790 TREE_TYPE (b), b, bias);
3792 orig_var = TREE_OPERAND (orig_var, 0);
3794 if (TREE_CODE (orig_var) == INDIRECT_REF
3795 || TREE_CODE (orig_var) == ADDR_EXPR)
3796 orig_var = TREE_OPERAND (orig_var, 0);
3797 tree d = OMP_CLAUSE_DECL (c);
3798 tree type = TREE_TYPE (d);
3799 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3800 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3801 const char *name = get_name (orig_var);
3802 if (TREE_CONSTANT (v))
3804 x = create_tmp_var_raw (type, name);
3805 gimple_add_tmp_var (x);
3806 TREE_ADDRESSABLE (x) = 1;
3807 x = build_fold_addr_expr_loc (clause_loc, x);
3809 else
3811 tree atmp
3812 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3813 tree t = maybe_lookup_decl (v, ctx);
3814 if (t)
3815 v = t;
3816 else
3817 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3818 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3819 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3820 TREE_TYPE (v), v,
3821 build_int_cst (TREE_TYPE (v), 1));
3822 t = fold_build2_loc (clause_loc, MULT_EXPR,
3823 TREE_TYPE (v), t,
3824 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3825 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3826 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3829 tree ptype = build_pointer_type (TREE_TYPE (type));
3830 x = fold_convert_loc (clause_loc, ptype, x);
3831 tree y = create_tmp_var (ptype, name);
3832 gimplify_assign (y, x, ilist);
3833 x = y;
3834 tree yb = y;
3836 if (!integer_zerop (bias))
3838 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3839 bias);
3840 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3842 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3843 pointer_sized_int_node, yb, bias);
3844 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3845 yb = create_tmp_var (ptype, name);
3846 gimplify_assign (yb, x, ilist);
3847 x = yb;
3850 d = TREE_OPERAND (d, 0);
3851 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3852 d = TREE_OPERAND (d, 0);
3853 if (TREE_CODE (d) == ADDR_EXPR)
3855 if (orig_var != var)
3857 gcc_assert (is_variable_sized (orig_var));
3858 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3860 gimplify_assign (new_var, x, ilist);
3861 tree new_orig_var = lookup_decl (orig_var, ctx);
3862 tree t = build_fold_indirect_ref (new_var);
3863 DECL_IGNORED_P (new_var) = 0;
3864 TREE_THIS_NOTRAP (t);
3865 SET_DECL_VALUE_EXPR (new_orig_var, t);
3866 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3868 else
3870 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3871 build_int_cst (ptype, 0));
3872 SET_DECL_VALUE_EXPR (new_var, x);
3873 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3876 else
3878 gcc_assert (orig_var == var);
3879 if (TREE_CODE (d) == INDIRECT_REF)
3881 x = create_tmp_var (ptype, name);
3882 TREE_ADDRESSABLE (x) = 1;
3883 gimplify_assign (x, yb, ilist);
3884 x = build_fold_addr_expr_loc (clause_loc, x);
3886 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3887 gimplify_assign (new_var, x, ilist);
3889 tree y1 = create_tmp_var (ptype, NULL);
3890 gimplify_assign (y1, y, ilist);
3891 tree i2 = NULL_TREE, y2 = NULL_TREE;
3892 tree body2 = NULL_TREE, end2 = NULL_TREE;
3893 tree y3 = NULL_TREE, y4 = NULL_TREE;
3894 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3896 y2 = create_tmp_var (ptype, NULL);
3897 gimplify_assign (y2, y, ilist);
3898 tree ref = build_outer_var_ref (var, ctx);
3899 /* For ref build_outer_var_ref already performs this. */
3900 if (TREE_CODE (d) == INDIRECT_REF)
3901 gcc_assert (omp_is_reference (var));
3902 else if (TREE_CODE (d) == ADDR_EXPR)
3903 ref = build_fold_addr_expr (ref);
3904 else if (omp_is_reference (var))
3905 ref = build_fold_addr_expr (ref);
3906 ref = fold_convert_loc (clause_loc, ptype, ref);
3907 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3908 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3910 y3 = create_tmp_var (ptype, NULL);
3911 gimplify_assign (y3, unshare_expr (ref), ilist);
3913 if (is_simd)
3915 y4 = create_tmp_var (ptype, NULL);
3916 gimplify_assign (y4, ref, dlist);
3919 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3920 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3921 tree body = create_artificial_label (UNKNOWN_LOCATION);
3922 tree end = create_artificial_label (UNKNOWN_LOCATION);
3923 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3924 if (y2)
3926 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3927 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3928 body2 = create_artificial_label (UNKNOWN_LOCATION);
3929 end2 = create_artificial_label (UNKNOWN_LOCATION);
3930 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3932 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3934 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3935 tree decl_placeholder
3936 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3937 SET_DECL_VALUE_EXPR (decl_placeholder,
3938 build_simple_mem_ref (y1));
3939 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3940 SET_DECL_VALUE_EXPR (placeholder,
3941 y3 ? build_simple_mem_ref (y3)
3942 : error_mark_node);
3943 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3944 x = lang_hooks.decls.omp_clause_default_ctor
3945 (c, build_simple_mem_ref (y1),
3946 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3947 if (x)
3948 gimplify_and_add (x, ilist);
3949 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3951 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3952 lower_omp (&tseq, ctx);
3953 gimple_seq_add_seq (ilist, tseq);
3955 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3956 if (is_simd)
3958 SET_DECL_VALUE_EXPR (decl_placeholder,
3959 build_simple_mem_ref (y2));
3960 SET_DECL_VALUE_EXPR (placeholder,
3961 build_simple_mem_ref (y4));
3962 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3963 lower_omp (&tseq, ctx);
3964 gimple_seq_add_seq (dlist, tseq);
3965 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3967 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3968 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3969 x = lang_hooks.decls.omp_clause_dtor
3970 (c, build_simple_mem_ref (y2));
3971 if (x)
3973 gimple_seq tseq = NULL;
3974 dtor = x;
3975 gimplify_stmt (&dtor, &tseq);
3976 gimple_seq_add_seq (dlist, tseq);
3979 else
3981 x = omp_reduction_init (c, TREE_TYPE (type));
3982 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3984 /* reduction(-:var) sums up the partial results, so it
3985 acts identically to reduction(+:var). */
3986 if (code == MINUS_EXPR)
3987 code = PLUS_EXPR;
3989 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3990 if (is_simd)
3992 x = build2 (code, TREE_TYPE (type),
3993 build_simple_mem_ref (y4),
3994 build_simple_mem_ref (y2));
3995 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3998 gimple *g
3999 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4000 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4001 gimple_seq_add_stmt (ilist, g);
4002 if (y3)
4004 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4005 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4006 gimple_seq_add_stmt (ilist, g);
4008 g = gimple_build_assign (i, PLUS_EXPR, i,
4009 build_int_cst (TREE_TYPE (i), 1));
4010 gimple_seq_add_stmt (ilist, g);
4011 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4012 gimple_seq_add_stmt (ilist, g);
4013 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4014 if (y2)
4016 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4017 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4018 gimple_seq_add_stmt (dlist, g);
4019 if (y4)
4021 g = gimple_build_assign
4022 (y4, POINTER_PLUS_EXPR, y4,
4023 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4024 gimple_seq_add_stmt (dlist, g);
4026 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4027 build_int_cst (TREE_TYPE (i2), 1));
4028 gimple_seq_add_stmt (dlist, g);
4029 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4030 gimple_seq_add_stmt (dlist, g);
4031 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4033 continue;
4035 else if (is_variable_sized (var))
4037 /* For variable sized types, we need to allocate the
4038 actual storage here. Call alloca and store the
4039 result in the pointer decl that we created elsewhere. */
4040 if (pass == 0)
4041 continue;
4043 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4045 gcall *stmt;
4046 tree tmp, atmp;
4048 ptr = DECL_VALUE_EXPR (new_var);
4049 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4050 ptr = TREE_OPERAND (ptr, 0);
4051 gcc_assert (DECL_P (ptr));
4052 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4054 /* void *tmp = __builtin_alloca */
4055 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4056 stmt = gimple_build_call (atmp, 2, x,
4057 size_int (DECL_ALIGN (var)));
4058 tmp = create_tmp_var_raw (ptr_type_node);
4059 gimple_add_tmp_var (tmp);
4060 gimple_call_set_lhs (stmt, tmp);
4062 gimple_seq_add_stmt (ilist, stmt);
4064 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4065 gimplify_assign (ptr, x, ilist);
4068 else if (omp_is_reference (var))
4070 /* For references that are being privatized for Fortran,
4071 allocate new backing storage for the new pointer
4072 variable. This allows us to avoid changing all the
4073 code that expects a pointer to something that expects
4074 a direct variable. */
4075 if (pass == 0)
4076 continue;
4078 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4079 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4081 x = build_receiver_ref (var, false, ctx);
4082 x = build_fold_addr_expr_loc (clause_loc, x);
4084 else if (TREE_CONSTANT (x))
4086 /* For reduction in SIMD loop, defer adding the
4087 initialization of the reference, because if we decide
4088 to use SIMD array for it, the initilization could cause
4089 expansion ICE. */
4090 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4091 x = NULL_TREE;
4092 else
4094 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4095 get_name (var));
4096 gimple_add_tmp_var (x);
4097 TREE_ADDRESSABLE (x) = 1;
4098 x = build_fold_addr_expr_loc (clause_loc, x);
4101 else
4103 tree atmp
4104 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4105 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4106 tree al = size_int (TYPE_ALIGN (rtype));
4107 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4110 if (x)
4112 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4113 gimplify_assign (new_var, x, ilist);
4116 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4118 else if (c_kind == OMP_CLAUSE_REDUCTION
4119 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4121 if (pass == 0)
4122 continue;
4124 else if (pass != 0)
4125 continue;
4127 switch (OMP_CLAUSE_CODE (c))
4129 case OMP_CLAUSE_SHARED:
4130 /* Ignore shared directives in teams construct. */
4131 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4132 continue;
4133 /* Shared global vars are just accessed directly. */
4134 if (is_global_var (new_var))
4135 break;
4136 /* For taskloop firstprivate/lastprivate, represented
4137 as firstprivate and shared clause on the task, new_var
4138 is the firstprivate var. */
4139 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4140 break;
4141 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4142 needs to be delayed until after fixup_child_record_type so
4143 that we get the correct type during the dereference. */
4144 by_ref = use_pointer_for_field (var, ctx);
4145 x = build_receiver_ref (var, by_ref, ctx);
4146 SET_DECL_VALUE_EXPR (new_var, x);
4147 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4149 /* ??? If VAR is not passed by reference, and the variable
4150 hasn't been initialized yet, then we'll get a warning for
4151 the store into the omp_data_s structure. Ideally, we'd be
4152 able to notice this and not store anything at all, but
4153 we're generating code too early. Suppress the warning. */
4154 if (!by_ref)
4155 TREE_NO_WARNING (var) = 1;
4156 break;
4158 case OMP_CLAUSE_LASTPRIVATE:
4159 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4160 break;
4161 /* FALLTHRU */
4163 case OMP_CLAUSE_PRIVATE:
4164 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4165 x = build_outer_var_ref (var, ctx);
4166 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4168 if (is_task_ctx (ctx))
4169 x = build_receiver_ref (var, false, ctx);
4170 else
4171 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4173 else
4174 x = NULL;
4175 do_private:
4176 tree nx;
4177 nx = lang_hooks.decls.omp_clause_default_ctor
4178 (c, unshare_expr (new_var), x);
4179 if (is_simd)
4181 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4182 if ((TREE_ADDRESSABLE (new_var) || nx || y
4183 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4184 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4185 ivar, lvar))
4187 if (nx)
4188 x = lang_hooks.decls.omp_clause_default_ctor
4189 (c, unshare_expr (ivar), x);
4190 if (nx && x)
4191 gimplify_and_add (x, &llist[0]);
4192 if (y)
4194 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4195 if (y)
4197 gimple_seq tseq = NULL;
4199 dtor = y;
4200 gimplify_stmt (&dtor, &tseq);
4201 gimple_seq_add_seq (&llist[1], tseq);
4204 break;
4207 if (nx)
4208 gimplify_and_add (nx, ilist);
4209 /* FALLTHRU */
4211 do_dtor:
4212 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4213 if (x)
4215 gimple_seq tseq = NULL;
4217 dtor = x;
4218 gimplify_stmt (&dtor, &tseq);
4219 gimple_seq_add_seq (dlist, tseq);
4221 break;
4223 case OMP_CLAUSE_LINEAR:
4224 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4225 goto do_firstprivate;
4226 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4227 x = NULL;
4228 else
4229 x = build_outer_var_ref (var, ctx);
4230 goto do_private;
4232 case OMP_CLAUSE_FIRSTPRIVATE:
4233 if (is_task_ctx (ctx))
4235 if (omp_is_reference (var) || is_variable_sized (var))
4236 goto do_dtor;
4237 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4238 ctx))
4239 || use_pointer_for_field (var, NULL))
4241 x = build_receiver_ref (var, false, ctx);
4242 SET_DECL_VALUE_EXPR (new_var, x);
4243 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4244 goto do_dtor;
4247 do_firstprivate:
4248 x = build_outer_var_ref (var, ctx);
4249 if (is_simd)
4251 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4252 && gimple_omp_for_combined_into_p (ctx->stmt))
4254 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4255 tree stept = TREE_TYPE (t);
4256 tree ct = omp_find_clause (clauses,
4257 OMP_CLAUSE__LOOPTEMP_);
4258 gcc_assert (ct);
4259 tree l = OMP_CLAUSE_DECL (ct);
4260 tree n1 = fd->loop.n1;
4261 tree step = fd->loop.step;
4262 tree itype = TREE_TYPE (l);
4263 if (POINTER_TYPE_P (itype))
4264 itype = signed_type_for (itype);
4265 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4266 if (TYPE_UNSIGNED (itype)
4267 && fd->loop.cond_code == GT_EXPR)
4268 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4269 fold_build1 (NEGATE_EXPR, itype, l),
4270 fold_build1 (NEGATE_EXPR,
4271 itype, step));
4272 else
4273 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4274 t = fold_build2 (MULT_EXPR, stept,
4275 fold_convert (stept, l), t);
4277 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4279 x = lang_hooks.decls.omp_clause_linear_ctor
4280 (c, new_var, x, t);
4281 gimplify_and_add (x, ilist);
4282 goto do_dtor;
4285 if (POINTER_TYPE_P (TREE_TYPE (x)))
4286 x = fold_build2 (POINTER_PLUS_EXPR,
4287 TREE_TYPE (x), x, t);
4288 else
4289 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4292 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4293 || TREE_ADDRESSABLE (new_var))
4294 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4295 ivar, lvar))
4297 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4299 tree iv = create_tmp_var (TREE_TYPE (new_var));
4300 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4301 gimplify_and_add (x, ilist);
4302 gimple_stmt_iterator gsi
4303 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4304 gassign *g
4305 = gimple_build_assign (unshare_expr (lvar), iv);
4306 gsi_insert_before_without_update (&gsi, g,
4307 GSI_SAME_STMT);
4308 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4309 enum tree_code code = PLUS_EXPR;
4310 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4311 code = POINTER_PLUS_EXPR;
4312 g = gimple_build_assign (iv, code, iv, t);
4313 gsi_insert_before_without_update (&gsi, g,
4314 GSI_SAME_STMT);
4315 break;
4317 x = lang_hooks.decls.omp_clause_copy_ctor
4318 (c, unshare_expr (ivar), x);
4319 gimplify_and_add (x, &llist[0]);
4320 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4321 if (x)
4323 gimple_seq tseq = NULL;
4325 dtor = x;
4326 gimplify_stmt (&dtor, &tseq);
4327 gimple_seq_add_seq (&llist[1], tseq);
4329 break;
4332 x = lang_hooks.decls.omp_clause_copy_ctor
4333 (c, unshare_expr (new_var), x);
4334 gimplify_and_add (x, ilist);
4335 goto do_dtor;
4337 case OMP_CLAUSE__LOOPTEMP_:
4338 gcc_assert (is_taskreg_ctx (ctx));
4339 x = build_outer_var_ref (var, ctx);
4340 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4341 gimplify_and_add (x, ilist);
4342 break;
4344 case OMP_CLAUSE_COPYIN:
4345 by_ref = use_pointer_for_field (var, NULL);
4346 x = build_receiver_ref (var, by_ref, ctx);
4347 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4348 append_to_statement_list (x, &copyin_seq);
4349 copyin_by_ref |= by_ref;
4350 break;
4352 case OMP_CLAUSE_REDUCTION:
4353 /* OpenACC reductions are initialized using the
4354 GOACC_REDUCTION internal function. */
4355 if (is_gimple_omp_oacc (ctx->stmt))
4356 break;
4357 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4359 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4360 gimple *tseq;
4361 x = build_outer_var_ref (var, ctx);
4363 if (omp_is_reference (var)
4364 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4365 TREE_TYPE (x)))
4366 x = build_fold_addr_expr_loc (clause_loc, x);
4367 SET_DECL_VALUE_EXPR (placeholder, x);
4368 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4369 tree new_vard = new_var;
4370 if (omp_is_reference (var))
4372 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4373 new_vard = TREE_OPERAND (new_var, 0);
4374 gcc_assert (DECL_P (new_vard));
4376 if (is_simd
4377 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4378 ivar, lvar))
4380 if (new_vard == new_var)
4382 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4383 SET_DECL_VALUE_EXPR (new_var, ivar);
4385 else
4387 SET_DECL_VALUE_EXPR (new_vard,
4388 build_fold_addr_expr (ivar));
4389 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4391 x = lang_hooks.decls.omp_clause_default_ctor
4392 (c, unshare_expr (ivar),
4393 build_outer_var_ref (var, ctx));
4394 if (x)
4395 gimplify_and_add (x, &llist[0]);
4396 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4398 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4399 lower_omp (&tseq, ctx);
4400 gimple_seq_add_seq (&llist[0], tseq);
4402 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4403 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4404 lower_omp (&tseq, ctx);
4405 gimple_seq_add_seq (&llist[1], tseq);
4406 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4407 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4408 if (new_vard == new_var)
4409 SET_DECL_VALUE_EXPR (new_var, lvar);
4410 else
4411 SET_DECL_VALUE_EXPR (new_vard,
4412 build_fold_addr_expr (lvar));
4413 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4414 if (x)
4416 tseq = NULL;
4417 dtor = x;
4418 gimplify_stmt (&dtor, &tseq);
4419 gimple_seq_add_seq (&llist[1], tseq);
4421 break;
4423 /* If this is a reference to constant size reduction var
4424 with placeholder, we haven't emitted the initializer
4425 for it because it is undesirable if SIMD arrays are used.
4426 But if they aren't used, we need to emit the deferred
4427 initialization now. */
4428 else if (omp_is_reference (var) && is_simd)
4429 handle_simd_reference (clause_loc, new_vard, ilist);
4430 x = lang_hooks.decls.omp_clause_default_ctor
4431 (c, unshare_expr (new_var),
4432 build_outer_var_ref (var, ctx));
4433 if (x)
4434 gimplify_and_add (x, ilist);
4435 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4437 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4438 lower_omp (&tseq, ctx);
4439 gimple_seq_add_seq (ilist, tseq);
4441 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4442 if (is_simd)
4444 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4445 lower_omp (&tseq, ctx);
4446 gimple_seq_add_seq (dlist, tseq);
4447 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4449 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4450 goto do_dtor;
4452 else
4454 x = omp_reduction_init (c, TREE_TYPE (new_var));
4455 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4456 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4458 /* reduction(-:var) sums up the partial results, so it
4459 acts identically to reduction(+:var). */
4460 if (code == MINUS_EXPR)
4461 code = PLUS_EXPR;
4463 tree new_vard = new_var;
4464 if (is_simd && omp_is_reference (var))
4466 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4467 new_vard = TREE_OPERAND (new_var, 0);
4468 gcc_assert (DECL_P (new_vard));
4470 if (is_simd
4471 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4472 ivar, lvar))
4474 tree ref = build_outer_var_ref (var, ctx);
4476 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4478 if (sctx.is_simt)
4480 if (!simt_lane)
4481 simt_lane = create_tmp_var (unsigned_type_node);
4482 x = build_call_expr_internal_loc
4483 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4484 TREE_TYPE (ivar), 2, ivar, simt_lane);
4485 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4486 gimplify_assign (ivar, x, &llist[2]);
4488 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4489 ref = build_outer_var_ref (var, ctx);
4490 gimplify_assign (ref, x, &llist[1]);
4492 if (new_vard != new_var)
4494 SET_DECL_VALUE_EXPR (new_vard,
4495 build_fold_addr_expr (lvar));
4496 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4499 else
4501 if (omp_is_reference (var) && is_simd)
4502 handle_simd_reference (clause_loc, new_vard, ilist);
4503 gimplify_assign (new_var, x, ilist);
4504 if (is_simd)
4506 tree ref = build_outer_var_ref (var, ctx);
4508 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4509 ref = build_outer_var_ref (var, ctx);
4510 gimplify_assign (ref, x, dlist);
4514 break;
4516 default:
4517 gcc_unreachable ();
4522 if (sctx.max_vf == 1)
4523 sctx.is_simt = false;
4525 if (sctx.lane || sctx.is_simt)
4527 uid = create_tmp_var (ptr_type_node, "simduid");
4528 /* Don't want uninit warnings on simduid, it is always uninitialized,
4529 but we use it not for the value, but for the DECL_UID only. */
4530 TREE_NO_WARNING (uid) = 1;
4531 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4532 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4533 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4534 gimple_omp_for_set_clauses (ctx->stmt, c);
4536 /* Emit calls denoting privatized variables and initializing a pointer to
4537 structure that holds private variables as fields after ompdevlow pass. */
4538 if (sctx.is_simt)
4540 sctx.simt_eargs[0] = uid;
4541 gimple *g
4542 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4543 gimple_call_set_lhs (g, uid);
4544 gimple_seq_add_stmt (ilist, g);
4545 sctx.simt_eargs.release ();
4547 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4548 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4549 gimple_call_set_lhs (g, simtrec);
4550 gimple_seq_add_stmt (ilist, g);
4552 if (sctx.lane)
4554 gimple *g
4555 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4556 gimple_call_set_lhs (g, sctx.lane);
4557 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4558 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4559 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4560 build_int_cst (unsigned_type_node, 0));
4561 gimple_seq_add_stmt (ilist, g);
4562 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4563 if (llist[2])
4565 tree simt_vf = create_tmp_var (unsigned_type_node);
4566 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4567 gimple_call_set_lhs (g, simt_vf);
4568 gimple_seq_add_stmt (dlist, g);
4570 tree t = build_int_cst (unsigned_type_node, 1);
4571 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4572 gimple_seq_add_stmt (dlist, g);
4574 t = build_int_cst (unsigned_type_node, 0);
4575 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4576 gimple_seq_add_stmt (dlist, g);
4578 tree body = create_artificial_label (UNKNOWN_LOCATION);
4579 tree header = create_artificial_label (UNKNOWN_LOCATION);
4580 tree end = create_artificial_label (UNKNOWN_LOCATION);
4581 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4582 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4584 gimple_seq_add_seq (dlist, llist[2]);
4586 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4587 gimple_seq_add_stmt (dlist, g);
4589 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4590 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4591 gimple_seq_add_stmt (dlist, g);
4593 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4595 for (int i = 0; i < 2; i++)
4596 if (llist[i])
4598 tree vf = create_tmp_var (unsigned_type_node);
4599 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4600 gimple_call_set_lhs (g, vf);
4601 gimple_seq *seq = i == 0 ? ilist : dlist;
4602 gimple_seq_add_stmt (seq, g);
4603 tree t = build_int_cst (unsigned_type_node, 0);
4604 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4605 gimple_seq_add_stmt (seq, g);
4606 tree body = create_artificial_label (UNKNOWN_LOCATION);
4607 tree header = create_artificial_label (UNKNOWN_LOCATION);
4608 tree end = create_artificial_label (UNKNOWN_LOCATION);
4609 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4610 gimple_seq_add_stmt (seq, gimple_build_label (body));
4611 gimple_seq_add_seq (seq, llist[i]);
4612 t = build_int_cst (unsigned_type_node, 1);
4613 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4614 gimple_seq_add_stmt (seq, g);
4615 gimple_seq_add_stmt (seq, gimple_build_label (header));
4616 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4617 gimple_seq_add_stmt (seq, g);
4618 gimple_seq_add_stmt (seq, gimple_build_label (end));
4621 if (sctx.is_simt)
4623 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4624 gimple *g
4625 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4626 gimple_seq_add_stmt (dlist, g);
4629 /* The copyin sequence is not to be executed by the main thread, since
4630 that would result in self-copies. Perhaps not visible to scalars,
4631 but it certainly is to C++ operator=. */
4632 if (copyin_seq)
4634 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4636 x = build2 (NE_EXPR, boolean_type_node, x,
4637 build_int_cst (TREE_TYPE (x), 0));
4638 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4639 gimplify_and_add (x, ilist);
4642 /* If any copyin variable is passed by reference, we must ensure the
4643 master thread doesn't modify it before it is copied over in all
4644 threads. Similarly for variables in both firstprivate and
4645 lastprivate clauses we need to ensure the lastprivate copying
4646 happens after firstprivate copying in all threads. And similarly
4647 for UDRs if initializer expression refers to omp_orig. */
4648 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4650 /* Don't add any barrier for #pragma omp simd or
4651 #pragma omp distribute. */
4652 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4653 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4654 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4657 /* If max_vf is non-zero, then we can use only a vectorization factor
4658 up to the max_vf we chose. So stick it into the safelen clause. */
4659 if (sctx.max_vf)
4661 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4662 OMP_CLAUSE_SAFELEN);
4663 if (c == NULL_TREE
4664 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4665 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4666 sctx.max_vf) == 1))
4668 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4669 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4670 sctx.max_vf);
4671 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4672 gimple_omp_for_set_clauses (ctx->stmt, c);
4678 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4679 both parallel and workshare constructs. PREDICATE may be NULL if it's
4680 always true. */
4682 static void
4683 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4684 			   omp_context *ctx)
4686   tree x, c, label = NULL, orig_clauses = clauses;
4687   bool par_clauses = false;
4688   tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4690   /* Early exit if there are no lastprivate or linear clauses. */
4691   for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4692     if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4693 	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4694 	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4695       break;
4696   if (clauses == NULL)
4698       /* If this was a workshare clause, see if it had been combined
4699 	 with its parallel.  In that case, look for the clauses on the
4700 	 parallel statement itself.  */
4701       if (is_parallel_ctx (ctx))
4702 	return;
4704       ctx = ctx->outer;
4705       if (ctx == NULL || !is_parallel_ctx (ctx))
4706 	return;
4708       clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4709 				 OMP_CLAUSE_LASTPRIVATE);
4710       if (clauses == NULL)
4711 	return;
4712       par_clauses = true;
     /* The _simt_ and _simduid_ clauses are only attached to
	GIMPLE_OMP_FOR SIMD statements, so only look for them there.  */
4715   bool maybe_simt = false;
4716   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4717       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4719       maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4720       simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4721       if (simduid)
4722 	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
     /* If PREDICATE is given, emit a conditional jump so that the
	copy-out below is only executed when the predicate holds.  Under
	SIMT the predicate is first voted across lanes through
	IFN_GOMP_SIMT_VOTE_ANY before building the condition.  */
4725   if (predicate)
4727       gcond *stmt;
4728       tree label_true, arm1, arm2;
4729       enum tree_code pred_code = TREE_CODE (predicate);
4731       label = create_artificial_label (UNKNOWN_LOCATION);
4732       label_true = create_artificial_label (UNKNOWN_LOCATION);
4733       if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4735 	  arm1 = TREE_OPERAND (predicate, 0);
4736 	  arm2 = TREE_OPERAND (predicate, 1);
4737 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4738 	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4740       else
4742 	  arm1 = predicate;
4743 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4744 	  arm2 = boolean_false_node;
4745 	  pred_code = NE_EXPR;
4747       if (maybe_simt)
4749 	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
4750 	  c = fold_convert (integer_type_node, c);
4751 	  simtcond = create_tmp_var (integer_type_node);
4752 	  gimplify_assign (simtcond, c, stmt_list);
4753 	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4754 						 1, simtcond);
4755 	  c = create_tmp_var (integer_type_node);
4756 	  gimple_call_set_lhs (g, c);
4757 	  gimple_seq_add_stmt (stmt_list, g);
4758 	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4759 				    label_true, label);
4761       else
4762 	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4763       gimple_seq_add_stmt (stmt_list, stmt);
4764       gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
     /* Walk the clause chain and emit the copy-out assignment for every
	lastprivate / linear-with-copyout clause.  */
4767   for (c = clauses; c ;)
4769       tree var, new_var;
4770       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4772       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4773 	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4774 	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4776 	  var = OMP_CLAUSE_DECL (c);
4777 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4778 	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4779 	      && is_taskloop_ctx (ctx))
4781 	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4782 	      new_var = lookup_decl (var, ctx->outer);
4784 	  else
4786 	      new_var = lookup_decl (var, ctx);
4787 	      /* Avoid uninitialized warnings for lastprivate and
4788 		 for linear iterators.  */
4789 	      if (predicate
4790 		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4791 		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4792 		TREE_NO_WARNING (new_var) = 1;
	 /* Non-SIMT SIMD path: if the privatized decl was turned into a
	    per-lane "omp simd array" element, index that array with the
	    lane returned by IFN_GOMP_SIMD_LAST_LANE (computed once and
	    cached in LASTLANE).  */
4795 	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4797 	      tree val = DECL_VALUE_EXPR (new_var);
4798 	      if (TREE_CODE (val) == ARRAY_REF
4799 		  && VAR_P (TREE_OPERAND (val, 0))
4800 		  && lookup_attribute ("omp simd array",
4801 				       DECL_ATTRIBUTES (TREE_OPERAND (val,
4802 								      0))))
4804 		  if (lastlane == NULL)
4806 		      lastlane = create_tmp_var (unsigned_type_node);
4807 		      gcall *g
4808 			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4809 						      2, simduid,
4810 						      TREE_OPERAND (val, 1));
4811 		      gimple_call_set_lhs (g, lastlane);
4812 		      gimple_seq_add_stmt (stmt_list, g);
4814 		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4815 				    TREE_OPERAND (val, 0), lastlane,
4816 				    NULL_TREE, NULL_TREE);
	 /* SIMT path: fetch the value from the last active lane via
	    IFN_GOMP_SIMT_XCHG_IDX, using the lane number produced once
	    by IFN_GOMP_SIMT_LAST_LANE (cached in SIMTLAST).  */
4819 	  else if (maybe_simt)
4821 	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4822 			  ? DECL_VALUE_EXPR (new_var)
4823 			  : new_var);
4824 	      if (simtlast == NULL)
4826 		  simtlast = create_tmp_var (unsigned_type_node);
4827 		  gcall *g = gimple_build_call_internal
4828 		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4829 		  gimple_call_set_lhs (g, simtlast);
4830 		  gimple_seq_add_stmt (stmt_list, g);
4832 	      x = build_call_expr_internal_loc
4833 		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4834 		 TREE_TYPE (val), 2, val, simtlast);
4835 	      new_var = unshare_expr (new_var);
4836 	      gimplify_assign (new_var, x, stmt_list);
4837 	      new_var = unshare_expr (new_var);
	 /* Lower and splice in any deferred statement sequences recorded
	    on the clause before performing the copy-out.  */
4840 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4841 	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4843 	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4844 	      gimple_seq_add_seq (stmt_list,
4845 				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4846 	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4848 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4849 		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4851 	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4852 	      gimple_seq_add_seq (stmt_list,
4853 				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4854 	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4857 	  x = NULL_TREE;
4858 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4859 	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4861 	      gcc_checking_assert (is_taskloop_ctx (ctx));
4862 	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4863 							  ctx->outer->outer);
4864 	      if (is_global_var (ovar))
4865 		x = ovar;
4867 	  if (!x)
4868 	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4869 	  if (omp_is_reference (var))
4870 	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4871 	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4872 	  gimplify_and_add (x, stmt_list);
       /* Advance; when the workshare clause list is exhausted, continue
	  with the enclosing parallel's lastprivate clauses (see the
	  combined-construct comment above).  */
4874       c = OMP_CLAUSE_CHAIN (c);
4875       if (c == NULL && !par_clauses)
4877 	  /* If this was a workshare clause, see if it had been combined
4878 	     with its parallel.  In that case, continue looking for the
4879 	     clauses also on the parallel statement itself.  */
4880 	  if (is_parallel_ctx (ctx))
4881 	    break;
4883 	  ctx = ctx->outer;
4884 	  if (ctx == NULL || !is_parallel_ctx (ctx))
4885 	    break;
4887 	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4888 			       OMP_CLAUSE_LASTPRIVATE);
4889 	  par_clauses = true;
4893   if (label)
4894     gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4897 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4898 (which might be a placeholder). INNER is true if this is an inner
4899 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4900 join markers. Generate the before-loop forking sequence in
4901 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4902 general form of these sequences is
4904 GOACC_REDUCTION_SETUP
4905 GOACC_FORK
4906 GOACC_REDUCTION_INIT
4908 GOACC_REDUCTION_FINI
4909 GOACC_JOIN
4910 GOACC_REDUCTION_TEARDOWN. */
4912 static void
4913 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4914 		       gcall *fork, gcall *join, gimple_seq *fork_seq,
4915 		       gimple_seq *join_seq, omp_context *ctx)
4917   gimple_seq before_fork = NULL;
4918   gimple_seq after_fork = NULL;
4919   gimple_seq before_join = NULL;
4920   gimple_seq after_join = NULL;
4921   tree init_code = NULL_TREE, fini_code = NULL_TREE,
4922     setup_code = NULL_TREE, teardown_code = NULL_TREE;
4923   unsigned offset = 0;
     /* For each reduction clause build the four IFN_GOACC_REDUCTION
	calls (SETUP/INIT/FINI/TEARDOWN); they accumulate into the four
	local sequences above and are stitched around FORK/JOIN at the
	bottom of the function.  */
4925   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4926     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4928 	tree orig = OMP_CLAUSE_DECL (c);
4929 	tree var = maybe_lookup_decl (orig, ctx);
4930 	tree ref_to_res = NULL_TREE;
4931 	tree incoming, outgoing, v1, v2, v3;
4932 	bool is_private = false;
	 /* Canonicalize the reduction operator: MINUS reduces like PLUS,
	    and the short-circuit logical operators reduce like their
	    bitwise counterparts.  The code is passed to the internal fn
	    as an integer constant OP.  */
4934 	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4935 	if (rcode == MINUS_EXPR)
4936 	  rcode = PLUS_EXPR;
4937 	else if (rcode == TRUTH_ANDIF_EXPR)
4938 	  rcode = BIT_AND_EXPR;
4939 	else if (rcode == TRUTH_ORIF_EXPR)
4940 	  rcode = BIT_IOR_EXPR;
4941 	tree op = build_int_cst (unsigned_type_node, rcode);
4943 	if (!var)
4944 	  var = orig;
4946 	incoming = outgoing = var;
4948 	if (!inner)
4950 	    /* See if an outer construct also reduces this variable.  */
4951 	    omp_context *outer = ctx;
4953 	    while (omp_context *probe = outer->outer)
4955 		enum gimple_code type = gimple_code (probe->stmt);
4956 		tree cls;
4958 		switch (type)
4960 		  case GIMPLE_OMP_FOR:
4961 		    cls = gimple_omp_for_clauses (probe->stmt);
4962 		    break;
4964 		  case GIMPLE_OMP_TARGET:
4965 		    if (gimple_omp_target_kind (probe->stmt)
4966 			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
4967 		      goto do_lookup;
4969 		    cls = gimple_omp_target_clauses (probe->stmt);
4970 		    break;
4972 		  default:
4973 		    goto do_lookup;
4976 		outer = probe;
4977 		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
4978 		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4979 		      && orig == OMP_CLAUSE_DECL (cls))
4981 		      incoming = outgoing = lookup_decl (orig, probe);
4982 		      goto has_outer_reduction;
4984 		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4985 			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4986 			   && orig == OMP_CLAUSE_DECL (cls))
4988 		      is_private = true;
4989 		      goto do_lookup;
4993 	  do_lookup:
4994 	    /* This is the outermost construct with this reduction,
4995 	       see if there's a mapping for it.  */
4996 	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4997 		&& maybe_lookup_field (orig, outer) && !is_private)
4999 		ref_to_res = build_receiver_ref (orig, false, outer);
5000 		if (omp_is_reference (orig))
5001 		  ref_to_res = build_simple_mem_ref (ref_to_res);
5003 		tree type = TREE_TYPE (var);
5004 		if (POINTER_TYPE_P (type))
5005 		  type = TREE_TYPE (type);
5007 		outgoing = var;
5008 		incoming = omp_reduction_init_op (loc, rcode, type);
5010 	    else
5012 		/* Try to look at enclosing contexts for reduction var,
5013 		   use original if no mapping found.  */
5014 		tree t = NULL_TREE;
5015 		omp_context *c = ctx->outer;
5016 		while (c && !t)
5018 		    t = maybe_lookup_decl (orig, c);
5019 		    c = c->outer;
5021 		incoming = outgoing = (t ? t : orig);
5024 	  has_outer_reduction:;
5027 	if (!ref_to_res)
5028 	  ref_to_res = integer_zero_node;
	 /* For a reduction on a reference, give each of the three call
	    sites (setup/init, fini, teardown) its own private copy of
	    the pointer (V1..V3) and operate on the pointed-to object.  */
5030 	if (omp_is_reference (orig))
5032 	    tree type = TREE_TYPE (var);
5033 	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5035 	    if (!inner)
5037 		tree x = create_tmp_var (TREE_TYPE (type), id);
5038 		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5041 	    v1 = create_tmp_var (type, id);
5042 	    v2 = create_tmp_var (type, id);
5043 	    v3 = create_tmp_var (type, id);
5045 	    gimplify_assign (v1, var, fork_seq);
5046 	    gimplify_assign (v2, var, fork_seq);
5047 	    gimplify_assign (v3, var, fork_seq);
5049 	    var = build_simple_mem_ref (var);
5050 	    v1 = build_simple_mem_ref (v1);
5051 	    v2 = build_simple_mem_ref (v2);
5052 	    v3 = build_simple_mem_ref (v3);
5053 	    outgoing = build_simple_mem_ref (outgoing);
5055 	    if (!TREE_CONSTANT (incoming))
5056 	      incoming = build_simple_mem_ref (incoming);
5058 	else
5059 	  v1 = v2 = v3 = var;
5061 	/* Determine position in reduction buffer, which may be used
5062 	   by target.  */
5063 	machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5064 	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
5065 	offset = (offset + align - 1) & ~(align - 1);
5066 	tree off = build_int_cst (sizetype, offset);
5067 	offset += GET_MODE_SIZE (mode);
	 /* Build the four phase selectors once, lazily on the first
	    reduction clause.  */
5069 	if (!init_code)
5071 	    init_code = build_int_cst (integer_type_node,
5072 				       IFN_GOACC_REDUCTION_INIT);
5073 	    fini_code = build_int_cst (integer_type_node,
5074 				       IFN_GOACC_REDUCTION_FINI);
5075 	    setup_code = build_int_cst (integer_type_node,
5076 					IFN_GOACC_REDUCTION_SETUP);
5077 	    teardown_code = build_int_cst (integer_type_node,
5078 					   IFN_GOACC_REDUCTION_TEARDOWN);
5081 	tree setup_call
5082 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5083 					  TREE_TYPE (var), 6, setup_code,
5084 					  unshare_expr (ref_to_res),
5085 					  incoming, level, op, off);
5086 	tree init_call
5087 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5088 					  TREE_TYPE (var), 6, init_code,
5089 					  unshare_expr (ref_to_res),
5090 					  v1, level, op, off);
5091 	tree fini_call
5092 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5093 					  TREE_TYPE (var), 6, fini_code,
5094 					  unshare_expr (ref_to_res),
5095 					  v2, level, op, off);
5096 	tree teardown_call
5097 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5098 					  TREE_TYPE (var), 6, teardown_code,
5099 					  ref_to_res, v3, level, op, off);
5101 	gimplify_assign (v1, setup_call, &before_fork);
5102 	gimplify_assign (v2, init_call, &after_fork);
5103 	gimplify_assign (v3, fini_call, &before_join);
5104 	gimplify_assign (outgoing, teardown_call, &after_join);
5107   /* Now stitch things together.  */
5108   gimple_seq_add_seq (fork_seq, before_fork);
5109   if (fork)
5110     gimple_seq_add_stmt (fork_seq, fork);
5111   gimple_seq_add_seq (fork_seq, after_fork);
5113   gimple_seq_add_seq (join_seq, before_join);
5114   if (join)
5115     gimple_seq_add_stmt (join_seq, join);
5116   gimple_seq_add_seq (join_seq, after_join);
5119 /* Generate code to implement the REDUCTION clauses. */
5121 static void
5122 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5124   gimple_seq sub_seq = NULL;
5125   gimple *stmt;
5126   tree x, c;
5127   int count = 0;
5129   /* OpenACC loop reductions are handled elsewhere.  */
5130   if (is_gimple_omp_oacc (ctx->stmt))
5131     return;
5133   /* SIMD reductions are handled in lower_rec_input_clauses.  */
5134   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5135       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5136     return;
5138   /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
5139      update in that case, otherwise use a lock.  */
5140   for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5141     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5143 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5144 	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5146 	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
5147 	    count = -1;
5148 	    break;
5150 	count++;
5153   if (count == 0)
5154     return;
     /* Second pass: emit, for every reduction clause, the merge of the
	private copy back into the shared variable.  The merges collect
	in SUB_SEQ and are wrapped in GOMP_atomic_start/end at the end of
	the function; the single-clause OMP_ATOMIC case returns early.  */
5156   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5158       tree var, ref, new_var, orig_var;
5159       enum tree_code code;
5160       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5162       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5163 	continue;
5165       enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5166       orig_var = var = OMP_CLAUSE_DECL (c);
       /* Strip an array-section MEM_REF down to its base decl.  */
5167       if (TREE_CODE (var) == MEM_REF)
5169 	  var = TREE_OPERAND (var, 0);
5170 	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5171 	    var = TREE_OPERAND (var, 0);
5172 	  if (TREE_CODE (var) == ADDR_EXPR)
5173 	    var = TREE_OPERAND (var, 0);
5174 	  else
5176 	      /* If this is a pointer or referenced based array
5177 		 section, the var could be private in the outer
5178 		 context e.g. on orphaned loop construct.  Pretend this
5179 		 is private variable's outer reference.  */
5180 	      ccode = OMP_CLAUSE_PRIVATE;
5181 	      if (TREE_CODE (var) == INDIRECT_REF)
5182 		var = TREE_OPERAND (var, 0);
5184 	  orig_var = var;
5185 	  if (is_variable_sized (var))
5187 	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5188 	      var = DECL_VALUE_EXPR (var);
5189 	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5190 	      var = TREE_OPERAND (var, 0);
5191 	      gcc_assert (DECL_P (var));
5194       new_var = lookup_decl (var, ctx);
5195       if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5196 	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5197       ref = build_outer_var_ref (var, ctx, ccode);
5198       code = OMP_CLAUSE_REDUCTION_CODE (c);
5200       /* reduction(-:var) sums up the partial results, so it acts
5201 	 identically to reduction(+:var).  */
5202       if (code == MINUS_EXPR)
5203         code = PLUS_EXPR;
       /* Exactly one simple reduction clause: merge with a single
	  OMP_ATOMIC update instead of taking the global lock.  */
5205       if (count == 1)
5207 	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5209 	  addr = save_expr (addr);
5210 	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5211 	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5212 	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5213 	  gimplify_and_add (x, stmt_seqp);
5214 	  return;
       /* Array section (MEM_REF) reduction: emit an element-wise loop
	  over [0, V] that merges each private element into the
	  corresponding outer element.  */
5216       else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5218 	  tree d = OMP_CLAUSE_DECL (c);
5219 	  tree type = TREE_TYPE (d);
5220 	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5221 	  tree i = create_tmp_var (TREE_TYPE (v), NULL);
5222 	  tree ptype = build_pointer_type (TREE_TYPE (type));
5223 	  tree bias = TREE_OPERAND (d, 1);
5224 	  d = TREE_OPERAND (d, 0);
5225 	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5227 	      tree b = TREE_OPERAND (d, 1);
5228 	      b = maybe_lookup_decl (b, ctx);
5229 	      if (b == NULL)
5231 		  b = TREE_OPERAND (d, 1);
5232 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5234 	      if (integer_zerop (bias))
5235 		bias = b;
5236 	      else
5238 		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5239 		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5240 					  TREE_TYPE (b), b, bias);
5242 	      d = TREE_OPERAND (d, 0);
5244 	  /* For ref build_outer_var_ref already performs this, so
5245 	     only new_var needs a dereference.  */
5246 	  if (TREE_CODE (d) == INDIRECT_REF)
5248 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5249 	      gcc_assert (omp_is_reference (var) && var == orig_var);
5251 	  else if (TREE_CODE (d) == ADDR_EXPR)
5253 	      if (orig_var == var)
5255 		  new_var = build_fold_addr_expr (new_var);
5256 		  ref = build_fold_addr_expr (ref);
5259 	  else
5261 	      gcc_assert (orig_var == var);
5262 	      if (omp_is_reference (var))
5263 		ref = build_fold_addr_expr (ref);
5265 	  if (DECL_P (v))
5267 	      tree t = maybe_lookup_decl (v, ctx);
5268 	      if (t)
5269 		v = t;
5270 	      else
5271 		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5272 	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5274 	  if (!integer_zerop (bias))
5276 	      bias = fold_convert_loc (clause_loc, sizetype, bias);
5277 	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5278 					 TREE_TYPE (new_var), new_var,
5279 					 unshare_expr (bias));
5280 	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5281 				     TREE_TYPE (ref), ref, bias);
5283 	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
5284 	  ref = fold_convert_loc (clause_loc, ptype, ref);
5285 	  tree m = create_tmp_var (ptype, NULL);
5286 	  gimplify_assign (m, new_var, stmt_seqp);
5287 	  new_var = m;
5288 	  m = create_tmp_var (ptype, NULL);
5289 	  gimplify_assign (m, ref, stmt_seqp);
5290 	  ref = m;
5291 	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	   /* Loop body label; the per-element merge below either runs a
	      lowered UDR combiner or a plain OUT = OUT op PRIV.  */
5292 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
5293 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
5294 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5295 	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5296 	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
5297 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5299 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5300 	      tree decl_placeholder
5301 		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5302 	      SET_DECL_VALUE_EXPR (placeholder, out);
5303 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5304 	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5305 	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5306 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5307 	      gimple_seq_add_seq (&sub_seq,
5308 				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5309 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5310 	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5311 	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5313 	  else
5315 	      x = build2 (code, TREE_TYPE (out), out, priv);
5316 	      out = unshare_expr (out);
5317 	      gimplify_assign (out, x, &sub_seq);
	   /* Advance both element pointers and the index, then loop while
	      I <= V (V is the section's maximum index).  */
5319 	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5320 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5321 	  gimple_seq_add_stmt (&sub_seq, g);
5322 	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5323 				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5324 	  gimple_seq_add_stmt (&sub_seq, g);
5325 	  g = gimple_build_assign (i, PLUS_EXPR, i,
5326 				   build_int_cst (TREE_TYPE (i), 1));
5327 	  gimple_seq_add_stmt (&sub_seq, g);
5328 	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
5329 	  gimple_seq_add_stmt (&sub_seq, g);
5330 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
       /* User-defined reduction (non-array): lower the combiner with the
	  placeholder bound to the outer variable.  */
5332       else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5334 	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5336 	  if (omp_is_reference (var)
5337 	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
5338 					     TREE_TYPE (ref)))
5339 	    ref = build_fold_addr_expr_loc (clause_loc, ref);
5340 	  SET_DECL_VALUE_EXPR (placeholder, ref);
5341 	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5342 	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5343 	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5344 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5345 	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5347       else
5349 	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
5350 	  ref = build_outer_var_ref (var, ctx);
5351 	  gimplify_assign (ref, x, &sub_seq);
     /* Protect the whole merge sequence with the global atomic lock
	(GOMP_atomic_start / GOMP_atomic_end).  */
5355   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5357   gimple_seq_add_stmt (stmt_seqp, stmt);
5359   gimple_seq_add_seq (stmt_seqp, sub_seq);
5361   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5363   gimple_seq_add_stmt (stmt_seqp, stmt);
5367 /* Generate code to implement the COPYPRIVATE clauses. */
/* CLAUSES is the clause chain of the directive.  Statements that store the
   chosen thread's values into the sender record are appended to SLIST;
   statements that copy them back out in the other threads go to RLIST.
   CTX is the enclosing OMP context.  */
5369 static void
5370 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5371 omp_context *ctx)
5373 tree c;
5375 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5377 tree var, new_var, ref, x;
5378 bool by_ref;
5379 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
/* Only COPYPRIVATE clauses are handled here; skip everything else.  */
5381 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5382 continue;
5384 var = OMP_CLAUSE_DECL (c);
5385 by_ref = use_pointer_for_field (var, NULL);
/* Sender side: store the value (or its address, if passed by
   reference) into the communication record.  */
5387 ref = build_sender_ref (var, ctx);
5388 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5389 if (by_ref)
5391 x = build_fold_addr_expr_loc (clause_loc, new_var);
5392 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5394 gimplify_assign (ref, x, slist);
/* Receiver side: fetch the value back out of the record, adding an
   indirection for by-reference fields.  */
5396 ref = build_receiver_ref (var, false, ctx);
5397 if (by_ref)
5399 ref = fold_convert_loc (clause_loc,
5400 build_pointer_type (TREE_TYPE (new_var)),
5401 ref);
5402 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5404 if (omp_is_reference (var))
5406 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5407 ref = build_simple_mem_ref_loc (clause_loc, ref);
5408 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
/* Use the language hook so e.g. C++ copy assignment is honored.  */
5410 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5411 gimplify_and_add (x, rlist);
5416 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5417 and REDUCTION from the sender (aka parent) side. */
/* Statements executed before spawning the child go to ILIST (copy-in),
   statements executed after it completes go to OLIST (copy-out).  */
5419 static void
5420 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5421 omp_context *ctx)
5423 tree c, t;
5424 int ignored_looptemp = 0;
5425 bool is_taskloop = false;
5427 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5428 by GOMP_taskloop. */
5429 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5431 ignored_looptemp = 2;
5432 is_taskloop = true;
5435 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5437 tree val, ref, x, var;
5438 bool by_ref, do_in = false, do_out = false;
5439 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
/* First filter: decide which clause kinds need sender-side code.  */
5441 switch (OMP_CLAUSE_CODE (c))
5443 case OMP_CLAUSE_PRIVATE:
5444 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5445 break;
5446 continue;
5447 case OMP_CLAUSE_FIRSTPRIVATE:
5448 case OMP_CLAUSE_COPYIN:
5449 case OMP_CLAUSE_LASTPRIVATE:
5450 case OMP_CLAUSE_REDUCTION:
5451 break;
5452 case OMP_CLAUSE_SHARED:
5453 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5454 break;
5455 continue;
5456 case OMP_CLAUSE__LOOPTEMP_:
5457 if (ignored_looptemp)
5459 ignored_looptemp--;
5460 continue;
5462 break;
5463 default:
5464 continue;
/* For array-section reductions the DECL is a MEM_REF; peel it back
   to the underlying base declaration.  */
5467 val = OMP_CLAUSE_DECL (c);
5468 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5469 && TREE_CODE (val) == MEM_REF)
5471 val = TREE_OPERAND (val, 0);
5472 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5473 val = TREE_OPERAND (val, 0);
5474 if (TREE_CODE (val) == INDIRECT_REF
5475 || TREE_CODE (val) == ADDR_EXPR)
5476 val = TREE_OPERAND (val, 0);
5477 if (is_variable_sized (val))
5478 continue;
5481 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5482 outer taskloop region. */
5483 omp_context *ctx_for_o = ctx;
5484 if (is_taskloop
5485 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5486 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5487 ctx_for_o = ctx->outer;
5489 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
/* Globals need no marshalling except for COPYIN, which copies the
   master's value into each thread's threadprivate copy.  */
5491 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5492 && is_global_var (var))
5493 continue;
/* Member accesses (this->field) are represented through a dummy var
   with a DECL_VALUE_EXPR; remap it into the outer context.  */
5495 t = omp_member_access_dummy_var (var);
5496 if (t)
5498 var = DECL_VALUE_EXPR (var);
5499 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5500 if (o != t)
5501 var = unshare_and_remap (var, t, o);
5502 else
5503 var = unshare_expr (var);
5506 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5508 /* Handle taskloop firstprivate/lastprivate, where the
5509 lastprivate on GIMPLE_OMP_TASK is represented as
5510 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5511 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5512 x = omp_build_component_ref (ctx->sender_decl, f);
5513 if (use_pointer_for_field (val, ctx))
5514 var = build_fold_addr_expr (var);
5515 gimplify_assign (x, var, ilist);
5516 DECL_ABSTRACT_ORIGIN (f) = NULL;
5517 continue;
5520 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5521 || val == OMP_CLAUSE_DECL (c))
5522 && is_variable_sized (val))
5523 continue;
5524 by_ref = use_pointer_for_field (val, NULL);
/* Second filter: per clause kind, decide copy-in vs copy-out.  */
5526 switch (OMP_CLAUSE_CODE (c))
5528 case OMP_CLAUSE_FIRSTPRIVATE:
5529 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5530 && !by_ref
5531 && is_task_ctx (ctx))
5532 TREE_NO_WARNING (var) = 1;
5533 do_in = true;
5534 break;
5536 case OMP_CLAUSE_PRIVATE:
5537 case OMP_CLAUSE_COPYIN:
5538 case OMP_CLAUSE__LOOPTEMP_:
5539 do_in = true;
5540 break;
5542 case OMP_CLAUSE_LASTPRIVATE:
5543 if (by_ref || omp_is_reference (val))
5545 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5546 continue;
5547 do_in = true;
5549 else
5551 do_out = true;
5552 if (lang_hooks.decls.omp_private_outer_ref (val))
5553 do_in = true;
5555 break;
5557 case OMP_CLAUSE_REDUCTION:
5558 do_in = true;
5559 if (val == OMP_CLAUSE_DECL (c))
5560 do_out = !(by_ref || omp_is_reference (val));
5561 else
5562 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5563 break;
5565 default:
5566 gcc_unreachable ();
5569 if (do_in)
5571 ref = build_sender_ref (val, ctx);
5572 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5573 gimplify_assign (ref, x, ilist);
5574 if (is_task_ctx (ctx))
5575 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5578 if (do_out)
5580 ref = build_sender_ref (val, ctx);
5581 gimplify_assign (var, ref, olist);
5586 /* Generate code to implement SHARED from the sender (aka parent)
5587 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5588 list things that got automatically shared. */
/* Walks the fields of the communication record type instead of the clause
   list; ILIST gets pre-spawn stores, OLIST gets post-join loads.  */
5590 static void
5591 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5593 tree var, ovar, nvar, t, f, x, record_type;
5595 if (ctx->record_type == NULL)
5596 return;
5598 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5599 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
/* Each field's DECL_ABSTRACT_ORIGIN points at the original shared
   variable, if any; skip fields without one.  */
5601 ovar = DECL_ABSTRACT_ORIGIN (f);
5602 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5603 continue;
5605 nvar = maybe_lookup_decl (ovar, ctx);
5606 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5607 continue;
5609 /* If CTX is a nested parallel directive. Find the immediately
5610 enclosing parallel or workshare construct that contains a
5611 mapping for OVAR. */
5612 var = lookup_decl_in_outer_ctx (ovar, ctx);
5614 t = omp_member_access_dummy_var (var);
5615 if (t)
5617 var = DECL_VALUE_EXPR (var);
5618 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5619 if (o != t)
5620 var = unshare_and_remap (var, t, o);
5621 else
5622 var = unshare_expr (var);
5625 if (use_pointer_for_field (ovar, ctx))
/* By reference: only the address needs to travel; nothing to
   copy back afterwards.  */
5627 x = build_sender_ref (ovar, ctx);
5628 var = build_fold_addr_expr (var);
5629 gimplify_assign (x, var, ilist);
5631 else
/* By value: copy in before, and copy out after unless writing
   back would be wrong (read-only, or by-reference result/parm
   decls as explained below).  */
5633 x = build_sender_ref (ovar, ctx);
5634 gimplify_assign (x, var, ilist);
5636 if (!TREE_READONLY (var)
5637 /* We don't need to receive a new reference to a result
5638 or parm decl. In fact we may not store to it as we will
5639 invalidate any pending RSO and generate wrong gimple
5640 during inlining. */
5641 && !((TREE_CODE (var) == RESULT_DECL
5642 || TREE_CODE (var) == PARM_DECL)
5643 && DECL_BY_REFERENCE (var)))
5645 x = build_sender_ref (ovar, ctx);
5646 gimplify_assign (var, x, olist);
5652 /* Emit an OpenACC head marker call, encapulating the partitioning and
5653 other information that must be processed by the target compiler.
5654 Return the maximum number of dimensions the associated loop might
5655 be partitioned over. */
/* LOC is the location for the emitted call, DDVAR the artificial data
   dependency variable threading the marker calls together, CLAUSES the
   loop's clause list, SEQ the output sequence, CTX the loop's context.  */
5657 static unsigned
5658 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5659 gimple_seq *seq, omp_context *ctx)
5661 unsigned levels = 0;
5662 unsigned tag = 0;
5663 tree gang_static = NULL_TREE;
5664 auto_vec<tree, 5> args;
5666 args.quick_push (build_int_cst
5667 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5668 args.quick_push (ddvar);
/* Accumulate partitioning flags into TAG and count the explicitly
   requested partitioning levels.  */
5669 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5671 switch (OMP_CLAUSE_CODE (c))
5673 case OMP_CLAUSE_GANG:
5674 tag |= OLF_DIM_GANG;
5675 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5676 /* static:* is represented by -1, and we can ignore it, as
5677 scheduling is always static. */
5678 if (gang_static && integer_minus_onep (gang_static))
5679 gang_static = NULL_TREE;
5680 levels++;
5681 break;
5683 case OMP_CLAUSE_WORKER:
5684 tag |= OLF_DIM_WORKER;
5685 levels++;
5686 break;
5688 case OMP_CLAUSE_VECTOR:
5689 tag |= OLF_DIM_VECTOR;
5690 levels++;
5691 break;
5693 case OMP_CLAUSE_SEQ:
5694 tag |= OLF_SEQ;
5695 break;
5697 case OMP_CLAUSE_AUTO:
5698 tag |= OLF_AUTO;
5699 break;
5701 case OMP_CLAUSE_INDEPENDENT:
5702 tag |= OLF_INDEPENDENT;
5703 break;
5705 case OMP_CLAUSE_TILE:
5706 tag |= OLF_TILE;
5707 break;
5709 default:
5710 continue;
5714 if (gang_static)
/* A DECL-valued static argument must be read through the outer
   context so the offloaded region sees the right object.  */
5716 if (DECL_P (gang_static))
5717 gang_static = build_outer_var_ref (gang_static, ctx);
5718 tag |= OLF_GANG_STATIC;
5721 /* In a parallel region, loops are implicitly INDEPENDENT. */
5722 omp_context *tgt = enclosing_target_ctx (ctx);
5723 if (!tgt || is_oacc_parallel (tgt))
5724 tag |= OLF_INDEPENDENT;
5726 if (tag & OLF_TILE)
5727 /* Tiling could use all 3 levels. */
5728 levels = 3;
5729 else
5731 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5732 Ensure at least one level, or 2 for possible auto
5733 partitioning */
5734 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5735 << OLF_DIM_BASE) | OLF_SEQ));
5737 if (levels < 1u + maybe_auto)
5738 levels = 1u + maybe_auto;
/* Emit IFN_UNIQUE (HEAD_MARK, ddvar, levels, tag [, gang_static]).  */
5741 args.quick_push (build_int_cst (integer_type_node, levels));
5742 args.quick_push (build_int_cst (integer_type_node, tag));
5743 if (gang_static)
5744 args.quick_push (gang_static);
5746 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5747 gimple_set_location (call, loc);
5748 gimple_set_lhs (call, ddvar);
5749 gimple_seq_add_stmt (seq, call);
5751 return levels;
5754 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
5755 partitioning level of the enclosed region. */
/* HEAD selects a HEAD_MARK (true) or TAIL_MARK (false) IFN_UNIQUE call;
   TOFOLLOW, when non-NULL, is an extra argument carrying the level of the
   marker that follows.  Note: the header comment's "LEVEL" refers to the
   value passed in TOFOLLOW.  */
5757 static void
5758 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5759 tree tofollow, gimple_seq *seq)
5761 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5762 : IFN_UNIQUE_OACC_TAIL_MARK);
5763 tree marker = build_int_cst (integer_type_node, marker_kind);
/* Pass TOFOLLOW only when present; the call takes 2 or 3 arguments.  */
5764 int nargs = 2 + (tofollow != NULL_TREE);
5765 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5766 marker, ddvar, tofollow);
5767 gimple_set_location (call, loc);
5768 gimple_set_lhs (call, ddvar);
5769 gimple_seq_add_stmt (seq, call);
5772 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5773 the loop clauses, from which we extract reductions. Initialize
5774 HEAD and TAIL. */
/* Emits one fork/join pair (plus per-level markers and reduction setup)
   for each partitioning level reported by lower_oacc_head_mark; fork
   sequences are appended to HEAD in order, join sequences are prepended
   to TAIL so the nesting unwinds symmetrically.  */
5776 static void
5777 lower_oacc_head_tail (location_t loc, tree clauses,
5778 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5780 bool inner = false;
5781 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5782 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5784 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5785 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5786 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5788 gcc_assert (count);
5789 for (unsigned done = 1; count; count--, done++)
5791 gimple_seq fork_seq = NULL;
5792 gimple_seq join_seq = NULL;
/* The partitioning axis is not yet known; -1 is a placeholder
   filled in by the oacc_device_lower pass.  */
5794 tree place = build_int_cst (integer_type_node, -1);
5795 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5796 fork_kind, ddvar, place);
5797 gimple_set_location (fork, loc);
5798 gimple_set_lhs (fork, ddvar);
5800 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5801 join_kind, ddvar, place);
5802 gimple_set_location (join, loc);
5803 gimple_set_lhs (join, ddvar);
5805 /* Mark the beginning of this level sequence. */
5806 if (inner)
5807 lower_oacc_loop_marker (loc, ddvar, true,
5808 build_int_cst (integer_type_node, count),
5809 &fork_seq);
5810 lower_oacc_loop_marker (loc, ddvar, false,
5811 build_int_cst (integer_type_node, done),
5812 &join_seq);
5814 lower_oacc_reductions (loc, clauses, place, inner,
5815 fork, join, &fork_seq, &join_seq, ctx);
5817 /* Append this level to head. */
5818 gimple_seq_add_seq (head, fork_seq);
5819 /* Prepend it to tail. */
5820 gimple_seq_add_seq (&join_seq, *tail);
5821 *tail = join_seq;
5823 inner = true;
5826 /* Mark the end of the sequence. */
5827 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5828 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5831 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5832 catch handler and return it. This prevents programs from violating the
5833 structured block semantics with throws. */
/* Returns BODY unchanged when -fexceptions is off.  The handler either
   calls the language's cleanup action (e.g. std::terminate for C++) or
   falls back to __builtin_trap.  */
5835 static gimple_seq
5836 maybe_catch_exception (gimple_seq body)
5838 gimple *g;
5839 tree decl;
5841 if (!flag_exceptions)
5842 return body;
5844 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5845 decl = lang_hooks.eh_protect_cleanup_actions ();
5846 else
5847 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5849 g = gimple_build_eh_must_not_throw (decl);
5850 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5851 GIMPLE_TRY_CATCH);
5853 return gimple_seq_alloc_with_stmt (g);
5857 /* Routines to lower OMP directives into OMP-GIMPLE. */
5859 /* If ctx is a worksharing context inside of a cancellable parallel
5860 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5861 and conditional branch to parallel's cancel_label to handle
5862 cancellation in the implicit barrier. */
/* BODY must end in a GIMPLE_OMP_RETURN; the cancellation check is
   appended after it.  */
5864 static void
5865 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5867 gimple *omp_return = gimple_seq_last_stmt (*body);
5868 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5869 if (gimple_omp_return_nowait_p (omp_return))
5870 return;
5871 if (ctx->outer
5872 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5873 && ctx->outer->cancellable)
/* The barrier's return value (same type as GOMP_cancel's result)
   tells whether cancellation was observed; branch to the parallel's
   cancel label when it is nonzero.  */
5875 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5876 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5877 tree lhs = create_tmp_var (c_bool_type);
5878 gimple_omp_return_set_lhs (omp_return, lhs);
5879 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5880 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5881 fold_convert (c_bool_type,
5882 boolean_false_node),
5883 ctx->outer->cancel_label, fallthru_label);
5884 gimple_seq_add_stmt (body, g);
5885 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5889 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5890 CTX is the enclosing OMP context for the current statement. */
5892 static void
5893 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5895 tree block, control;
5896 gimple_stmt_iterator tgsi;
5897 gomp_sections *stmt;
5898 gimple *t;
5899 gbind *new_stmt, *bind;
5900 gimple_seq ilist, dlist, olist, new_body;
5902 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5904 push_gimplify_context ();
/* ILIST: privatization setup; DLIST: destructor/cleanup code.  */
5906 dlist = NULL;
5907 ilist = NULL;
5908 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5909 &ilist, &dlist, ctx, NULL);
/* Lower each GIMPLE_OMP_SECTION body in place, splicing it after its
   section statement; lastprivate handling goes into the final one.  */
5911 new_body = gimple_omp_body (stmt);
5912 gimple_omp_set_body (stmt, NULL);
5913 tgsi = gsi_start (new_body);
5914 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5916 omp_context *sctx;
5917 gimple *sec_start;
5919 sec_start = gsi_stmt (tgsi);
5920 sctx = maybe_lookup_ctx (sec_start);
5921 gcc_assert (sctx);
5923 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5924 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5925 GSI_CONTINUE_LINKING);
5926 gimple_omp_set_body (sec_start, NULL);
5928 if (gsi_one_before_end_p (tgsi))
5930 gimple_seq l = NULL;
5931 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5932 &l, ctx);
5933 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5934 gimple_omp_section_set_last (sec_start);
5937 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5938 GSI_CONTINUE_LINKING);
5941 block = make_node (BLOCK);
5942 bind = gimple_build_bind (NULL, new_body, block);
5944 olist = NULL;
5945 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
/* Replace the directive with an enclosing bind that will hold the
   fully assembled lowered body.  */
5947 block = make_node (BLOCK);
5948 new_stmt = gimple_build_bind (NULL, NULL, block);
5949 gsi_replace (gsi_p, new_stmt, true);
5951 pop_gimplify_context (new_stmt);
5952 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5953 BLOCK_VARS (block) = gimple_bind_vars (bind);
5954 if (BLOCK_VARS (block))
5955 TREE_USED (block) = 1;
/* Assemble: ilist, sections stmt, switch, body bind, continue,
   reductions, optional cancel label, cleanups, return.  */
5957 new_body = NULL;
5958 gimple_seq_add_seq (&new_body, ilist);
5959 gimple_seq_add_stmt (&new_body, stmt);
5960 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5961 gimple_seq_add_stmt (&new_body, bind);
5963 control = create_tmp_var (unsigned_type_node, ".section");
5964 t = gimple_build_omp_continue (control, control);
5965 gimple_omp_sections_set_control (stmt, control);
5966 gimple_seq_add_stmt (&new_body, t);
5968 gimple_seq_add_seq (&new_body, olist);
5969 if (ctx->cancellable)
5970 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5971 gimple_seq_add_seq (&new_body, dlist);
5973 new_body = maybe_catch_exception (new_body);
5975 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5976 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5977 t = gimple_build_omp_return (nowait);
5978 gimple_seq_add_stmt (&new_body, t);
5979 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5981 gimple_bind_set_body (new_stmt, new_body);
5985 /* A subroutine of lower_omp_single. Expand the simple form of
5986 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5988 if (GOMP_single_start ())
5989 BODY;
5990 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5992 FIXME. It may be better to delay expanding the logic of this until
5993 pass_expand_omp. The expanded logic may make the job more difficult
5994 to a synchronization analysis pass. */
/* Statements are appended to PRE_P; the barrier itself is added by the
   caller, not here.  */
5996 static void
5997 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5999 location_t loc = gimple_location (single_stmt);
6000 tree tlabel = create_artificial_label (loc);
6001 tree flabel = create_artificial_label (loc);
6002 gimple *call, *cond;
6003 tree lhs, decl;
6005 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6006 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6007 call = gimple_build_call (decl, 0);
6008 gimple_call_set_lhs (call, lhs);
6009 gimple_seq_add_stmt (pre_p, call);
/* Only the thread for which GOMP_single_start returned true runs the
   body; everyone else jumps straight to FLABEL.  */
6011 cond = gimple_build_cond (EQ_EXPR, lhs,
6012 fold_convert_loc (loc, TREE_TYPE (lhs),
6013 boolean_true_node),
6014 tlabel, flabel);
6015 gimple_seq_add_stmt (pre_p, cond);
6016 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6017 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6018 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6022 /* A subroutine of lower_omp_single. Expand the simple form of
6023 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6025 #pragma omp single copyprivate (a, b, c)
6027 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6030 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6032 BODY;
6033 copyout.a = a;
6034 copyout.b = b;
6035 copyout.c = c;
6036 GOMP_single_copy_end (&copyout);
6038 else
6040 a = copyout_p->a;
6041 b = copyout_p->b;
6042 c = copyout_p->c;
6044 GOMP_barrier ();
6047 FIXME. It may be better to delay expanding the logic of this until
6048 pass_expand_omp. The expanded logic may make the job more difficult
6049 to a synchronization analysis pass. */
/* Statements are appended to PRE_P.  CTX->record_type describes the
   copyout struct; sender/receiver decls are created here.  */
6051 static void
6052 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6053 omp_context *ctx)
6055 tree ptr_type, t, l0, l1, l2, bfn_decl;
6056 gimple_seq copyin_seq;
6057 location_t loc = gimple_location (single_stmt);
6059 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6061 ptr_type = build_pointer_type (ctx->record_type);
6062 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
/* l0: body + copy-out path; l1: copy-in path; l2: join point.  */
6064 l0 = create_artificial_label (loc);
6065 l1 = create_artificial_label (loc);
6066 l2 = create_artificial_label (loc);
6068 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6069 t = build_call_expr_loc (loc, bfn_decl, 0);
6070 t = fold_convert_loc (loc, ptr_type, t);
6071 gimplify_assign (ctx->receiver_decl, t, pre_p);
/* GOMP_single_copy_start returns NULL in the single executing thread.  */
6073 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6074 build_int_cst (ptr_type, 0));
6075 t = build3 (COND_EXPR, void_type_node, t,
6076 build_and_jump (&l0), build_and_jump (&l1));
6077 gimplify_and_add (t, pre_p);
6079 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6081 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
/* Sender-side stores go directly into PRE_P here; the receiver-side
   loads are buffered in COPYIN_SEQ and emitted under L1 below.  */
6083 copyin_seq = NULL;
6084 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6085 &copyin_seq, ctx);
6087 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6088 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6089 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6090 gimplify_and_add (t, pre_p);
6092 t = build_and_jump (&l2);
6093 gimplify_and_add (t, pre_p);
6095 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6097 gimple_seq_add_seq (pre_p, copyin_seq);
6099 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6103 /* Expand code for an OpenMP single directive. */
/* Replaces the GIMPLE_OMP_SINGLE at GSI_P with a GIMPLE_BIND containing
   the lowered body; dispatches to the copyprivate or simple form.  */
6105 static void
6106 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6108 tree block;
6109 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6110 gbind *bind;
6111 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6113 push_gimplify_context ();
6115 block = make_node (BLOCK);
6116 bind = gimple_build_bind (NULL, NULL, block);
6117 gsi_replace (gsi_p, bind, true);
6118 bind_body = NULL;
6119 dlist = NULL;
6120 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6121 &bind_body, &dlist, ctx, NULL);
6122 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6124 gimple_seq_add_stmt (&bind_body, single_stmt);
/* A non-null record type means a copyprivate clause was present.  */
6126 if (ctx->record_type)
6127 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6128 else
6129 lower_omp_single_simple (single_stmt, &bind_body);
6131 gimple_omp_set_body (single_stmt, NULL);
6133 gimple_seq_add_seq (&bind_body, dlist);
6135 bind_body = maybe_catch_exception (bind_body);
6137 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6138 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6139 gimple *g = gimple_build_omp_return (nowait);
6140 gimple_seq_add_stmt (&bind_body_tail, g);
6141 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6142 if (ctx->record_type)
/* Clobber the copyout struct after the return so later passes know
   its lifetime has ended (an empty-CONSTRUCTOR volatile store is
   GIMPLE's clobber representation).  */
6144 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6145 tree clobber = build_constructor (ctx->record_type, NULL);
6146 TREE_THIS_VOLATILE (clobber) = 1;
6147 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6148 clobber), GSI_SAME_STMT);
6150 gimple_seq_add_seq (&bind_body, bind_body_tail);
6151 gimple_bind_set_body (bind, bind_body);
6153 pop_gimplify_context (bind);
6155 gimple_bind_append_vars (bind, ctx->block_vars);
6156 BLOCK_VARS (block) = ctx->block_vars;
6157 if (BLOCK_VARS (block))
6158 TREE_USED (block) = 1;
6162 /* Expand code for an OpenMP master directive. */
/* Guards the body with "if (omp_get_thread_num () == 0)"; replaces the
   statement at GSI_P with a GIMPLE_BIND holding the lowered form.  */
6164 static void
6165 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6167 tree block, lab = NULL, x, bfn_decl;
6168 gimple *stmt = gsi_stmt (*gsi_p);
6169 gbind *bind;
6170 location_t loc = gimple_location (stmt);
6171 gimple_seq tseq;
6173 push_gimplify_context ();
6175 block = make_node (BLOCK);
6176 bind = gimple_build_bind (NULL, NULL, block);
6177 gsi_replace (gsi_p, bind, true);
6178 gimple_bind_add_stmt (bind, stmt);
/* Non-master threads jump past the body to LAB (filled in by
   build_and_jump).  */
6180 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6181 x = build_call_expr_loc (loc, bfn_decl, 0);
6182 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6183 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6184 tseq = NULL;
6185 gimplify_and_add (x, &tseq);
6186 gimple_bind_add_seq (bind, tseq);
6188 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6189 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6190 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6191 gimple_omp_set_body (stmt, NULL);
6193 gimple_bind_add_stmt (bind, gimple_build_label (lab));
/* Master has no implied barrier; the return is always nowait.  */
6195 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6197 pop_gimplify_context (bind);
6199 gimple_bind_append_vars (bind, ctx->block_vars);
6200 BLOCK_VARS (block) = ctx->block_vars;
6204 /* Expand code for an OpenMP taskgroup directive. */
/* Brackets the lowered body with a GOMP_taskgroup_start call; the matching
   GOMP_taskgroup_end is emitted later from the GIMPLE_OMP_RETURN.  */
6206 static void
6207 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6209 gimple *stmt = gsi_stmt (*gsi_p);
6210 gcall *x;
6211 gbind *bind;
6212 tree block = make_node (BLOCK);
6214 bind = gimple_build_bind (NULL, NULL, block);
6215 gsi_replace (gsi_p, bind, true);
6216 gimple_bind_add_stmt (bind, stmt);
6218 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6220 gimple_bind_add_stmt (bind, x);
6222 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6223 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6224 gimple_omp_set_body (stmt, NULL);
6226 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6228 gimple_bind_append_vars (bind, ctx->block_vars);
6229 BLOCK_VARS (block) = ctx->block_vars;
6233 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
/* ORD_STMT is the ordered construct; CTX its context, whose outer context
   must be the enclosing GIMPLE_OMP_FOR for anything to be done.  Adjacent
   depend(sink:) ordered constructs are merged, then all sink vectors are
   folded into a single conservative dependence vector.  */
6235 static void
6236 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6237 omp_context *ctx)
6239 struct omp_for_data fd;
6240 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6241 return;
6243 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6244 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6245 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6246 if (!fd.ordered)
6247 return;
6249 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6250 tree c = gimple_omp_ordered_clauses (ord_stmt);
6251 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6252 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6254 /* Merge depend clauses from multiple adjacent
6255 #pragma omp ordered depend(sink:...) constructs
6256 into one #pragma omp ordered depend(sink:...), so that
6257 we can optimize them together. */
6258 gimple_stmt_iterator gsi = *gsi_p;
6259 gsi_next (&gsi);
6260 while (!gsi_end_p (gsi))
6262 gimple *stmt = gsi_stmt (gsi);
6263 if (is_gimple_debug (stmt)
6264 || gimple_code (stmt) == GIMPLE_NOP)
6266 gsi_next (&gsi);
6267 continue;
6269 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6270 break;
6271 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6272 c = gimple_omp_ordered_clauses (ord_stmt2);
6273 if (c == NULL_TREE
6274 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6275 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6276 break;
/* Splice the following construct's clause chain onto ours and
   delete that construct.  */
6277 while (*list_p)
6278 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6279 *list_p = c;
6280 gsi_remove (&gsi, true);
6284 /* Canonicalize sink dependence clauses into one folded clause if
6285 possible.
6287 The basic algorithm is to create a sink vector whose first
6288 element is the GCD of all the first elements, and whose remaining
6289 elements are the minimum of the subsequent columns.
6291 We ignore dependence vectors whose first element is zero because
6292 such dependencies are known to be executed by the same thread.
6294 We take into account the direction of the loop, so a minimum
6295 becomes a maximum if the loop is iterating forwards. We also
6296 ignore sink clauses where the loop direction is unknown, or where
6297 the offsets are clearly invalid because they are not a multiple
6298 of the loop increment.
6300 For example:
6302 #pragma omp for ordered(2)
6303 for (i=0; i < N; ++i)
6304 for (j=0; j < M; ++j)
6306 #pragma omp ordered \
6307 depend(sink:i-8,j-2) \
6308 depend(sink:i,j-1) \ // Completely ignored because i+0.
6309 depend(sink:i-4,j-3) \
6310 depend(sink:i-6,j-4)
6311 #pragma omp ordered depend(source)
6314 Folded clause is:
6316 depend(sink:-gcd(8,4,6),-min(2,3,4))
6317 -or-
6318 depend(sink:-2,-2)
6321 /* FIXME: Computing GCD's where the first element is zero is
6322 non-trivial in the presence of collapsed loops. Do this later. */
6323 if (fd.collapse > 1)
6324 return;
/* folded_deps[0 .. len-1] is the folded vector under construction;
   folded_deps[len .. 2*len-2] holds the current clause's offsets.  */
6326 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6328 /* wide_int is not a POD so it must be default-constructed. */
6329 for (unsigned i = 0; i != 2 * len - 1; ++i)
6330 new (static_cast<void*>(folded_deps + i)) wide_int ();
6332 tree folded_dep = NULL_TREE;
6333 /* TRUE if the first dimension's offset is negative. */
6334 bool neg_offset_p = false;
6336 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6337 unsigned int i;
6338 while ((c = *list_p) != NULL)
6340 bool remove = false;
6342 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6343 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6344 goto next_ordered_clause;
6346 tree vec;
6347 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6348 vec && TREE_CODE (vec) == TREE_LIST;
6349 vec = TREE_CHAIN (vec), ++i)
6351 gcc_assert (i < len);
6353 /* omp_extract_for_data has canonicalized the condition. */
6354 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6355 || fd.loops[i].cond_code == GT_EXPR);
6356 bool forward = fd.loops[i].cond_code == LT_EXPR;
6357 bool maybe_lexically_later = true;
6359 /* While the committee makes up its mind, bail if we have any
6360 non-constant steps. */
6361 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6362 goto lower_omp_ordered_ret;
6364 tree itype = TREE_TYPE (TREE_VALUE (vec));
6365 if (POINTER_TYPE_P (itype))
6366 itype = sizetype;
6367 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6368 TYPE_PRECISION (itype),
6369 TYPE_SIGN (itype));
6371 /* Ignore invalid offsets that are not multiples of the step. */
6372 if (!wi::multiple_of_p
6373 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6374 UNSIGNED))
6376 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6377 "ignoring sink clause with offset that is not "
6378 "a multiple of the loop step");
6379 remove = true;
6380 goto next_ordered_clause;
6383 /* Calculate the first dimension. The first dimension of
6384 the folded dependency vector is the GCD of the first
6385 elements, while ignoring any first elements whose offset
6386 is 0. */
6387 if (i == 0)
6389 /* Ignore dependence vectors whose first dimension is 0. */
6390 if (offset == 0)
6392 remove = true;
6393 goto next_ordered_clause;
6395 else
6397 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6399 error_at (OMP_CLAUSE_LOCATION (c),
6400 "first offset must be in opposite direction "
6401 "of loop iterations");
6402 goto lower_omp_ordered_ret;
6404 if (forward)
6405 offset = -offset;
6406 neg_offset_p = forward;
6407 /* Initialize the first time around. */
6408 if (folded_dep == NULL_TREE)
6410 folded_dep = c;
6411 folded_deps[0] = offset;
6413 else
6414 folded_deps[0] = wi::gcd (folded_deps[0],
6415 offset, UNSIGNED);
6418 /* Calculate minimum for the remaining dimensions. */
6419 else
6421 folded_deps[len + i - 1] = offset;
6422 if (folded_dep == c)
6423 folded_deps[i] = offset;
6424 else if (maybe_lexically_later
6425 && !wi::eq_p (folded_deps[i], offset))
6427 if (forward ^ wi::gts_p (folded_deps[i], offset))
/* This clause is lexically later than the current
   fold; adopt its already-seen dimensions.  */
6429 unsigned int j;
6430 folded_dep = c;
6431 for (j = 1; j <= i; j++)
6432 folded_deps[j] = folded_deps[len + j - 1];
6434 else
6435 maybe_lexically_later = false;
6439 gcc_assert (i == len);
/* Every processed clause is removed; the folded result is re-linked
   onto the statement at the end.  */
6441 remove = true;
6443 next_ordered_clause:
6444 if (remove)
6445 *list_p = OMP_CLAUSE_CHAIN (c);
6446 else
6447 list_p = &OMP_CLAUSE_CHAIN (c);
6450 if (folded_dep)
6452 if (neg_offset_p)
6453 folded_deps[0] = -folded_deps[0];
6455 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6456 if (POINTER_TYPE_P (itype))
6457 itype = sizetype;
6459 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6460 = wide_int_to_tree (itype, folded_deps[0]);
6461 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6462 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6465 lower_omp_ordered_ret:
6467 /* Ordered without clauses is #pragma omp threads, while we want
6468 a nop instead if we remove all clauses. */
6469 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6470 gsi_replace (gsi_p, gimple_build_nop (), true);
/* Expand code for an OpenMP ordered directive.  GSI_P points at the
   GIMPLE_OMP_ORDERED statement; CTX is its lowering context.  The body is
   wrapped in start/end runtime calls (GOMP_ordered_start/end, or the
   GOMP_SIMD_ORDERED_* internal fns for simd), and for SIMT offload targets
   an extra per-lane serialization loop is emitted around the body.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This is needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* THREADS is folded into the call argument so the expander can tell
	 "ordered threads simd" from plain "ordered simd".  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* On SIMT targets all lanes execute concurrently, so serialize the
	 body: COUNTER starts at this lane's id, and the BODY/TEST loop
	 below lets exactly the lane whose GOMP_SIMT_ORDERED_PRED is zero
	 run the body on each pass.  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Loop epilogue: decrement COUNTER and keep looping while any lane
	 still has a non-negative counter (GOMP_SIMT_VOTE_ANY).  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Cache of per-name mutex decls, keyed by the critical section's
   IDENTIFIER.  GC-rooted (GTY) so the decls survive across functions.  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  /* The mutex symbol is public and common so that all translation
	     units using the same critical name share one lock.  */
	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  /* Emit lock call, then the lowered body, then the unlock call.  */
  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  FD describes the loop; CTX is its lowering context.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Negate the loop condition: LT becomes GE, otherwise (GT) LE.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  /* For a combined construct with a non-constant collapsed bound,
	     the real end value lives in a _looptemp_ clause of the
	     enclosing parallel/task construct - find that context.  */
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      /* Skip the istart/iend temporaries plus one _looptemp_ per
		 collapsed dimension; the next one (if any) holds N2.  */
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
/* Lower code for an OMP loop directive.  GSI_P points at the GIMPLE_OMP_FOR
   statement; CTX is its lowering context.  Builds a new GIMPLE_BIND holding
   the input-clause setup, the (possibly rewritten) loop statement, its
   lowered body, the continue/return markers and the exit clauses.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      /* Prepend COUNT _looptemp_ clauses to the loop's clause chain,
	 reusing the enclosing taskreg's decls when available.  */
      for (i = 0; i < count; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
    }

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);

  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* A "phony" loop (gridified) keeps only its body; the loop statement
     itself and the continue/return markers are omitted.  */
  bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
		     && gimple_omp_for_grid_phony (stmt));
  if (!phony_loop)
    gimple_seq_add_stmt (&body, stmt);
  gimple_seq_add_seq (&body, gimple_omp_body (stmt));

  if (!phony_loop)
    gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							   fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  body = maybe_catch_exception (body);

  if (!phony_loop)
    {
      /* Region exit marker goes at the end of the loop body.  */
      gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
      maybe_add_implicit_barrier_cancel (ctx, &body);
    }

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
7010 /* Callback for walk_stmts. Check if the current statement only contains
7011 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
7013 static tree
7014 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7015 bool *handled_ops_p,
7016 struct walk_stmt_info *wi)
7018 int *info = (int *) wi->info;
7019 gimple *stmt = gsi_stmt (*gsi_p);
7021 *handled_ops_p = true;
7022 switch (gimple_code (stmt))
7024 WALK_SUBSTMTS;
7026 case GIMPLE_OMP_FOR:
7027 case GIMPLE_OMP_SECTIONS:
7028 *info = *info == 0 ? 1 : -1;
7029 break;
7030 default:
7031 *info = -1;
7032 break;
7034 return NULL;
/* State shared by the task-copyfn helpers (see create_task_copyfn).  */

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The OMP context of the task whose copy function is being built.  */
  omp_context *ctx;
};
7047 static tree
7048 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7050 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7052 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7053 return create_tmp_var (TREE_TYPE (var));
7055 return var;
7058 static tree
7059 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7061 tree name, new_fields = NULL, type, f;
7063 type = lang_hooks.types.make_type (RECORD_TYPE);
7064 name = DECL_NAME (TYPE_NAME (orig_type));
7065 name = build_decl (gimple_location (tcctx->ctx->stmt),
7066 TYPE_DECL, name, type);
7067 TYPE_NAME (type) = name;
7069 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7071 tree new_f = copy_node (f);
7072 DECL_CONTEXT (new_f) = type;
7073 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7074 TREE_CHAIN (new_f) = new_fields;
7075 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7076 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7077 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7078 &tcctx->cb, NULL);
7079 new_fields = new_f;
7080 tcctx->cb.decl_map->put (f, new_f);
7082 TYPE_FIELDS (type) = nreverse (new_fields);
7083 layout_type (type);
7084 return type;
/* Create task copyfn.  Fills in the body of the copy function attached to
   TASK_STMT, which copies firstprivate data and shared-variable pointers
   from the sender record (second argument) into the task's own record
   (first argument).  Runs three passes over the task's clauses: first VLA
   size/offset temporaries, then shared pointers and non-VLA firstprivate
   construction, finally VLA firstprivates.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  Remapping is only
     required when some field has variably modified type (VLAs).  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    /* decl_map == NULL doubles as the "no remapping" flag below.  */
    tcctx.cb.decl_map = NULL;

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* Shared-firstprivate vars are keyed by &DECL_UID rather than the
	   decl itself (they have two field mappings).  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLAs are handled in the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	/* Use the language's copy constructor semantics for the copy.  */
	t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  /* A VLA's value expr is *ptr_var; copy the data through the
	     pointer, then point the destination pointer field at it.  */
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
/* Lower the depend clauses found on *PCLAUSES into the array form the
   runtime expects: element 0 is the total dependence count, element 1 the
   number of out/inout dependences, followed by the out/inout addresses and
   then the in addresses.  Initialization statements are appended to *ISEQ,
   and a clobber of the array to *OSEQ; *PCLAUSES gains a new DEPEND clause
   pointing at the array.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  size_t n_in = 0, n_out = 0, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  /* Count the in and out/inout dependences separately; source/sink
     (doacross) kinds must not reach here.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_IN:
	  n_in++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  n_out++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
  gimple_seq_add_stmt (iseq, g);
  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
	      NULL_TREE);
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
  gimple_seq_add_stmt (iseq, g);
  /* Fill in the addresses: first the out/inout ones (i == 0), then the
     in ones (i == 1).  */
  for (i = 0; i < 2; i++)
    {
      if ((i ? n_in : n_out) == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
	  {
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* Clobber the array afterwards so its stack slot can be reused.  */
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = 1;
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
7385 /* Lower the OpenMP parallel or task directive in the current statement
7386 in GSI_P. CTX holds context information for the directive. */
7388 static void
7389 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7391 tree clauses;
7392 tree child_fn, t;
7393 gimple *stmt = gsi_stmt (*gsi_p);
7394 gbind *par_bind, *bind, *dep_bind = NULL;
7395 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7396 location_t loc = gimple_location (stmt);
 /* The body of a parallel/task is always a single GIMPLE_BIND; pull it
    and its enclosed statement sequence out for lowering below.  */
7398 clauses = gimple_omp_taskreg_clauses (stmt);
7399 par_bind
7400 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7401 par_body = gimple_bind_body (par_bind);
7402 child_fn = ctx->cb.dst_fn;
 /* For a plain (not already combined) parallel, walk the body looking
    for a single worksharing construct; if exactly one is found, mark
    the parallel as combined so expansion can use the fused entry
    points.  check_combined_parallel counts into ws_num via wi.info.  */
7403 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7404 && !gimple_omp_parallel_combined_p (stmt))
7406 struct walk_stmt_info wi;
7407 int ws_num = 0;
7409 memset (&wi, 0, sizeof (wi));
7410 wi.info = &ws_num;
7411 wi.val_only = true;
7412 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7413 if (ws_num == 1)
7414 gimple_omp_parallel_set_combined_p (stmt, true);
 /* A task with depend clauses needs an extra enclosing bind (dep_bind)
    holding the dependence setup (dep_ilist) before the task and the
    teardown (dep_olist) after it; lower_depend_clauses fills both and
    rewrites the clause list in place.  Note the matching
    pop_gimplify_context (dep_bind) at the end of this function.  */
7416 gimple_seq dep_ilist = NULL;
7417 gimple_seq dep_olist = NULL;
7418 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7419 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7421 push_gimplify_context ();
7422 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7423 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7424 &dep_ilist, &dep_olist);
 /* srecord_type is only created for tasks that need a separate copy
    function for firstprivate data; build that function now.  */
7427 if (ctx->srecord_type)
7428 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7430 push_gimplify_context ();
7432 par_olist = NULL;
7433 par_ilist = NULL;
7434 par_rlist = NULL;
 /* A "phony" construct comes from the grid (GPGPU) lowering of a
    parallel: its body is emitted inline rather than outlined, so the
    OMP statement itself is dropped below.  */
7435 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7436 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7437 if (phony_construct && ctx->record_type)
7439 gcc_checking_assert (!ctx->receiver_decl);
7440 ctx->receiver_decl = create_tmp_var
7441 (build_reference_type (ctx->record_type), ".omp_rec");
 /* Lower data-sharing clauses into entry (par_ilist) / exit (par_olist)
    sequences, then recursively lower the body itself.  Reductions are
    only emitted here for parallel; task reductions are not handled by
    this path.  */
7443 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7444 lower_omp (&par_body, ctx);
7445 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7446 lower_reduction_clauses (clauses, &par_rlist, ctx);
7448 /* Declare all the variables created by mapping and the variables
7449 declared in the scope of the parallel body. */
7450 record_vars_into (ctx->block_vars, child_fn);
7451 record_vars_into (gimple_bind_vars (par_bind), child_fn);
 /* If any data is passed to the child, materialize the sender side of
    the marshalling record (.omp_data_o) and hook it up as the data
    argument of the taskreg statement.  The sender uses srecord_type
    when a task copyfn exists, record_type otherwise.  */
7453 if (ctx->record_type)
7455 ctx->sender_decl
7456 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7457 : ctx->record_type, ".omp_data_o")
7458 DECL_NAMELESS (ctx->sender_decl) = 1;
7459 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7460 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
 /* ilist/olist: statements executed in the PARENT before/after the
    construct, filling in and reading back the sender record.  */
7463 olist = NULL;
7464 ilist = NULL;
7465 lower_send_clauses (clauses, &ilist, &olist, ctx);
7466 lower_send_shared_vars (&ilist, &olist, ctx);
 /* Clobber the sender record once the region is done so its stack
    slot can be reused (an empty-CTOR assign with TREE_THIS_VOLATILE
    is GIMPLE's clobber idiom).  */
7468 if (ctx->record_type)
7470 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7471 TREE_THIS_VOLATILE (clobber) = 1;
7472 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7473 clobber));
7476 /* Once all the expansions are done, sequence all the different
7477 fragments inside gimple_omp_body. */
7479 new_body = NULL;
 /* Child-side prologue: receiver_decl = (cast) &sender_decl.  The
    expansion pass later rewrites this when outlining the child.  */
7481 if (ctx->record_type)
7483 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7484 /* fixup_child_record_type might have changed receiver_decl's type. */
7485 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7486 gimple_seq_add_stmt (&new_body,
7487 gimple_build_assign (ctx->receiver_decl, t));
 /* Assemble the child body in order: clause prologue, user body,
    reduction merges, cancellation landing pad (if any), clause
    epilogue; then wrap it for EH if needed.  */
7490 gimple_seq_add_seq (&new_body, par_ilist);
7491 gimple_seq_add_seq (&new_body, par_body);
7492 gimple_seq_add_seq (&new_body, par_rlist);
7493 if (ctx->cancellable)
7494 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7495 gimple_seq_add_seq (&new_body, par_olist);
7496 new_body = maybe_catch_exception (new_body);
7497 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7498 gimple_seq_add_stmt (&new_body,
7499 gimple_build_omp_continue (integer_zero_node,
7500 integer_zero_node));
 /* Real constructs get a terminating OMP_RETURN and keep the OMP
    statement; phony (grid) ones have their body spliced inline
    instead (see below).  */
7501 if (!phony_construct)
7503 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7504 gimple_omp_set_body (stmt, new_body);
 /* Replace the original statement with the wrapping bind(s):
    bind = { ilist; stmt-or-body; olist; }, optionally nested inside
    dep_bind = { dep_ilist; bind; dep_olist; }.  */
7507 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7508 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7509 gimple_bind_add_seq (bind, ilist);
7510 if (!phony_construct)
7511 gimple_bind_add_stmt (bind, stmt);
7512 else
7513 gimple_bind_add_seq (bind, new_body);
7514 gimple_bind_add_seq (bind, olist);
 /* Balances the push_gimplify_context after create_task_copyfn.  */
7516 pop_gimplify_context (NULL);
7518 if (dep_bind)
7520 gimple_bind_add_seq (dep_bind, dep_ilist);
7521 gimple_bind_add_stmt (dep_bind, bind);
7522 gimple_bind_add_seq (dep_bind, dep_olist);
 /* Balances the push for the depend-clause context above.  */
7523 pop_gimplify_context (dep_bind)
7527 /* Lower the GIMPLE_OMP_TARGET in the current statement
7528 in GSI_P. CTX holds context information for the directive. */
7530 static void
7531 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7533 tree clauses;
7534 tree child_fn, t, c;
7535 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7536 gbind *tgt_bind, *bind, *dep_bind = NULL;
7537 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7538 location_t loc = gimple_location (stmt);
7539 bool offloaded, data_region;
7540 unsigned int map_cnt = 0;
7542 offloaded = is_gimple_omp_offloaded (stmt);
7543 switch (gimple_omp_target_kind (stmt))
7545 case GF_OMP_TARGET_KIND_REGION:
7546 case GF_OMP_TARGET_KIND_UPDATE:
7547 case GF_OMP_TARGET_KIND_ENTER_DATA:
7548 case GF_OMP_TARGET_KIND_EXIT_DATA:
7549 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7550 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7551 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7552 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7553 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7554 data_region = false;
7555 break;
7556 case GF_OMP_TARGET_KIND_DATA:
7557 case GF_OMP_TARGET_KIND_OACC_DATA:
7558 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7559 data_region = true;
7560 break;
7561 default:
7562 gcc_unreachable ();
7565 clauses = gimple_omp_target_clauses (stmt);
7567 gimple_seq dep_ilist = NULL;
7568 gimple_seq dep_olist = NULL;
7569 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7571 push_gimplify_context ();
7572 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7573 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7574 &dep_ilist, &dep_olist);
7577 tgt_bind = NULL;
7578 tgt_body = NULL;
7579 if (offloaded)
7581 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7582 tgt_body = gimple_bind_body (tgt_bind);
7584 else if (data_region)
7585 tgt_body = gimple_omp_body (stmt);
7586 child_fn = ctx->cb.dst_fn;
7588 push_gimplify_context ();
7589 fplist = NULL;
7591 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7592 switch (OMP_CLAUSE_CODE (c))
7594 tree var, x;
7596 default:
7597 break;
7598 case OMP_CLAUSE_MAP:
7599 #if CHECKING_P
7600 /* First check what we're prepared to handle in the following. */
7601 switch (OMP_CLAUSE_MAP_KIND (c))
7603 case GOMP_MAP_ALLOC:
7604 case GOMP_MAP_TO:
7605 case GOMP_MAP_FROM:
7606 case GOMP_MAP_TOFROM:
7607 case GOMP_MAP_POINTER:
7608 case GOMP_MAP_TO_PSET:
7609 case GOMP_MAP_DELETE:
7610 case GOMP_MAP_RELEASE:
7611 case GOMP_MAP_ALWAYS_TO:
7612 case GOMP_MAP_ALWAYS_FROM:
7613 case GOMP_MAP_ALWAYS_TOFROM:
7614 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7615 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7616 case GOMP_MAP_STRUCT:
7617 case GOMP_MAP_ALWAYS_POINTER:
7618 break;
7619 case GOMP_MAP_FORCE_ALLOC:
7620 case GOMP_MAP_FORCE_TO:
7621 case GOMP_MAP_FORCE_FROM:
7622 case GOMP_MAP_FORCE_TOFROM:
7623 case GOMP_MAP_FORCE_PRESENT:
7624 case GOMP_MAP_FORCE_DEVICEPTR:
7625 case GOMP_MAP_DEVICE_RESIDENT:
7626 case GOMP_MAP_LINK:
7627 gcc_assert (is_gimple_omp_oacc (stmt));
7628 break;
7629 default:
7630 gcc_unreachable ();
7632 #endif
7633 /* FALLTHRU */
7634 case OMP_CLAUSE_TO:
7635 case OMP_CLAUSE_FROM:
7636 oacc_firstprivate:
7637 var = OMP_CLAUSE_DECL (c);
7638 if (!DECL_P (var))
7640 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7641 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7642 && (OMP_CLAUSE_MAP_KIND (c)
7643 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7644 map_cnt++;
7645 continue;
7648 if (DECL_SIZE (var)
7649 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7651 tree var2 = DECL_VALUE_EXPR (var);
7652 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7653 var2 = TREE_OPERAND (var2, 0);
7654 gcc_assert (DECL_P (var2));
7655 var = var2;
7658 if (offloaded
7659 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7660 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7661 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7663 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7665 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7666 && varpool_node::get_create (var)->offloadable)
7667 continue;
7669 tree type = build_pointer_type (TREE_TYPE (var));
7670 tree new_var = lookup_decl (var, ctx);
7671 x = create_tmp_var_raw (type, get_name (new_var));
7672 gimple_add_tmp_var (x);
7673 x = build_simple_mem_ref (x);
7674 SET_DECL_VALUE_EXPR (new_var, x);
7675 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7677 continue;
7680 if (!maybe_lookup_field (var, ctx))
7681 continue;
7683 /* Don't remap oacc parallel reduction variables, because the
7684 intermediate result must be local to each gang. */
7685 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7686 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7688 x = build_receiver_ref (var, true, ctx);
7689 tree new_var = lookup_decl (var, ctx);
7691 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7692 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7693 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7694 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7695 x = build_simple_mem_ref (x);
7696 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7698 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7699 if (omp_is_reference (new_var))
7701 /* Create a local object to hold the instance
7702 value. */
7703 tree type = TREE_TYPE (TREE_TYPE (new_var));
7704 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7705 tree inst = create_tmp_var (type, id);
7706 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7707 x = build_fold_addr_expr (inst);
7709 gimplify_assign (new_var, x, &fplist);
7711 else if (DECL_P (new_var))
7713 SET_DECL_VALUE_EXPR (new_var, x);
7714 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7716 else
7717 gcc_unreachable ();
7719 map_cnt++;
7720 break;
7722 case OMP_CLAUSE_FIRSTPRIVATE:
7723 if (is_oacc_parallel (ctx))
7724 goto oacc_firstprivate;
7725 map_cnt++;
7726 var = OMP_CLAUSE_DECL (c);
7727 if (!omp_is_reference (var)
7728 && !is_gimple_reg_type (TREE_TYPE (var)))
7730 tree new_var = lookup_decl (var, ctx);
7731 if (is_variable_sized (var))
7733 tree pvar = DECL_VALUE_EXPR (var);
7734 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7735 pvar = TREE_OPERAND (pvar, 0);
7736 gcc_assert (DECL_P (pvar));
7737 tree new_pvar = lookup_decl (pvar, ctx);
7738 x = build_fold_indirect_ref (new_pvar);
7739 TREE_THIS_NOTRAP (x) = 1;
7741 else
7742 x = build_receiver_ref (var, true, ctx);
7743 SET_DECL_VALUE_EXPR (new_var, x);
7744 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7746 break;
7748 case OMP_CLAUSE_PRIVATE:
7749 if (is_gimple_omp_oacc (ctx->stmt))
7750 break;
7751 var = OMP_CLAUSE_DECL (c);
7752 if (is_variable_sized (var))
7754 tree new_var = lookup_decl (var, ctx);
7755 tree pvar = DECL_VALUE_EXPR (var);
7756 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7757 pvar = TREE_OPERAND (pvar, 0);
7758 gcc_assert (DECL_P (pvar));
7759 tree new_pvar = lookup_decl (pvar, ctx);
7760 x = build_fold_indirect_ref (new_pvar);
7761 TREE_THIS_NOTRAP (x) = 1;
7762 SET_DECL_VALUE_EXPR (new_var, x);
7763 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7765 break;
7767 case OMP_CLAUSE_USE_DEVICE_PTR:
7768 case OMP_CLAUSE_IS_DEVICE_PTR:
7769 var = OMP_CLAUSE_DECL (c);
7770 map_cnt++;
7771 if (is_variable_sized (var))
7773 tree new_var = lookup_decl (var, ctx);
7774 tree pvar = DECL_VALUE_EXPR (var);
7775 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7776 pvar = TREE_OPERAND (pvar, 0);
7777 gcc_assert (DECL_P (pvar));
7778 tree new_pvar = lookup_decl (pvar, ctx);
7779 x = build_fold_indirect_ref (new_pvar);
7780 TREE_THIS_NOTRAP (x) = 1;
7781 SET_DECL_VALUE_EXPR (new_var, x);
7782 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7784 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7786 tree new_var = lookup_decl (var, ctx);
7787 tree type = build_pointer_type (TREE_TYPE (var));
7788 x = create_tmp_var_raw (type, get_name (new_var));
7789 gimple_add_tmp_var (x);
7790 x = build_simple_mem_ref (x);
7791 SET_DECL_VALUE_EXPR (new_var, x);
7792 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7794 else
7796 tree new_var = lookup_decl (var, ctx);
7797 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7798 gimple_add_tmp_var (x);
7799 SET_DECL_VALUE_EXPR (new_var, x);
7800 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7802 break;
7805 if (offloaded)
7807 target_nesting_level++;
7808 lower_omp (&tgt_body, ctx);
7809 target_nesting_level--;
7811 else if (data_region)
7812 lower_omp (&tgt_body, ctx);
7814 if (offloaded)
7816 /* Declare all the variables created by mapping and the variables
7817 declared in the scope of the target body. */
7818 record_vars_into (ctx->block_vars, child_fn);
7819 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7822 olist = NULL;
7823 ilist = NULL;
7824 if (ctx->record_type)
7826 ctx->sender_decl
7827 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7828 DECL_NAMELESS (ctx->sender_decl) = 1;
7829 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7830 t = make_tree_vec (3);
7831 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7832 TREE_VEC_ELT (t, 1)
7833 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7834 ".omp_data_sizes");
7835 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7836 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7837 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7838 tree tkind_type = short_unsigned_type_node;
7839 int talign_shift = 8;
7840 TREE_VEC_ELT (t, 2)
7841 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7842 ".omp_data_kinds");
7843 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7844 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7845 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7846 gimple_omp_target_set_data_arg (stmt, t);
7848 vec<constructor_elt, va_gc> *vsize;
7849 vec<constructor_elt, va_gc> *vkind;
7850 vec_alloc (vsize, map_cnt);
7851 vec_alloc (vkind, map_cnt);
7852 unsigned int map_idx = 0;
7854 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7855 switch (OMP_CLAUSE_CODE (c))
7857 tree ovar, nc, s, purpose, var, x, type;
7858 unsigned int talign;
7860 default:
7861 break;
7863 case OMP_CLAUSE_MAP:
7864 case OMP_CLAUSE_TO:
7865 case OMP_CLAUSE_FROM:
7866 oacc_firstprivate_map:
7867 nc = c;
7868 ovar = OMP_CLAUSE_DECL (c);
7869 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7870 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7871 || (OMP_CLAUSE_MAP_KIND (c)
7872 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7873 break;
7874 if (!DECL_P (ovar))
7876 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7877 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7879 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7880 == get_base_address (ovar));
7881 nc = OMP_CLAUSE_CHAIN (c);
7882 ovar = OMP_CLAUSE_DECL (nc);
7884 else
7886 tree x = build_sender_ref (ovar, ctx);
7887 tree v
7888 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7889 gimplify_assign (x, v, &ilist);
7890 nc = NULL_TREE;
7893 else
7895 if (DECL_SIZE (ovar)
7896 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7898 tree ovar2 = DECL_VALUE_EXPR (ovar);
7899 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7900 ovar2 = TREE_OPERAND (ovar2, 0);
7901 gcc_assert (DECL_P (ovar2));
7902 ovar = ovar2;
7904 if (!maybe_lookup_field (ovar, ctx))
7905 continue;
7908 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7909 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7910 talign = DECL_ALIGN_UNIT (ovar);
7911 if (nc)
7913 var = lookup_decl_in_outer_ctx (ovar, ctx);
7914 x = build_sender_ref (ovar, ctx);
7916 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7917 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7918 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7919 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7921 gcc_assert (offloaded);
7922 tree avar
7923 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7924 mark_addressable (avar);
7925 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7926 talign = DECL_ALIGN_UNIT (avar);
7927 avar = build_fold_addr_expr (avar);
7928 gimplify_assign (x, avar, &ilist);
7930 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7932 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7933 if (!omp_is_reference (var))
7935 if (is_gimple_reg (var)
7936 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7937 TREE_NO_WARNING (var) = 1;
7938 var = build_fold_addr_expr (var);
7940 else
7941 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7942 gimplify_assign (x, var, &ilist);
7944 else if (is_gimple_reg (var))
7946 gcc_assert (offloaded);
7947 tree avar = create_tmp_var (TREE_TYPE (var));
7948 mark_addressable (avar);
7949 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7950 if (GOMP_MAP_COPY_TO_P (map_kind)
7951 || map_kind == GOMP_MAP_POINTER
7952 || map_kind == GOMP_MAP_TO_PSET
7953 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7955 /* If we need to initialize a temporary
7956 with VAR because it is not addressable, and
7957 the variable hasn't been initialized yet, then
7958 we'll get a warning for the store to avar.
7959 Don't warn in that case, the mapping might
7960 be implicit. */
7961 TREE_NO_WARNING (var) = 1;
7962 gimplify_assign (avar, var, &ilist);
7964 avar = build_fold_addr_expr (avar);
7965 gimplify_assign (x, avar, &ilist);
7966 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7967 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7968 && !TYPE_READONLY (TREE_TYPE (var)))
7970 x = unshare_expr (x);
7971 x = build_simple_mem_ref (x);
7972 gimplify_assign (var, x, &olist);
7975 else
7977 var = build_fold_addr_expr (var);
7978 gimplify_assign (x, var, &ilist);
7981 s = NULL_TREE;
7982 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7984 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7985 s = TREE_TYPE (ovar);
7986 if (TREE_CODE (s) == REFERENCE_TYPE)
7987 s = TREE_TYPE (s);
7988 s = TYPE_SIZE_UNIT (s);
7990 else
7991 s = OMP_CLAUSE_SIZE (c);
7992 if (s == NULL_TREE)
7993 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7994 s = fold_convert (size_type_node, s);
7995 purpose = size_int (map_idx++);
7996 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7997 if (TREE_CODE (s) != INTEGER_CST)
7998 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8000 unsigned HOST_WIDE_INT tkind, tkind_zero;
8001 switch (OMP_CLAUSE_CODE (c))
8003 case OMP_CLAUSE_MAP:
8004 tkind = OMP_CLAUSE_MAP_KIND (c);
8005 tkind_zero = tkind;
8006 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
8007 switch (tkind)
8009 case GOMP_MAP_ALLOC:
8010 case GOMP_MAP_TO:
8011 case GOMP_MAP_FROM:
8012 case GOMP_MAP_TOFROM:
8013 case GOMP_MAP_ALWAYS_TO:
8014 case GOMP_MAP_ALWAYS_FROM:
8015 case GOMP_MAP_ALWAYS_TOFROM:
8016 case GOMP_MAP_RELEASE:
8017 case GOMP_MAP_FORCE_TO:
8018 case GOMP_MAP_FORCE_FROM:
8019 case GOMP_MAP_FORCE_TOFROM:
8020 case GOMP_MAP_FORCE_PRESENT:
8021 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8022 break;
8023 case GOMP_MAP_DELETE:
8024 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
8025 default:
8026 break;
8028 if (tkind_zero != tkind)
8030 if (integer_zerop (s))
8031 tkind = tkind_zero;
8032 else if (integer_nonzerop (s))
8033 tkind_zero = tkind;
8035 break;
8036 case OMP_CLAUSE_FIRSTPRIVATE:
8037 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8038 tkind = GOMP_MAP_TO;
8039 tkind_zero = tkind;
8040 break;
8041 case OMP_CLAUSE_TO:
8042 tkind = GOMP_MAP_TO;
8043 tkind_zero = tkind;
8044 break;
8045 case OMP_CLAUSE_FROM:
8046 tkind = GOMP_MAP_FROM;
8047 tkind_zero = tkind;
8048 break;
8049 default:
8050 gcc_unreachable ();
8052 gcc_checking_assert (tkind
8053 < (HOST_WIDE_INT_C (1U) << talign_shift));
8054 gcc_checking_assert (tkind_zero
8055 < (HOST_WIDE_INT_C (1U) << talign_shift));
8056 talign = ceil_log2 (talign);
8057 tkind |= talign << talign_shift;
8058 tkind_zero |= talign << talign_shift;
8059 gcc_checking_assert (tkind
8060 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8061 gcc_checking_assert (tkind_zero
8062 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8063 if (tkind == tkind_zero)
8064 x = build_int_cstu (tkind_type, tkind);
8065 else
8067 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8068 x = build3 (COND_EXPR, tkind_type,
8069 fold_build2 (EQ_EXPR, boolean_type_node,
8070 unshare_expr (s), size_zero_node),
8071 build_int_cstu (tkind_type, tkind_zero),
8072 build_int_cstu (tkind_type, tkind));
8074 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8075 if (nc && nc != c)
8076 c = nc;
8077 break;
8079 case OMP_CLAUSE_FIRSTPRIVATE:
8080 if (is_oacc_parallel (ctx))
8081 goto oacc_firstprivate_map;
8082 ovar = OMP_CLAUSE_DECL (c);
8083 if (omp_is_reference (ovar))
8084 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8085 else
8086 talign = DECL_ALIGN_UNIT (ovar);
8087 var = lookup_decl_in_outer_ctx (ovar, ctx);
8088 x = build_sender_ref (ovar, ctx);
8089 tkind = GOMP_MAP_FIRSTPRIVATE;
8090 type = TREE_TYPE (ovar);
8091 if (omp_is_reference (ovar))
8092 type = TREE_TYPE (type);
8093 if ((INTEGRAL_TYPE_P (type)
8094 && TYPE_PRECISION (type) <= POINTER_SIZE)
8095 || TREE_CODE (type) == POINTER_TYPE)
8097 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8098 tree t = var;
8099 if (omp_is_reference (var))
8100 t = build_simple_mem_ref (var);
8101 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8102 TREE_NO_WARNING (var) = 1;
8103 if (TREE_CODE (type) != POINTER_TYPE)
8104 t = fold_convert (pointer_sized_int_node, t);
8105 t = fold_convert (TREE_TYPE (x), t);
8106 gimplify_assign (x, t, &ilist);
8108 else if (omp_is_reference (var))
8109 gimplify_assign (x, var, &ilist);
8110 else if (is_gimple_reg (var))
8112 tree avar = create_tmp_var (TREE_TYPE (var));
8113 mark_addressable (avar);
8114 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8115 TREE_NO_WARNING (var) = 1;
8116 gimplify_assign (avar, var, &ilist);
8117 avar = build_fold_addr_expr (avar);
8118 gimplify_assign (x, avar, &ilist);
8120 else
8122 var = build_fold_addr_expr (var);
8123 gimplify_assign (x, var, &ilist);
8125 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8126 s = size_int (0);
8127 else if (omp_is_reference (ovar))
8128 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8129 else
8130 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8131 s = fold_convert (size_type_node, s);
8132 purpose = size_int (map_idx++);
8133 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8134 if (TREE_CODE (s) != INTEGER_CST)
8135 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8137 gcc_checking_assert (tkind
8138 < (HOST_WIDE_INT_C (1U) << talign_shift));
8139 talign = ceil_log2 (talign);
8140 tkind |= talign << talign_shift;
8141 gcc_checking_assert (tkind
8142 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8143 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8144 build_int_cstu (tkind_type, tkind));
8145 break;
8147 case OMP_CLAUSE_USE_DEVICE_PTR:
8148 case OMP_CLAUSE_IS_DEVICE_PTR:
8149 ovar = OMP_CLAUSE_DECL (c);
8150 var = lookup_decl_in_outer_ctx (ovar, ctx);
8151 x = build_sender_ref (ovar, ctx);
8152 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8153 tkind = GOMP_MAP_USE_DEVICE_PTR;
8154 else
8155 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8156 type = TREE_TYPE (ovar);
8157 if (TREE_CODE (type) == ARRAY_TYPE)
8158 var = build_fold_addr_expr (var);
8159 else
8161 if (omp_is_reference (ovar))
8163 type = TREE_TYPE (type);
8164 if (TREE_CODE (type) != ARRAY_TYPE)
8165 var = build_simple_mem_ref (var);
8166 var = fold_convert (TREE_TYPE (x), var);
8169 gimplify_assign (x, var, &ilist);
8170 s = size_int (0);
8171 purpose = size_int (map_idx++);
8172 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8173 gcc_checking_assert (tkind
8174 < (HOST_WIDE_INT_C (1U) << talign_shift));
8175 gcc_checking_assert (tkind
8176 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8177 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8178 build_int_cstu (tkind_type, tkind));
8179 break;
8182 gcc_assert (map_idx == map_cnt);
8184 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8185 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8186 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8187 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8188 for (int i = 1; i <= 2; i++)
8189 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8191 gimple_seq initlist = NULL;
8192 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8193 TREE_VEC_ELT (t, i)),
8194 &initlist, true, NULL_TREE);
8195 gimple_seq_add_seq (&ilist, initlist);
8197 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8198 NULL);
8199 TREE_THIS_VOLATILE (clobber) = 1;
8200 gimple_seq_add_stmt (&olist,
8201 gimple_build_assign (TREE_VEC_ELT (t, i),
8202 clobber));
8205 tree clobber = build_constructor (ctx->record_type, NULL);
8206 TREE_THIS_VOLATILE (clobber) = 1;
8207 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8208 clobber));
8211 /* Once all the expansions are done, sequence all the different
8212 fragments inside gimple_omp_body. */
8214 new_body = NULL;
8216 if (offloaded
8217 && ctx->record_type)
8219 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8220 /* fixup_child_record_type might have changed receiver_decl's type. */
8221 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8222 gimple_seq_add_stmt (&new_body,
8223 gimple_build_assign (ctx->receiver_decl, t));
8225 gimple_seq_add_seq (&new_body, fplist);
8227 if (offloaded || data_region)
8229 tree prev = NULL_TREE;
8230 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8231 switch (OMP_CLAUSE_CODE (c))
8233 tree var, x;
8234 default:
8235 break;
8236 case OMP_CLAUSE_FIRSTPRIVATE:
8237 if (is_gimple_omp_oacc (ctx->stmt))
8238 break;
8239 var = OMP_CLAUSE_DECL (c);
8240 if (omp_is_reference (var)
8241 || is_gimple_reg_type (TREE_TYPE (var)))
8243 tree new_var = lookup_decl (var, ctx);
8244 tree type;
8245 type = TREE_TYPE (var);
8246 if (omp_is_reference (var))
8247 type = TREE_TYPE (type);
8248 if ((INTEGRAL_TYPE_P (type)
8249 && TYPE_PRECISION (type) <= POINTER_SIZE)
8250 || TREE_CODE (type) == POINTER_TYPE)
8252 x = build_receiver_ref (var, false, ctx);
8253 if (TREE_CODE (type) != POINTER_TYPE)
8254 x = fold_convert (pointer_sized_int_node, x);
8255 x = fold_convert (type, x);
8256 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8257 fb_rvalue);
8258 if (omp_is_reference (var))
8260 tree v = create_tmp_var_raw (type, get_name (var));
8261 gimple_add_tmp_var (v);
8262 TREE_ADDRESSABLE (v) = 1;
8263 gimple_seq_add_stmt (&new_body,
8264 gimple_build_assign (v, x));
8265 x = build_fold_addr_expr (v);
8267 gimple_seq_add_stmt (&new_body,
8268 gimple_build_assign (new_var, x));
8270 else
8272 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8273 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8274 fb_rvalue);
8275 gimple_seq_add_stmt (&new_body,
8276 gimple_build_assign (new_var, x));
8279 else if (is_variable_sized (var))
8281 tree pvar = DECL_VALUE_EXPR (var);
8282 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8283 pvar = TREE_OPERAND (pvar, 0);
8284 gcc_assert (DECL_P (pvar));
8285 tree new_var = lookup_decl (pvar, ctx);
8286 x = build_receiver_ref (var, false, ctx);
8287 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8288 gimple_seq_add_stmt (&new_body,
8289 gimple_build_assign (new_var, x));
8291 break;
8292 case OMP_CLAUSE_PRIVATE:
8293 if (is_gimple_omp_oacc (ctx->stmt))
8294 break;
8295 var = OMP_CLAUSE_DECL (c);
8296 if (omp_is_reference (var))
8298 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8299 tree new_var = lookup_decl (var, ctx);
8300 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8301 if (TREE_CONSTANT (x))
8303 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8304 get_name (var));
8305 gimple_add_tmp_var (x);
8306 TREE_ADDRESSABLE (x) = 1;
8307 x = build_fold_addr_expr_loc (clause_loc, x);
8309 else
8310 break;
8312 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8313 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8314 gimple_seq_add_stmt (&new_body,
8315 gimple_build_assign (new_var, x));
8317 break;
8318 case OMP_CLAUSE_USE_DEVICE_PTR:
8319 case OMP_CLAUSE_IS_DEVICE_PTR:
8320 var = OMP_CLAUSE_DECL (c);
8321 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8322 x = build_sender_ref (var, ctx);
8323 else
8324 x = build_receiver_ref (var, false, ctx);
8325 if (is_variable_sized (var))
8327 tree pvar = DECL_VALUE_EXPR (var);
8328 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8329 pvar = TREE_OPERAND (pvar, 0);
8330 gcc_assert (DECL_P (pvar));
8331 tree new_var = lookup_decl (pvar, ctx);
8332 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8333 gimple_seq_add_stmt (&new_body,
8334 gimple_build_assign (new_var, x));
8336 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8338 tree new_var = lookup_decl (var, ctx);
8339 new_var = DECL_VALUE_EXPR (new_var);
8340 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8341 new_var = TREE_OPERAND (new_var, 0);
8342 gcc_assert (DECL_P (new_var));
8343 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8344 gimple_seq_add_stmt (&new_body,
8345 gimple_build_assign (new_var, x));
8347 else
8349 tree type = TREE_TYPE (var);
8350 tree new_var = lookup_decl (var, ctx);
8351 if (omp_is_reference (var))
8353 type = TREE_TYPE (type);
8354 if (TREE_CODE (type) != ARRAY_TYPE)
8356 tree v = create_tmp_var_raw (type, get_name (var));
8357 gimple_add_tmp_var (v);
8358 TREE_ADDRESSABLE (v) = 1;
8359 x = fold_convert (type, x);
8360 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8361 fb_rvalue);
8362 gimple_seq_add_stmt (&new_body,
8363 gimple_build_assign (v, x));
8364 x = build_fold_addr_expr (v);
8367 new_var = DECL_VALUE_EXPR (new_var);
8368 x = fold_convert (TREE_TYPE (new_var), x);
8369 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8370 gimple_seq_add_stmt (&new_body,
8371 gimple_build_assign (new_var, x));
8373 break;
8375 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8376 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8377 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8378 or references to VLAs. */
8379 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8380 switch (OMP_CLAUSE_CODE (c))
8382 tree var;
8383 default:
8384 break;
8385 case OMP_CLAUSE_MAP:
8386 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8387 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8389 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8390 HOST_WIDE_INT offset = 0;
8391 gcc_assert (prev);
8392 var = OMP_CLAUSE_DECL (c);
8393 if (DECL_P (var)
8394 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8395 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8396 ctx))
8397 && varpool_node::get_create (var)->offloadable)
8398 break;
8399 if (TREE_CODE (var) == INDIRECT_REF
8400 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8401 var = TREE_OPERAND (var, 0);
8402 if (TREE_CODE (var) == COMPONENT_REF)
8404 var = get_addr_base_and_unit_offset (var, &offset);
8405 gcc_assert (var != NULL_TREE && DECL_P (var));
8407 else if (DECL_SIZE (var)
8408 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8410 tree var2 = DECL_VALUE_EXPR (var);
8411 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8412 var2 = TREE_OPERAND (var2, 0);
8413 gcc_assert (DECL_P (var2));
8414 var = var2;
8416 tree new_var = lookup_decl (var, ctx), x;
8417 tree type = TREE_TYPE (new_var);
8418 bool is_ref;
8419 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8420 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8421 == COMPONENT_REF))
8423 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8424 is_ref = true;
8425 new_var = build2 (MEM_REF, type,
8426 build_fold_addr_expr (new_var),
8427 build_int_cst (build_pointer_type (type),
8428 offset));
8430 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8432 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8433 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8434 new_var = build2 (MEM_REF, type,
8435 build_fold_addr_expr (new_var),
8436 build_int_cst (build_pointer_type (type),
8437 offset));
8439 else
8440 is_ref = omp_is_reference (var);
8441 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8442 is_ref = false;
8443 bool ref_to_array = false;
8444 if (is_ref)
8446 type = TREE_TYPE (type);
8447 if (TREE_CODE (type) == ARRAY_TYPE)
8449 type = build_pointer_type (type);
8450 ref_to_array = true;
8453 else if (TREE_CODE (type) == ARRAY_TYPE)
8455 tree decl2 = DECL_VALUE_EXPR (new_var);
8456 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8457 decl2 = TREE_OPERAND (decl2, 0);
8458 gcc_assert (DECL_P (decl2));
8459 new_var = decl2;
8460 type = TREE_TYPE (new_var);
8462 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8463 x = fold_convert_loc (clause_loc, type, x);
8464 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8466 tree bias = OMP_CLAUSE_SIZE (c);
8467 if (DECL_P (bias))
8468 bias = lookup_decl (bias, ctx);
8469 bias = fold_convert_loc (clause_loc, sizetype, bias);
8470 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8471 bias);
8472 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8473 TREE_TYPE (x), x, bias);
8475 if (ref_to_array)
8476 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8477 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8478 if (is_ref && !ref_to_array)
8480 tree t = create_tmp_var_raw (type, get_name (var));
8481 gimple_add_tmp_var (t);
8482 TREE_ADDRESSABLE (t) = 1;
8483 gimple_seq_add_stmt (&new_body,
8484 gimple_build_assign (t, x));
8485 x = build_fold_addr_expr_loc (clause_loc, t);
8487 gimple_seq_add_stmt (&new_body,
8488 gimple_build_assign (new_var, x));
8489 prev = NULL_TREE;
8491 else if (OMP_CLAUSE_CHAIN (c)
8492 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8493 == OMP_CLAUSE_MAP
8494 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8495 == GOMP_MAP_FIRSTPRIVATE_POINTER
8496 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8497 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8498 prev = c;
8499 break;
8500 case OMP_CLAUSE_PRIVATE:
8501 var = OMP_CLAUSE_DECL (c);
8502 if (is_variable_sized (var))
8504 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8505 tree new_var = lookup_decl (var, ctx);
8506 tree pvar = DECL_VALUE_EXPR (var);
8507 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8508 pvar = TREE_OPERAND (pvar, 0);
8509 gcc_assert (DECL_P (pvar));
8510 tree new_pvar = lookup_decl (pvar, ctx);
8511 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8512 tree al = size_int (DECL_ALIGN (var));
8513 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8514 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8515 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8516 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8517 gimple_seq_add_stmt (&new_body,
8518 gimple_build_assign (new_pvar, x));
8520 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8522 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8523 tree new_var = lookup_decl (var, ctx);
8524 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8525 if (TREE_CONSTANT (x))
8526 break;
8527 else
8529 tree atmp
8530 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8531 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8532 tree al = size_int (TYPE_ALIGN (rtype));
8533 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8536 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8537 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8538 gimple_seq_add_stmt (&new_body,
8539 gimple_build_assign (new_var, x));
8541 break;
8544 gimple_seq fork_seq = NULL;
8545 gimple_seq join_seq = NULL;
8547 if (is_oacc_parallel (ctx))
8549 /* If there are reductions on the offloaded region itself, treat
8550 them as a dummy GANG loop. */
8551 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8553 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8554 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8557 gimple_seq_add_seq (&new_body, fork_seq);
8558 gimple_seq_add_seq (&new_body, tgt_body);
8559 gimple_seq_add_seq (&new_body, join_seq);
8561 if (offloaded)
8562 new_body = maybe_catch_exception (new_body);
8564 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8565 gimple_omp_set_body (stmt, new_body);
8568 bind = gimple_build_bind (NULL, NULL,
8569 tgt_bind ? gimple_bind_block (tgt_bind)
8570 : NULL_TREE);
8571 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8572 gimple_bind_add_seq (bind, ilist);
8573 gimple_bind_add_stmt (bind, stmt);
8574 gimple_bind_add_seq (bind, olist);
8576 pop_gimplify_context (NULL);
8578 if (dep_bind)
8580 gimple_bind_add_seq (dep_bind, dep_ilist);
8581 gimple_bind_add_stmt (dep_bind, bind);
8582 gimple_bind_add_seq (dep_bind, dep_olist);
8583 pop_gimplify_context (dep_bind);
/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Replace the teams statement with a GIMPLE_BIND that will hold the
     lowered sequence; BLOCK backs the bind for debug info.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause into a gimple value; 0 means "not
     specified, runtime chooses".  Side effects are gimplified into
     BIND_BODY so they execute before the GOMP_teams call below.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for thread_limit; again 0 means "not specified".  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  /* For gridified (HSA) kernels the teams construct is phony: no
     runtime call and no OMP_RETURN are emitted.  */
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  /* Splice in the lowered body, then the reduction and destructor
     sequences, then the closing OMP_RETURN.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
8654 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8656 static void
8657 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8659 gimple *stmt = gsi_stmt (*gsi_p);
8660 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8661 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8662 gimple_build_omp_return (false));
8666 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8667 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8668 of OMP context, but with task_shared_vars set. */
8670 static tree
8671 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8672 void *data)
8674 tree t = *tp;
8676 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8677 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8678 return t;
8680 if (task_shared_vars
8681 && DECL_P (t)
8682 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8683 return t;
8685 /* If a global variable has been privatized, TREE_CONSTANT on
8686 ADDR_EXPR might be wrong. */
8687 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8688 recompute_tree_invariant_for_addr_expr (t);
8690 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8691 return NULL_TREE;
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Context of the statement being regimplified.  */
  omp_context *ctx;
  /* Flat vector of <saved DECL_VALUE_EXPR, decl> pairs, pushed in that
     order, used to restore the original value exprs afterwards.  */
  vec<tree> *decls;
};
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      /* Only remap when the context has its own copy of the dummy var.  */
      if (o != t)
	{
	  /* Save <old DECL_VALUE_EXPR, decl> — in that order; the pop
	     loop in lower_omp_regimplify_operands relies on it — then
	     install a value expr remapped from T to O.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      /* Temporarily redirect DECL_VALUE_EXPRs of member-access dummy
	 vars to this context's copies, recording the originals.  */
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  /* Restore the saved DECL_VALUE_EXPRs.  Pairs were pushed as
     <value expr, decl>, so pop yields the decl first.  */
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
/* Lower the statement at *GSI_P within OMP context CTX (NULL when not
   inside any OMP construct).  Dispatches on the statement code: OMP
   constructs go to their dedicated lowering helpers; everything else is
   regimplified if it mentions privatized or task-shared variables.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only passed to lower_omp_regimplify_p when CTX is NULL and
     task_shared_vars is set; clear it up front for that case.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    /* Containers: recurse into their sub-sequences with the same CTX.  */
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      break;
    /* OMP constructs: look up their scanned context and dispatch to the
       construct-specific lowering routine.  Cancellable regions get a
       label for the cancellation branches built in the GIMPLE_CALL case
       below.  */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    /* Cancellation checks belong to the enclosing work-sharing
	       region, not an individual section.  */
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; a plain barrier is left as-is.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* In a cancellable region, barriers must also observe
		   cancellation; switch to the _cancel variant.  */
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Give the call an lhs and branch to the region's
	       cancel_label when it returns true.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
8955 static void
8956 lower_omp (gimple_seq *body, omp_context *ctx)
8958 location_t saved_location = input_location;
8959 gimple_stmt_iterator gsi;
8960 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8961 lower_omp_1 (&gsi, ctx);
8962 /* During gimplification, we haven't folded statments inside offloading
8963 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8964 if (target_nesting_level || taskreg_nesting_level)
8965 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8966 fold_stmt (&gsi);
8967 input_location = saved_location;
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* If HSA offloading was requested, rewrite suitable target constructs
     into GPGPU kernel grids before scanning.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  /* Phase 1: scan the whole body, building omp_context records.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Phase 2: lower, but only if any OMP construct was actually found.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  /* Release the per-invocation global state.  */
  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}
/* Pass registration for the "omplower" lowering pass.  */

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  /* No gate: the pass always runs so that PROP_gimple_lomp is provided;
     execute_lower_omp itself bails out early when there is no OMP.  */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Map from each LABEL_DECL to the innermost OMP construct containing
   its definition; built by diagnose_sb_1, consulted by diagnose_sb_2.  */
static splay_tree all_labels;
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Branch and label in the same (possibly NULL) construct: fine.  */
  if (label_ctx == branch_ctx)
    return false;

  /* Determine which programming model the violated construct belongs
     to, so the error message names the right one.  */
  const char* kind = NULL;

  if (flag_cilkplus)
    {
      if ((branch_ctx
	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
	  || (label_ctx
	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
	kind = "Cilk Plus";
    }
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  /* Replace the offending branch with a NOP so later passes do not see
     the invalid control flow.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  /* WI->info carries the innermost enclosing OMP construct (NULL at the
     outermost level).  */
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record which construct this label is defined in.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  /* WI->info carries the innermost enclosing OMP construct (NULL at the
     outermost level).  */
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	/* Check both outgoing edges of the conditional.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos cannot be checked against a label's context.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    /* One diagnostic per switch is enough; diagnose_sb_0 has
	       already replaced the statement with a NOP.  */
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside a construct always leaves it.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
9306 static unsigned int
9307 diagnose_omp_structured_block_errors (void)
9309 struct walk_stmt_info wi;
9310 gimple_seq body = gimple_body (current_function_decl);
9312 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9314 memset (&wi, 0, sizeof (wi));
9315 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9317 memset (&wi, 0, sizeof (wi));
9318 wi.want_locations = true;
9319 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9321 gimple_set_body (current_function_decl, body);
9323 splay_tree_delete (all_labels);
9324 all_labels = NULL;
9326 return 0;
/* Pass registration for the structured-block diagnostic pass.  */

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when some OMP-family language extension is enabled.  */
  virtual bool gate (function *)
  {
    return flag_cilkplus || flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
9372 #include "gt-omp-low.h"