/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
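
/* For illustration only (a sketch of the overall effect, not literal
   output of this pass): a construct such as

       #pragma omp parallel shared(a)
       a += 1;

   eventually ends up outlined into a child function roughly of the form

       static void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       {
         *.omp_data_i->a += 1;
       }

   with the parent packing (the address of) A into an .omp_data_s record
   passed to GOMP_parallel.  The record layout and the by-value versus
   by-reference choice are decided by the scanning code below.  */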
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
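
/* Usage sketch: WALK_SUBSTMTS is only meaningful when expanded inside
   the switch of a walk_gimple_seq callback that has a HANDLED_OPS_P
   parameter in scope, e.g.

       switch (gimple_code (stmt))
         {
         WALK_SUBSTMTS;
         case GIMPLE_OMP_FOR:
           ...
         }

   (see omp_find_combined_for later in this file for a real instance).  */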
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
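
/* Example (hypothetical trees): if X is A + A * B, then
   unshare_and_remap (x, a, c) yields a fresh tree equivalent to
   C + C * B, while X itself is left untouched because the walk operates
   on an unshared copy.  build_outer_var_ref below relies on this when
   remapping the DECL_VALUE_EXPR of a member-access dummy variable.  */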
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form
   allows for the variable form to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
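
/* Illustration (derived from the logic above): for a scalar int I in
   '#pragma omp task shared(i)' this returns true -- tasks never use
   copy-in/out since GOMP_task may defer the body past the point where
   GOMP_task returns -- and additionally records I in TASK_SHARED_VARS
   and marks the outer decl addressable so its uses get regimplified
   later.  For the same I on a non-nested '#pragma omp parallel
   shared(i)' it returns false, and copy-in/copy-out is used instead.  */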
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because a task needs
     to take its address.  But we don't need to take address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
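
/* E.g. (sketch): for a by-value field this yields (*.omp_data_i).var,
   and with BY_REF it yields *(*.omp_data_i).var; the MEM_REFs are marked
   TREE_THIS_NOTRAP because the receiver record is always valid when the
   child function runs.  */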
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If VAR is a reference,
       it is possibly shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
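
/* A note on MASK, derived from the uses in this file: bit 0 installs the
   field into CTX->FIELD_MAP/RECORD_TYPE, bit 1 into
   CTX->SFIELD_MAP/SRECORD_TYPE, bit 2 requests the pointer-to-pointer
   type used for array-typed decls (mask 7 in the MAP clause handling),
   and bit 3 keys the maps by &DECL_UID (var) instead of VAR itself.
   So the common install_var_field (decl, by_ref, 3, ctx) installs both
   fields, while mask 11 (8|3) additionally uses the DECL_UID key, which
   build_outer_var_ref later looks up for taskloop lastprivate.  */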
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it here
     so it does not produce corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
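
/* E.g. (a sketch of how this shows up in dumps): in an offloaded region
   the receiver ends up typed roughly as
   'const struct .omp_data_s.N & restrict' -- a restrict-qualified
   reference to the (there const-qualified) record -- letting alias
   analysis assume the incoming record is neither modified nor
   aliased.  */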
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied, as do variables when the ALWAYS
	     modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
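
/* Illustrative net effect (a sketch): for '#pragma omp parallel
   shared(a) firstprivate(b)' with local int A and B, the first pass
   above installs record fields for A (a pointer if A must be shared by
   reference per use_pointer_for_field) and B, plus local replacement
   decls in CTX->CB.DECL_MAP; the second pass then fixes up the types
   and value-exprs of those replacements.  */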
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
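
/* E.g. for a function foo this produces identifiers of the form
   foo._omp_fn.0 or foo._omp_cpyfn.1; the exact suffix numbering is
   chosen by clone_function_name.  */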
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}

/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
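
/* Worked example (following the arithmetic above): for a combined
   'parallel for collapse(2) lastprivate(x)' whose outer bound is not a
   compile-time constant, COUNT is 2 (istart/iend) + 1 (collapse - 1)
   + 1 (lastprivate) = 4, so four _LOOPTEMP_ clauses are prepended to
   the parallel.  */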
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1869 /* Helper function for finish_taskreg_scan, called through walk_tree.
1870 If maybe_lookup_decl_in_outer_context returns non-NULL for some
1871 tree, replace it in the expression. */
1873 static tree
1874 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1876 if (VAR_P (*tp))
1878 omp_context *ctx = (omp_context *) data;
1879 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1880 if (t != *tp)
1882 if (DECL_HAS_VALUE_EXPR_P (t))
1883 t = unshare_expr (DECL_VALUE_EXPR (t));
1884 *tp = t;
1886 *walk_subtrees = 0;
1888 else if (IS_TYPE_OR_DECL_P (*tp))
1889 *walk_subtrees = 0;
1890 return NULL_TREE;
1893 /* If any decls have been made addressable during scan_omp,
1894 adjust their fields if needed, and layout record types
1895 of parallel/task constructs. */
1897 static void
1898 finish_taskreg_scan (omp_context *ctx)
1900 if (ctx->record_type == NULL_TREE)
1901 return;
1903 /* If any task_shared_vars were needed, verify for all
1904 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1905 statements whether use_pointer_for_field has changed
1906 because of that. If it did, update the field types now. */
1907 if (task_shared_vars)
1909 tree c;
1911 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1912 c; c = OMP_CLAUSE_CHAIN (c))
1913 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1914 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1916 tree decl = OMP_CLAUSE_DECL (c);
1918 /* Global variables don't need to be copied;
1919 the receiver side will use them directly. */
1920 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1921 continue;
1922 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1923 || !use_pointer_for_field (decl, ctx))
1924 continue;
1925 tree field = lookup_field (decl, ctx);
1926 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1927 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1928 continue;
1929 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1930 TREE_THIS_VOLATILE (field) = 0;
1931 DECL_USER_ALIGN (field) = 0;
1932 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1933 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1934 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1935 if (ctx->srecord_type)
1937 tree sfield = lookup_sfield (decl, ctx);
1938 TREE_TYPE (sfield) = TREE_TYPE (field);
1939 TREE_THIS_VOLATILE (sfield) = 0;
1940 DECL_USER_ALIGN (sfield) = 0;
1941 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1942 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1943 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1948 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1950 layout_type (ctx->record_type);
1951 fixup_child_record_type (ctx);
1953 else
1955 location_t loc = gimple_location (ctx->stmt);
1956 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1957 /* Move VLA fields to the end. */
1958 p = &TYPE_FIELDS (ctx->record_type);
1959 while (*p)
1960 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1961 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1963 *q = *p;
1964 *p = TREE_CHAIN (*p);
1965 TREE_CHAIN (*q) = NULL_TREE;
1966 q = &TREE_CHAIN (*q);
1968 else
1969 p = &DECL_CHAIN (*p);
1970 *p = vla_fields;
1971 if (gimple_omp_task_taskloop_p (ctx->stmt))
1973 /* Move the fields corresponding to the first and second _looptemp_
1974 clauses to the front. These are filled in by GOMP_taskloop
1975 and thus need to be in specific positions. */
1976 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1977 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1978 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1979 OMP_CLAUSE__LOOPTEMP_);
1980 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1981 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1982 p = &TYPE_FIELDS (ctx->record_type);
1983 while (*p)
1984 if (*p == f1 || *p == f2)
1985 *p = DECL_CHAIN (*p);
1986 else
1987 p = &DECL_CHAIN (*p);
1988 DECL_CHAIN (f1) = f2;
1989 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1990 TYPE_FIELDS (ctx->record_type) = f1;
1991 if (ctx->srecord_type)
1993 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1994 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1995 p = &TYPE_FIELDS (ctx->srecord_type);
1996 while (*p)
1997 if (*p == f1 || *p == f2)
1998 *p = DECL_CHAIN (*p);
1999 else
2000 p = &DECL_CHAIN (*p);
2001 DECL_CHAIN (f1) = f2;
2002 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2003 TYPE_FIELDS (ctx->srecord_type) = f1;
2006 layout_type (ctx->record_type);
2007 fixup_child_record_type (ctx);
2008 if (ctx->srecord_type)
2009 layout_type (ctx->srecord_type);
2010 tree t = fold_convert_loc (loc, long_integer_type_node,
2011 TYPE_SIZE_UNIT (ctx->record_type));
2012 if (TREE_CODE (t) != INTEGER_CST)
2014 t = unshare_expr (t);
2015 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2017 gimple_omp_task_set_arg_size (ctx->stmt, t);
2018 t = build_int_cst (long_integer_type_node,
2019 TYPE_ALIGN_UNIT (ctx->record_type));
2020 gimple_omp_task_set_arg_align (ctx->stmt, t);
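/* A sketch of the reordering above (illustrative, hypothetical
   function):

     void f (int n)
     {
       char buf[n];
       int i = 0;
       #pragma omp task firstprivate (buf, i)
       ...
     }

   All variable-sized fields such as the one for 'buf' are moved
   behind the fixed-size ones before layout; arg_size is then
   TYPE_SIZE_UNIT of the laid-out record, remapped through
   finish_taskreg_remap when it is not a compile-time constant.  */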
2024 /* Find the enclosing offload context. */
2026 static omp_context *
2027 enclosing_target_ctx (omp_context *ctx)
2029 for (; ctx; ctx = ctx->outer)
2030 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2031 break;
2033 return ctx;
2036 /* Return true if ctx is part of an oacc kernels region. */
2038 static bool
2039 ctx_in_oacc_kernels_region (omp_context *ctx)
2041 for (; ctx != NULL; ctx = ctx->outer)
2043 gimple *stmt = ctx->stmt;
2044 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2045 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2046 return true;
2049 return false;
2052 /* Check the parallelism clauses inside a kernels region.
2053 Until kernels handling moves to use the same loop indirection
2054 scheme as parallel, we need to do this checking early. */
2056 static unsigned
2057 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2059 bool checking = true;
2060 unsigned outer_mask = 0;
2061 unsigned this_mask = 0;
2062 bool has_seq = false, has_auto = false;
2064 if (ctx->outer)
2065 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2066 if (!stmt)
2068 checking = false;
2069 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2070 return outer_mask;
2071 stmt = as_a <gomp_for *> (ctx->stmt);
2074 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2076 switch (OMP_CLAUSE_CODE (c))
2078 case OMP_CLAUSE_GANG:
2079 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2080 break;
2081 case OMP_CLAUSE_WORKER:
2082 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2083 break;
2084 case OMP_CLAUSE_VECTOR:
2085 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2086 break;
2087 case OMP_CLAUSE_SEQ:
2088 has_seq = true;
2089 break;
2090 case OMP_CLAUSE_AUTO:
2091 has_auto = true;
2092 break;
2093 default:
2094 break;
2098 if (checking)
2100 if (has_seq && (this_mask || has_auto))
2101 error_at (gimple_location (stmt), "%<seq%> overrides other"
2102 " OpenACC loop specifiers");
2103 else if (has_auto && this_mask)
2104 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2105 " OpenACC loop specifiers");
2107 if (this_mask & outer_mask)
2108 error_at (gimple_location (stmt), "inner loop uses same"
2109 " OpenACC parallelism as containing loop");
2112 return outer_mask | this_mask;
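/* For instance, inside a kernels region

     #pragma acc loop seq gang
     for (i = 0; i < n; i++) ...

   is diagnosed above ('seq' overrides the other specifiers), and

     #pragma acc loop gang
     for (...)
       #pragma acc loop gang
       for (...) ...

   triggers the "inner loop uses same OpenACC parallelism" error,
   since the gang bit is already set in OUTER_MASK.  (Illustrative
   examples, not from the upstream sources.)  */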
2115 /* Scan a GIMPLE_OMP_FOR. */
2117 static omp_context *
2118 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2120 omp_context *ctx;
2121 size_t i;
2122 tree clauses = gimple_omp_for_clauses (stmt);
2124 ctx = new_omp_context (stmt, outer_ctx);
2126 if (is_gimple_omp_oacc (stmt))
2128 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2130 if (!tgt || is_oacc_parallel (tgt))
2131 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2133 char const *check = NULL;
2135 switch (OMP_CLAUSE_CODE (c))
2137 case OMP_CLAUSE_GANG:
2138 check = "gang";
2139 break;
2141 case OMP_CLAUSE_WORKER:
2142 check = "worker";
2143 break;
2145 case OMP_CLAUSE_VECTOR:
2146 check = "vector";
2147 break;
2149 default:
2150 break;
2153 if (check && OMP_CLAUSE_OPERAND (c, 0))
2154 error_at (gimple_location (stmt),
2155 "argument not permitted on %qs clause in"
2156 " OpenACC %<parallel%>", check);
2159 if (tgt && is_oacc_kernels (tgt))
2161 /* Strip out reductions, as they are not handled yet. */
2162 tree *prev_ptr = &clauses;
2164 while (tree probe = *prev_ptr)
2166 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2168 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2169 *prev_ptr = *next_ptr;
2170 else
2171 prev_ptr = next_ptr;
2174 gimple_omp_for_set_clauses (stmt, clauses);
2175 check_oacc_kernel_gwv (stmt, ctx);
2179 scan_sharing_clauses (clauses, ctx);
2181 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2182 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2184 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2185 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2186 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2187 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2189 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2190 return ctx;
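/* For example, in an OpenACC parallel region

     #pragma acc parallel
     #pragma acc loop gang(num:8)
     for (i = 0; i < n; i++) ...

   the gang clause argument is rejected above, while inside a kernels
   region reduction clauses are stripped from the loop before
   check_oacc_kernel_gwv runs.  (Illustrative example.)  */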
2193 /* Duplicate #pragma omp simd, creating one copy for SIMT and another for SIMD. */
2195 static void
2196 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2197 omp_context *outer_ctx)
2199 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2200 gsi_replace (gsi, bind, false);
2201 gimple_seq seq = NULL;
2202 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2203 tree cond = create_tmp_var_raw (integer_type_node);
2204 DECL_CONTEXT (cond) = current_function_decl;
2205 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2206 gimple_bind_set_vars (bind, cond);
2207 gimple_call_set_lhs (g, cond);
2208 gimple_seq_add_stmt (&seq, g);
2209 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2210 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2211 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2212 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2213 gimple_seq_add_stmt (&seq, g);
2214 g = gimple_build_label (lab1);
2215 gimple_seq_add_stmt (&seq, g);
2216 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2217 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2218 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2219 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2220 gimple_omp_for_set_clauses (new_stmt, clause);
2221 gimple_seq_add_stmt (&seq, new_stmt);
2222 g = gimple_build_goto (lab3);
2223 gimple_seq_add_stmt (&seq, g);
2224 g = gimple_build_label (lab2);
2225 gimple_seq_add_stmt (&seq, g);
2226 gimple_seq_add_stmt (&seq, stmt);
2227 g = gimple_build_label (lab3);
2228 gimple_seq_add_stmt (&seq, g);
2229 gimple_bind_set_body (bind, seq);
2230 update_stmt (bind);
2231 scan_omp_for (new_stmt, outer_ctx);
2232 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
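/* Schematically, the bind built above has the shape

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop with an added _simt_ clause>
           goto lab3;
     lab2: <original loop>
     lab3: ;

   Both copies are scanned; the SIMD copy records its SIMT twin in
   simt_stmt for later lowering.  */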
2235 /* Scan an OpenMP sections directive. */
2237 static void
2238 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2240 omp_context *ctx;
2242 ctx = new_omp_context (stmt, outer_ctx);
2243 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2244 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2247 /* Scan an OpenMP single directive. */
2249 static void
2250 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2252 omp_context *ctx;
2253 tree name;
2255 ctx = new_omp_context (stmt, outer_ctx);
2256 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2257 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2258 name = create_tmp_var_name (".omp_copy_s");
2259 name = build_decl (gimple_location (stmt),
2260 TYPE_DECL, name, ctx->record_type);
2261 TYPE_NAME (ctx->record_type) = name;
2263 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2264 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2266 if (TYPE_FIELDS (ctx->record_type) == NULL)
2267 ctx->record_type = NULL;
2268 else
2269 layout_type (ctx->record_type);
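/* For example,

     #pragma omp single copyprivate (a)

   gets a field for 'a' in the .omp_copy_s record built above, which
   is used to broadcast the value from the thread that executed the
   single region; a single without copyprivate data leaves the record
   empty and it is dropped.  (Illustrative example.)  */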
2272 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2273 used in the corresponding offloaded function are restrict. */
2275 static bool
2276 omp_target_base_pointers_restrict_p (tree clauses)
2278 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2279 used by OpenACC. */
2280 if (flag_openacc == 0)
2281 return false;
2283 /* I. Basic example:
2285 void foo (void)
2287 unsigned int a[2], b[2];
2289 #pragma acc kernels \
2290 copyout (a) \
2291 copyout (b)
2293 a[0] = 0;
2294 b[0] = 1;
2298 After gimplification, we have:
2300 #pragma omp target oacc_kernels \
2301 map(force_from:a [len: 8]) \
2302 map(force_from:b [len: 8])
2304 a[0] = 0;
2305 b[0] = 1;
2308 Because both mappings have the force prefix, we know that they will be
2309 allocated when calling the corresponding offloaded function, which means we
2310 can mark the base pointers for a and b in the offloaded function as
2311 restrict. */
2313 tree c;
2314 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2316 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2317 return false;
2319 switch (OMP_CLAUSE_MAP_KIND (c))
2321 case GOMP_MAP_FORCE_ALLOC:
2322 case GOMP_MAP_FORCE_TO:
2323 case GOMP_MAP_FORCE_FROM:
2324 case GOMP_MAP_FORCE_TOFROM:
2325 break;
2326 default:
2327 return false;
2331 return true;
2334 /* Scan a GIMPLE_OMP_TARGET. */
2336 static void
2337 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2339 omp_context *ctx;
2340 tree name;
2341 bool offloaded = is_gimple_omp_offloaded (stmt);
2342 tree clauses = gimple_omp_target_clauses (stmt);
2344 ctx = new_omp_context (stmt, outer_ctx);
2345 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2346 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2347 name = create_tmp_var_name (".omp_data_t");
2348 name = build_decl (gimple_location (stmt),
2349 TYPE_DECL, name, ctx->record_type);
2350 DECL_ARTIFICIAL (name) = 1;
2351 DECL_NAMELESS (name) = 1;
2352 TYPE_NAME (ctx->record_type) = name;
2353 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2355 bool base_pointers_restrict = false;
2356 if (offloaded)
2358 create_omp_child_function (ctx, false);
2359 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2361 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2362 if (base_pointers_restrict
2363 && dump_file && (dump_flags & TDF_DETAILS))
2364 fprintf (dump_file,
2365 "Base pointers in offloaded function are restrict\n");
2368 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2369 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2371 if (TYPE_FIELDS (ctx->record_type) == NULL)
2372 ctx->record_type = ctx->receiver_decl = NULL;
2373 else
2375 TYPE_FIELDS (ctx->record_type)
2376 = nreverse (TYPE_FIELDS (ctx->record_type));
2377 if (flag_checking)
2379 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2380 for (tree field = TYPE_FIELDS (ctx->record_type);
2381 field;
2382 field = DECL_CHAIN (field))
2383 gcc_assert (DECL_ALIGN (field) == align);
2385 layout_type (ctx->record_type);
2386 if (offloaded)
2387 fixup_child_record_type (ctx);
2391 /* Scan an OpenMP teams directive. */
2393 static void
2394 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2396 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2397 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2398 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2401 /* Check nesting restrictions. */
2402 static bool
2403 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2405 tree c;
2407 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2408 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2409 the original copy of its contents. */
2410 return true;
2412 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2413 inside an OpenACC CTX. */
2414 if (!(is_gimple_omp (stmt)
2415 && is_gimple_omp_oacc (stmt))
2416 /* Except for atomic codes that we share with OpenMP. */
2417 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2418 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2420 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2422 error_at (gimple_location (stmt),
2423 "non-OpenACC construct inside of OpenACC routine");
2424 return false;
2426 else
2427 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2428 if (is_gimple_omp (octx->stmt)
2429 && is_gimple_omp_oacc (octx->stmt))
2431 error_at (gimple_location (stmt),
2432 "non-OpenACC construct inside of OpenACC region");
2433 return false;
2437 if (ctx != NULL)
2439 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2440 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2442 c = NULL_TREE;
2443 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2445 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2446 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2448 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2449 && (ctx->outer == NULL
2450 || !gimple_omp_for_combined_into_p (ctx->stmt)
2451 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2452 || (gimple_omp_for_kind (ctx->outer->stmt)
2453 != GF_OMP_FOR_KIND_FOR)
2454 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2456 error_at (gimple_location (stmt),
2457 "%<ordered simd threads%> must be closely "
2458 "nested inside of %<for simd%> region");
2459 return false;
2461 return true;
2464 error_at (gimple_location (stmt),
2465 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2466 " may not be nested inside %<simd%> region");
2467 return false;
2469 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2471 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2472 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2473 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2474 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2476 error_at (gimple_location (stmt),
2477 "only %<distribute%> or %<parallel%> regions are "
2478 "allowed to be strictly nested inside %<teams%> "
2479 "region");
2480 return false;
2484 switch (gimple_code (stmt))
2486 case GIMPLE_OMP_FOR:
2487 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2488 return true;
2489 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2491 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2493 error_at (gimple_location (stmt),
2494 "%<distribute%> region must be strictly nested "
2495 "inside %<teams%> construct");
2496 return false;
2498 return true;
2500 /* We split a taskloop into a task with a nested taskloop inside it. */
2501 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2502 return true;
2503 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2505 bool ok = false;
2507 if (ctx)
2508 switch (gimple_code (ctx->stmt))
2510 case GIMPLE_OMP_FOR:
2511 ok = (gimple_omp_for_kind (ctx->stmt)
2512 == GF_OMP_FOR_KIND_OACC_LOOP);
2513 break;
2515 case GIMPLE_OMP_TARGET:
2516 switch (gimple_omp_target_kind (ctx->stmt))
2518 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2519 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2520 ok = true;
2521 break;
2523 default:
2524 break;
2527 default:
2528 break;
2530 else if (oacc_get_fn_attrib (current_function_decl))
2531 ok = true;
2532 if (!ok)
2534 error_at (gimple_location (stmt),
2535 "OpenACC loop directive must be associated with"
2536 " an OpenACC compute region");
2537 return false;
2540 /* FALLTHRU */
2541 case GIMPLE_CALL:
2542 if (is_gimple_call (stmt)
2543 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2544 == BUILT_IN_GOMP_CANCEL
2545 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2546 == BUILT_IN_GOMP_CANCELLATION_POINT))
2548 const char *bad = NULL;
2549 const char *kind = NULL;
2550 const char *construct
2551 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2552 == BUILT_IN_GOMP_CANCEL)
2553 ? "#pragma omp cancel"
2554 : "#pragma omp cancellation point";
2555 if (ctx == NULL)
2557 error_at (gimple_location (stmt), "orphaned %qs construct",
2558 construct);
2559 return false;
2561 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2562 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2563 : 0)
2565 case 1:
2566 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2567 bad = "#pragma omp parallel";
2568 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2569 == BUILT_IN_GOMP_CANCEL
2570 && !integer_zerop (gimple_call_arg (stmt, 1)))
2571 ctx->cancellable = true;
2572 kind = "parallel";
2573 break;
2574 case 2:
2575 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2576 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2577 bad = "#pragma omp for";
2578 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2579 == BUILT_IN_GOMP_CANCEL
2580 && !integer_zerop (gimple_call_arg (stmt, 1)))
2582 ctx->cancellable = true;
2583 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2584 OMP_CLAUSE_NOWAIT))
2585 warning_at (gimple_location (stmt), 0,
2586 "%<#pragma omp cancel for%> inside "
2587 "%<nowait%> for construct");
2588 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2589 OMP_CLAUSE_ORDERED))
2590 warning_at (gimple_location (stmt), 0,
2591 "%<#pragma omp cancel for%> inside "
2592 "%<ordered%> for construct");
2594 kind = "for";
2595 break;
2596 case 4:
2597 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2598 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2599 bad = "#pragma omp sections";
2600 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2601 == BUILT_IN_GOMP_CANCEL
2602 && !integer_zerop (gimple_call_arg (stmt, 1)))
2604 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2606 ctx->cancellable = true;
2607 if (omp_find_clause (gimple_omp_sections_clauses
2608 (ctx->stmt),
2609 OMP_CLAUSE_NOWAIT))
2610 warning_at (gimple_location (stmt), 0,
2611 "%<#pragma omp cancel sections%> inside "
2612 "%<nowait%> sections construct");
2614 else
2616 gcc_assert (ctx->outer
2617 && gimple_code (ctx->outer->stmt)
2618 == GIMPLE_OMP_SECTIONS);
2619 ctx->outer->cancellable = true;
2620 if (omp_find_clause (gimple_omp_sections_clauses
2621 (ctx->outer->stmt),
2622 OMP_CLAUSE_NOWAIT))
2623 warning_at (gimple_location (stmt), 0,
2624 "%<#pragma omp cancel sections%> inside "
2625 "%<nowait%> sections construct");
2628 kind = "sections";
2629 break;
2630 case 8:
2631 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2632 bad = "#pragma omp task";
2633 else
2635 for (omp_context *octx = ctx->outer;
2636 octx; octx = octx->outer)
2638 switch (gimple_code (octx->stmt))
2640 case GIMPLE_OMP_TASKGROUP:
2641 break;
2642 case GIMPLE_OMP_TARGET:
2643 if (gimple_omp_target_kind (octx->stmt)
2644 != GF_OMP_TARGET_KIND_REGION)
2645 continue;
2646 /* FALLTHRU */
2647 case GIMPLE_OMP_PARALLEL:
2648 case GIMPLE_OMP_TEAMS:
2649 error_at (gimple_location (stmt),
2650 "%<%s taskgroup%> construct not closely "
2651 "nested inside of %<taskgroup%> region",
2652 construct);
2653 return false;
2654 default:
2655 continue;
2657 break;
2659 ctx->cancellable = true;
2661 kind = "taskgroup";
2662 break;
2663 default:
2664 error_at (gimple_location (stmt), "invalid arguments");
2665 return false;
2667 if (bad)
2669 error_at (gimple_location (stmt),
2670 "%<%s %s%> construct not closely nested inside of %qs",
2671 construct, kind, bad);
2672 return false;
2675 /* FALLTHRU */
2676 case GIMPLE_OMP_SECTIONS:
2677 case GIMPLE_OMP_SINGLE:
2678 for (; ctx != NULL; ctx = ctx->outer)
2679 switch (gimple_code (ctx->stmt))
2681 case GIMPLE_OMP_FOR:
2682 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2683 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2684 break;
2685 /* FALLTHRU */
2686 case GIMPLE_OMP_SECTIONS:
2687 case GIMPLE_OMP_SINGLE:
2688 case GIMPLE_OMP_ORDERED:
2689 case GIMPLE_OMP_MASTER:
2690 case GIMPLE_OMP_TASK:
2691 case GIMPLE_OMP_CRITICAL:
2692 if (is_gimple_call (stmt))
2694 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2695 != BUILT_IN_GOMP_BARRIER)
2696 return true;
2697 error_at (gimple_location (stmt),
2698 "barrier region may not be closely nested inside "
2699 "of work-sharing, %<critical%>, %<ordered%>, "
2700 "%<master%>, explicit %<task%> or %<taskloop%> "
2701 "region");
2702 return false;
2704 error_at (gimple_location (stmt),
2705 "work-sharing region may not be closely nested inside "
2706 "of work-sharing, %<critical%>, %<ordered%>, "
2707 "%<master%>, explicit %<task%> or %<taskloop%> region");
2708 return false;
2709 case GIMPLE_OMP_PARALLEL:
2710 case GIMPLE_OMP_TEAMS:
2711 return true;
2712 case GIMPLE_OMP_TARGET:
2713 if (gimple_omp_target_kind (ctx->stmt)
2714 == GF_OMP_TARGET_KIND_REGION)
2715 return true;
2716 break;
2717 default:
2718 break;
2720 break;
2721 case GIMPLE_OMP_MASTER:
2722 for (; ctx != NULL; ctx = ctx->outer)
2723 switch (gimple_code (ctx->stmt))
2725 case GIMPLE_OMP_FOR:
2726 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2727 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2728 break;
2729 /* FALLTHRU */
2730 case GIMPLE_OMP_SECTIONS:
2731 case GIMPLE_OMP_SINGLE:
2732 case GIMPLE_OMP_TASK:
2733 error_at (gimple_location (stmt),
2734 "%<master%> region may not be closely nested inside "
2735 "of work-sharing, explicit %<task%> or %<taskloop%> "
2736 "region");
2737 return false;
2738 case GIMPLE_OMP_PARALLEL:
2739 case GIMPLE_OMP_TEAMS:
2740 return true;
2741 case GIMPLE_OMP_TARGET:
2742 if (gimple_omp_target_kind (ctx->stmt)
2743 == GF_OMP_TARGET_KIND_REGION)
2744 return true;
2745 break;
2746 default:
2747 break;
2749 break;
2750 case GIMPLE_OMP_TASK:
2751 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2752 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2753 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2754 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2756 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2757 error_at (OMP_CLAUSE_LOCATION (c),
2758 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2759 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2760 return false;
2762 break;
2763 case GIMPLE_OMP_ORDERED:
2764 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2765 c; c = OMP_CLAUSE_CHAIN (c))
2767 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2769 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2770 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2771 continue;
2773 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2774 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2775 || kind == OMP_CLAUSE_DEPEND_SINK)
2777 tree oclause;
2778 /* Look for containing ordered(N) loop. */
2779 if (ctx == NULL
2780 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2781 || (oclause
2782 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2783 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2785 error_at (OMP_CLAUSE_LOCATION (c),
2786 "%<ordered%> construct with %<depend%> clause "
2787 "must be closely nested inside an %<ordered%> "
2788 "loop");
2789 return false;
2791 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2793 error_at (OMP_CLAUSE_LOCATION (c),
2794 "%<ordered%> construct with %<depend%> clause "
2795 "must be closely nested inside a loop with "
2796 "%<ordered%> clause with a parameter");
2797 return false;
2800 else
2802 error_at (OMP_CLAUSE_LOCATION (c),
2803 "invalid depend kind in omp %<ordered%> %<depend%>");
2804 return false;
2807 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2808 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2810 /* ordered simd must be closely nested inside of a simd region,
2811 and a simd region must not encounter constructs other than
2812 ordered simd; therefore ordered simd may be either orphaned,
2813 or ctx->stmt must be simd. The latter case is handled already
2814 earlier. */
2815 if (ctx != NULL)
2817 error_at (gimple_location (stmt),
2818 "%<ordered%> %<simd%> must be closely nested inside "
2819 "%<simd%> region");
2820 return false;
2823 for (; ctx != NULL; ctx = ctx->outer)
2824 switch (gimple_code (ctx->stmt))
2826 case GIMPLE_OMP_CRITICAL:
2827 case GIMPLE_OMP_TASK:
2828 case GIMPLE_OMP_ORDERED:
2829 ordered_in_taskloop:
2830 error_at (gimple_location (stmt),
2831 "%<ordered%> region may not be closely nested inside "
2832 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2833 "%<taskloop%> region");
2834 return false;
2835 case GIMPLE_OMP_FOR:
2836 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2837 goto ordered_in_taskloop;
2838 tree o;
2839 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2840 OMP_CLAUSE_ORDERED);
2841 if (o == NULL)
2843 error_at (gimple_location (stmt),
2844 "%<ordered%> region must be closely nested inside "
2845 "a loop region with an %<ordered%> clause");
2846 return false;
2848 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
2849 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
2851 error_at (gimple_location (stmt),
2852 "%<ordered%> region without %<depend%> clause may "
2853 "not be closely nested inside a loop region with "
2854 "an %<ordered%> clause with a parameter");
2855 return false;
2857 return true;
2858 case GIMPLE_OMP_TARGET:
2859 if (gimple_omp_target_kind (ctx->stmt)
2860 != GF_OMP_TARGET_KIND_REGION)
2861 break;
2862 /* FALLTHRU */
2863 case GIMPLE_OMP_PARALLEL:
2864 case GIMPLE_OMP_TEAMS:
2865 error_at (gimple_location (stmt),
2866 "%<ordered%> region must be closely nested inside "
2867 "a loop region with an %<ordered%> clause");
2868 return false;
2869 default:
2870 break;
2872 break;
2873 case GIMPLE_OMP_CRITICAL:
2875 tree this_stmt_name
2876 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2877 for (; ctx != NULL; ctx = ctx->outer)
2878 if (gomp_critical *other_crit
2879 = dyn_cast <gomp_critical *> (ctx->stmt))
2880 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2882 error_at (gimple_location (stmt),
2883 "%<critical%> region may not be nested inside "
2884 "a %<critical%> region with the same name");
2885 return false;
2888 break;
2889 case GIMPLE_OMP_TEAMS:
2890 if (ctx == NULL
2891 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2892 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2894 error_at (gimple_location (stmt),
2895 "%<teams%> construct not closely nested inside of "
2896 "%<target%> construct");
2897 return false;
2899 break;
2900 case GIMPLE_OMP_TARGET:
2901 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2902 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2903 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2904 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2906 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2907 error_at (OMP_CLAUSE_LOCATION (c),
2908 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2909 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2910 return false;
2912 if (is_gimple_omp_offloaded (stmt)
2913 && oacc_get_fn_attrib (cfun->decl) != NULL)
2915 error_at (gimple_location (stmt),
2916 "OpenACC region inside of OpenACC routine, nested "
2917 "parallelism not supported yet");
2918 return false;
2920 for (; ctx != NULL; ctx = ctx->outer)
2922 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2924 if (is_gimple_omp (stmt)
2925 && is_gimple_omp_oacc (stmt)
2926 && is_gimple_omp (ctx->stmt))
2928 error_at (gimple_location (stmt),
2929 "OpenACC construct inside of non-OpenACC region");
2930 return false;
2932 continue;
2935 const char *stmt_name, *ctx_stmt_name;
2936 switch (gimple_omp_target_kind (stmt))
2938 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2939 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2940 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2941 case GF_OMP_TARGET_KIND_ENTER_DATA:
2942 stmt_name = "target enter data"; break;
2943 case GF_OMP_TARGET_KIND_EXIT_DATA:
2944 stmt_name = "target exit data"; break;
2945 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2946 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2947 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2948 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2949 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2950 stmt_name = "enter/exit data"; break;
2951 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2952 break;
2953 default: gcc_unreachable ();
2955 switch (gimple_omp_target_kind (ctx->stmt))
2957 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2958 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2959 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2960 ctx_stmt_name = "parallel"; break;
2961 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2962 ctx_stmt_name = "kernels"; break;
2963 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2964 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2965 ctx_stmt_name = "host_data"; break;
2966 default: gcc_unreachable ();
2969 /* OpenACC/OpenMP mismatch? */
2970 if (is_gimple_omp_oacc (stmt)
2971 != is_gimple_omp_oacc (ctx->stmt))
2973 error_at (gimple_location (stmt),
2974 "%s %qs construct inside of %s %qs region",
2975 (is_gimple_omp_oacc (stmt)
2976 ? "OpenACC" : "OpenMP"), stmt_name,
2977 (is_gimple_omp_oacc (ctx->stmt)
2978 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2979 return false;
2981 if (is_gimple_omp_offloaded (ctx->stmt))
2983 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2984 if (is_gimple_omp_oacc (ctx->stmt))
2986 error_at (gimple_location (stmt),
2987 "%qs construct inside of %qs region",
2988 stmt_name, ctx_stmt_name);
2989 return false;
2991 else
2993 warning_at (gimple_location (stmt), 0,
2994 "%qs construct inside of %qs region",
2995 stmt_name, ctx_stmt_name);
2999 break;
3000 default:
3001 break;
3003 return true;
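/* Two illustrative rejections checked above:

     #pragma omp parallel
     #pragma omp critical
     {
       #pragma omp barrier
     }

   hits the 'barrier region may not be closely nested inside of
   work-sharing, critical, ...' diagnostic, and a teams construct
   anywhere other than directly inside target:

     #pragma omp parallel
     #pragma omp teams
     ...

   is rejected with 'teams construct not closely nested inside of
   target construct'.  (Editorial examples, not from the sources.)  */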
3007 /* Helper function for scan_omp.
3009 Callback for walk_tree or operators in walk_gimple_stmt used to
3010 scan for OMP directives in TP. */
3012 static tree
3013 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3015 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3016 omp_context *ctx = (omp_context *) wi->info;
3017 tree t = *tp;
3019 switch (TREE_CODE (t))
3021 case VAR_DECL:
3022 case PARM_DECL:
3023 case LABEL_DECL:
3024 case RESULT_DECL:
3025 if (ctx)
3027 tree repl = remap_decl (t, &ctx->cb);
3028 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3029 *tp = repl;
3031 break;
3033 default:
3034 if (ctx && TYPE_P (t))
3035 *tp = remap_type (t, &ctx->cb);
3036 else if (!DECL_P (t))
3038 *walk_subtrees = 1;
3039 if (ctx)
3041 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3042 if (tem != TREE_TYPE (t))
3044 if (TREE_CODE (t) == INTEGER_CST)
3045 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3046 else
3047 TREE_TYPE (t) = tem;
3051 break;
3054 return NULL_TREE;
3057 /* Return true if FNDECL is a setjmp or a longjmp. */
3059 static bool
3060 setjmp_or_longjmp_p (const_tree fndecl)
3062 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3063 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3064 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3065 return true;
3067 tree declname = DECL_NAME (fndecl);
3068 if (!declname)
3069 return false;
3070 const char *name = IDENTIFIER_POINTER (declname);
3071 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
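/* Used by scan_omp_1_stmt below: code such as

     #pragma omp simd
     for (i = 0; i < n; i++)
       if (setjmp (env))   /* 'env' being some jmp_buf */
         ...

   is diagnosed with 'setjmp/longjmp inside simd construct'.  The
   name-based check above also catches setjmp/longjmp functions that
   are not recognized as built-ins.  (Illustrative example.)  */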
3075 /* Helper function for scan_omp.
3077 Callback for walk_gimple_stmt used to scan for OMP directives in
3078 the current statement in GSI. */
3080 static tree
3081 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3082 struct walk_stmt_info *wi)
3084 gimple *stmt = gsi_stmt (*gsi);
3085 omp_context *ctx = (omp_context *) wi->info;
3087 if (gimple_has_location (stmt))
3088 input_location = gimple_location (stmt);
3090 /* Check the nesting restrictions. */
3091 bool remove = false;
3092 if (is_gimple_omp (stmt))
3093 remove = !check_omp_nesting_restrictions (stmt, ctx);
3094 else if (is_gimple_call (stmt))
3096 tree fndecl = gimple_call_fndecl (stmt);
3097 if (fndecl)
3099 if (setjmp_or_longjmp_p (fndecl)
3100 && ctx
3101 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3102 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3104 remove = true;
3105 error_at (gimple_location (stmt),
3106 "setjmp/longjmp inside simd construct");
3108 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3109 switch (DECL_FUNCTION_CODE (fndecl))
3111 case BUILT_IN_GOMP_BARRIER:
3112 case BUILT_IN_GOMP_CANCEL:
3113 case BUILT_IN_GOMP_CANCELLATION_POINT:
3114 case BUILT_IN_GOMP_TASKYIELD:
3115 case BUILT_IN_GOMP_TASKWAIT:
3116 case BUILT_IN_GOMP_TASKGROUP_START:
3117 case BUILT_IN_GOMP_TASKGROUP_END:
3118 remove = !check_omp_nesting_restrictions (stmt, ctx);
3119 break;
3120 default:
3121 break;
3125 if (remove)
3127 stmt = gimple_build_nop ();
3128 gsi_replace (gsi, stmt, false);
3131 *handled_ops_p = true;
3133 switch (gimple_code (stmt))
3135 case GIMPLE_OMP_PARALLEL:
3136 taskreg_nesting_level++;
3137 scan_omp_parallel (gsi, ctx);
3138 taskreg_nesting_level--;
3139 break;
3141 case GIMPLE_OMP_TASK:
3142 taskreg_nesting_level++;
3143 scan_omp_task (gsi, ctx);
3144 taskreg_nesting_level--;
3145 break;
3147 case GIMPLE_OMP_FOR:
3148 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3149 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3150 && omp_maybe_offloaded_ctx (ctx)
3151 && omp_max_simt_vf ())
3152 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3153 else
3154 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3155 break;
3157 case GIMPLE_OMP_SECTIONS:
3158 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3159 break;
3161 case GIMPLE_OMP_SINGLE:
3162 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3163 break;
3165 case GIMPLE_OMP_SECTION:
3166 case GIMPLE_OMP_MASTER:
3167 case GIMPLE_OMP_TASKGROUP:
3168 case GIMPLE_OMP_ORDERED:
3169 case GIMPLE_OMP_CRITICAL:
3170 case GIMPLE_OMP_GRID_BODY:
3171 ctx = new_omp_context (stmt, ctx);
3172 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3173 break;
3175 case GIMPLE_OMP_TARGET:
3176 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3177 break;
3179 case GIMPLE_OMP_TEAMS:
3180 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3181 break;
3183 case GIMPLE_BIND:
3185 tree var;
3187 *handled_ops_p = false;
3188 if (ctx)
3189 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3190 var ;
3191 var = DECL_CHAIN (var))
3192 insert_decl_map (&ctx->cb, var, var);
3194 break;
3195 default:
3196 *handled_ops_p = false;
3197 break;
3200 return NULL_TREE;
3204 /* Scan all the statements starting at the current statement. CTX
3205 contains context information about the OMP directives and
3206 clauses found during the scan. */
3208 static void
3209 scan_omp (gimple_seq *body_p, omp_context *ctx)
3211 location_t saved_location;
3212 struct walk_stmt_info wi;
3214 memset (&wi, 0, sizeof (wi));
3215 wi.info = ctx;
3216 wi.want_locations = true;
3218 saved_location = input_location;
3219 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3220 input_location = saved_location;
3223 /* Re-gimplification and code generation routines. */
3225 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3226 of BIND if in a method. */
3228 static void
3229 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3231 if (DECL_ARGUMENTS (current_function_decl)
3232 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3233 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3234 == POINTER_TYPE))
3236 tree vars = gimple_bind_vars (bind);
3237 for (tree *pvar = &vars; *pvar; )
3238 if (omp_member_access_dummy_var (*pvar))
3239 *pvar = DECL_CHAIN (*pvar);
3240 else
3241 pvar = &DECL_CHAIN (*pvar);
3242 gimple_bind_set_vars (bind, vars);
3246 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3247 block and its subblocks. */
3249 static void
3250 remove_member_access_dummy_vars (tree block)
3252 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3253 if (omp_member_access_dummy_var (*pvar))
3254 *pvar = DECL_CHAIN (*pvar);
3255 else
3256 pvar = &DECL_CHAIN (*pvar);
3258 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3259 remove_member_access_dummy_vars (block);
3262 /* If a context was created for STMT when it was scanned, return it. */
3264 static omp_context *
3265 maybe_lookup_ctx (gimple *stmt)
3267 splay_tree_node n;
3268 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3269 return n ? (omp_context *) n->value : NULL;
3273 /* Find the mapping for DECL in CTX or the immediately enclosing
3274 context that has a mapping for DECL.
3276 If CTX is a nested parallel directive, we may have to use the decl
3277 mappings created in CTX's parent context. Suppose that we have the
3278 following parallel nesting (variable UIDs shown for clarity):
3280 iD.1562 = 0;
3281 #omp parallel shared(iD.1562) -> outer parallel
3282 iD.1562 = iD.1562 + 1;
3284 #omp parallel shared (iD.1562) -> inner parallel
3285 iD.1562 = iD.1562 - 1;
3287 Each parallel structure will create a distinct .omp_data_s structure
3288 for copying iD.1562 in/out of the directive:
3290 outer parallel .omp_data_s.1.i -> iD.1562
3291 inner parallel .omp_data_s.2.i -> iD.1562
3293 A shared variable mapping will produce a copy-out operation before
3294 the parallel directive and a copy-in operation after it. So, in
3295 this case we would have:
3297 iD.1562 = 0;
3298 .omp_data_o.1.i = iD.1562;
3299 #omp parallel shared(iD.1562) -> outer parallel
3300 .omp_data_i.1 = &.omp_data_o.1
3301 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3303 .omp_data_o.2.i = iD.1562; -> **
3304 #omp parallel shared(iD.1562) -> inner parallel
3305 .omp_data_i.2 = &.omp_data_o.2
3306 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3309 ** This is a problem. The symbol iD.1562 cannot be referenced
3310 inside the body of the outer parallel region. But since we are
3311 emitting this copy operation while expanding the inner parallel
3312 directive, we need to access the CTX structure of the outer
3313 parallel directive to get the correct mapping:
3315 .omp_data_o.2.i = .omp_data_i.1->i
3317 Since there may be other workshare or parallel directives enclosing
3318 the parallel directive, it may be necessary to walk up the context
3319 parent chain. This is not a problem in general because nested
3320 parallelism happens only rarely. */
3322 static tree
3323 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3325 tree t;
3326 omp_context *up;
3328 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3329 t = maybe_lookup_decl (decl, up);
3331 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3333 return t ? t : decl;
3337 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3338 in outer contexts. */
3340 static tree
3341 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3343 tree t = NULL;
3344 omp_context *up;
3346 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3347 t = maybe_lookup_decl (decl, up);
3349 return t ? t : decl;
3353 /* Construct the initialization value for reduction operation OP. */
3355 tree
3356 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3358 switch (op)
3360 case PLUS_EXPR:
3361 case MINUS_EXPR:
3362 case BIT_IOR_EXPR:
3363 case BIT_XOR_EXPR:
3364 case TRUTH_OR_EXPR:
3365 case TRUTH_ORIF_EXPR:
3366 case TRUTH_XOR_EXPR:
3367 case NE_EXPR:
3368 return build_zero_cst (type);
3370 case MULT_EXPR:
3371 case TRUTH_AND_EXPR:
3372 case TRUTH_ANDIF_EXPR:
3373 case EQ_EXPR:
3374 return fold_convert_loc (loc, type, integer_one_node);
3376 case BIT_AND_EXPR:
3377 return fold_convert_loc (loc, type, integer_minus_one_node);
3379 case MAX_EXPR:
3380 if (SCALAR_FLOAT_TYPE_P (type))
3382 REAL_VALUE_TYPE max, min;
3383 if (HONOR_INFINITIES (type))
3385 real_inf (&max);
3386 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3388 else
3389 real_maxval (&min, 1, TYPE_MODE (type));
3390 return build_real (type, min);
3392 else if (POINTER_TYPE_P (type))
3394 wide_int min
3395 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3396 return wide_int_to_tree (type, min);
3398 else
3400 gcc_assert (INTEGRAL_TYPE_P (type));
3401 return TYPE_MIN_VALUE (type);
3404 case MIN_EXPR:
3405 if (SCALAR_FLOAT_TYPE_P (type))
3407 REAL_VALUE_TYPE max;
3408 if (HONOR_INFINITIES (type))
3409 real_inf (&max);
3410 else
3411 real_maxval (&max, 0, TYPE_MODE (type));
3412 return build_real (type, max);
3414 else if (POINTER_TYPE_P (type))
3416 wide_int max
3417 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3418 return wide_int_to_tree (type, max);
3420 else
3422 gcc_assert (INTEGRAL_TYPE_P (type));
3423 return TYPE_MAX_VALUE (type);
3426 default:
3427 gcc_unreachable ();
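/* The resulting identity values, for illustration:

     PLUS_EXPR, int     ->  0
     MULT_EXPR, int     ->  1
     BIT_AND_EXPR, int  -> -1        (all bits set)
     MIN_EXPR, int      ->  INT_MAX
     MAX_EXPR, double   -> -inf      (-DBL_MAX if the target does not
                                      honor infinities)  */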
3431 /* Construct the initialization value for reduction CLAUSE. */
3433 tree
3434 omp_reduction_init (tree clause, tree type)
3436 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3437 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3440 /* Return the alignment to be assumed for the var in CLAUSE, which should be
3441 OMP_CLAUSE_ALIGNED. */
3443 static tree
3444 omp_clause_aligned_alignment (tree clause)
3446 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3447 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3449 /* Otherwise return an implementation-defined alignment. */
3450 unsigned int al = 1;
3451 opt_scalar_mode mode_iter;
3452 auto_vector_sizes sizes;
3453 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3454 poly_uint64 vs = 0;
3455 for (unsigned int i = 0; i < sizes.length (); ++i)
3456 vs = ordered_max (vs, sizes[i]);
3457 static enum mode_class classes[]
3458 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3459 for (int i = 0; i < 4; i += 2)
3460 /* The for loop above dictates that we only walk through scalar classes. */
3461 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3463 scalar_mode mode = mode_iter.require ();
3464 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3465 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3466 continue;
3467 while (maybe_ne (vs, 0U)
3468 && known_lt (GET_MODE_SIZE (vmode), vs)
3469 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3470 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3472 tree type = lang_hooks.types.type_for_mode (mode, 1);
3473 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3474 continue;
3475 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3476 GET_MODE_SIZE (mode));
3477 type = build_vector_type (type, nelts);
3478 if (TYPE_MODE (type) != vmode)
3479 continue;
3480 if (TYPE_ALIGN_UNIT (type) > al)
3481 al = TYPE_ALIGN_UNIT (type);
3483 return build_int_cst (integer_type_node, al);
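/* For example, with

     #pragma omp simd aligned (p)

   and no explicit alignment, the loop above settles on the widest
   vector mode the target autovectorizes with; on a hypothetical
   target preferring 256-bit vectors this yields 32.  */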
3487 /* This structure is part of the interface between lower_rec_simd_input_clauses
3488 and lower_rec_input_clauses. */
3490 struct omplow_simd_context {
3491 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3492 tree idx;
3493 tree lane;
3494 vec<tree, va_heap> simt_eargs;
3495 gimple_seq simt_dlist;
3496 poly_uint64_pod max_vf;
3497 bool is_simt;
3500 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3501 privatization. */
3503 static bool
3504 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3505 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3507 if (known_eq (sctx->max_vf, 0U))
3509 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3510 if (maybe_gt (sctx->max_vf, 1U))
3512 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3513 OMP_CLAUSE_SAFELEN);
3514 if (c)
3516 poly_uint64 safe_len;
3517 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3518 || maybe_lt (safe_len, 1U))
3519 sctx->max_vf = 1;
3520 else
3521 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3524 if (maybe_gt (sctx->max_vf, 1U))
3526 sctx->idx = create_tmp_var (unsigned_type_node);
3527 sctx->lane = create_tmp_var (unsigned_type_node);
3530 if (known_eq (sctx->max_vf, 1U))
3531 return false;
3533 if (sctx->is_simt)
3535 if (is_gimple_reg (new_var))
3537 ivar = lvar = new_var;
3538 return true;
3540 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3541 ivar = lvar = create_tmp_var (type);
3542 TREE_ADDRESSABLE (ivar) = 1;
3543 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3544 NULL, DECL_ATTRIBUTES (ivar));
3545 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3546 tree clobber = build_constructor (type, NULL);
3547 TREE_THIS_VOLATILE (clobber) = 1;
3548 gimple *g = gimple_build_assign (ivar, clobber);
3549 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3551 else
3553 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3554 tree avar = create_tmp_var_raw (atype);
3555 if (TREE_ADDRESSABLE (new_var))
3556 TREE_ADDRESSABLE (avar) = 1;
3557 DECL_ATTRIBUTES (avar)
3558 = tree_cons (get_identifier ("omp simd array"), NULL,
3559 DECL_ATTRIBUTES (avar));
3560 gimple_add_tmp_var (avar);
3561 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3562 NULL_TREE, NULL_TREE);
3563 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3564 NULL_TREE, NULL_TREE);
3566 if (DECL_P (new_var))
3568 SET_DECL_VALUE_EXPR (new_var, lvar);
3569 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3571 return true;
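/* Schematically, for a privatized scalar 'x' in the non-SIMT case
   this builds

     type x_arr[max_vf];   /* carries the "omp simd array" attribute */
     ivar = x_arr[idx];    /* per-SIMD-lane instance inside the loop */
     lvar = x_arr[lane];   /* instance referenced via DECL_VALUE_EXPR */

   where 'x_arr' is a hypothetical name for the temporary created
   above.  In the SIMT case a plain private copy (marked "omp simt
   private") is used instead, with its address passed via simt_eargs.  */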
3574 /* Helper function of lower_rec_input_clauses. For a reference
3575 in a simd reduction, add an underlying variable that it will reference. */
3577 static void
3578 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3580 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3581 if (TREE_CONSTANT (z))
3583 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3584 get_name (new_vard));
3585 gimple_add_tmp_var (z);
3586 TREE_ADDRESSABLE (z) = 1;
3587 z = build_fold_addr_expr_loc (loc, z);
3588 gimplify_assign (new_vard, z, ilist);
3592 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3593 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3594 private variables. Initialization statements go in ILIST, while calls
3595 to destructors go in DLIST. */
3597 static void
3598 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3599 omp_context *ctx, struct omp_for_data *fd)
3601 tree c, dtor, copyin_seq, x, ptr;
3602 bool copyin_by_ref = false;
3603 bool lastprivate_firstprivate = false;
3604 bool reduction_omp_orig_ref = false;
3605 int pass;
3606 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3607 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3608 omplow_simd_context sctx = omplow_simd_context ();
3609 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3610 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3611 gimple_seq llist[3] = { };
3613 copyin_seq = NULL;
3614 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3616 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3617 with data sharing clauses referencing variable sized vars. That
3618 is unnecessarily hard to support and very unlikely to result in
3619 vectorized code anyway. */
3620 if (is_simd)
3621 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3622 switch (OMP_CLAUSE_CODE (c))
3624 case OMP_CLAUSE_LINEAR:
3625 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3626 sctx.max_vf = 1;
3627 /* FALLTHRU */
3628 case OMP_CLAUSE_PRIVATE:
3629 case OMP_CLAUSE_FIRSTPRIVATE:
3630 case OMP_CLAUSE_LASTPRIVATE:
3631 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3632 sctx.max_vf = 1;
3633 break;
3634 case OMP_CLAUSE_REDUCTION:
3635 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3636 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3637 sctx.max_vf = 1;
3638 break;
3639 default:
3640 continue;
3643 /* Add a placeholder for simduid. */
3644 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3645 sctx.simt_eargs.safe_push (NULL_TREE);
3647 /* Do all the fixed sized types in the first pass, and the variable sized
3648 types in the second pass. This makes sure that the scalar arguments to
3649 the variable sized types are processed before we use them in the
3650 variable sized operations. */
3651 for (pass = 0; pass < 2; ++pass)
3653 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3655 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3656 tree var, new_var;
3657 bool by_ref;
3658 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3660 switch (c_kind)
3662 case OMP_CLAUSE_PRIVATE:
3663 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3664 continue;
3665 break;
3666 case OMP_CLAUSE_SHARED:
3667 /* Ignore shared directives in a teams construct. */
3668 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3669 continue;
3670 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3672 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3673 || is_global_var (OMP_CLAUSE_DECL (c)));
3674 continue;
3676 case OMP_CLAUSE_FIRSTPRIVATE:
3677 case OMP_CLAUSE_COPYIN:
3678 break;
3679 case OMP_CLAUSE_LINEAR:
3680 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3681 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3682 lastprivate_firstprivate = true;
3683 break;
3684 case OMP_CLAUSE_REDUCTION:
3685 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3686 reduction_omp_orig_ref = true;
3687 break;
3688 case OMP_CLAUSE__LOOPTEMP_:
3689 /* Handle _looptemp_ clauses only on parallel/task. */
3690 if (fd)
3691 continue;
3692 break;
3693 case OMP_CLAUSE_LASTPRIVATE:
3694 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3696 lastprivate_firstprivate = true;
3697 if (pass != 0 || is_taskloop_ctx (ctx))
3698 continue;
3700 /* Even without a corresponding firstprivate, if
3701 the decl is a Fortran allocatable, it needs an outer var
3702 reference. */
3703 else if (pass == 0
3704 && lang_hooks.decls.omp_private_outer_ref
3705 (OMP_CLAUSE_DECL (c)))
3706 lastprivate_firstprivate = true;
3707 break;
3708 case OMP_CLAUSE_ALIGNED:
3709 if (pass == 0)
3710 continue;
3711 var = OMP_CLAUSE_DECL (c);
3712 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3713 && !is_global_var (var))
3715 new_var = maybe_lookup_decl (var, ctx);
3716 if (new_var == NULL_TREE)
3717 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3718 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3719 tree alarg = omp_clause_aligned_alignment (c);
3720 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3721 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3722 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3723 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3724 gimplify_and_add (x, ilist);
3726 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3727 && is_global_var (var))
3729 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3730 new_var = lookup_decl (var, ctx);
3731 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3732 t = build_fold_addr_expr_loc (clause_loc, t);
3733 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3734 tree alarg = omp_clause_aligned_alignment (c);
3735 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3736 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3737 t = fold_convert_loc (clause_loc, ptype, t);
3738 x = create_tmp_var (ptype);
3739 t = build2 (MODIFY_EXPR, ptype, x, t);
3740 gimplify_and_add (t, ilist);
3741 t = build_simple_mem_ref_loc (clause_loc, x);
3742 SET_DECL_VALUE_EXPR (new_var, t);
3743 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3745 continue;
3746 default:
3747 continue;
3750 new_var = var = OMP_CLAUSE_DECL (c);
3751 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3753 var = TREE_OPERAND (var, 0);
3754 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3755 var = TREE_OPERAND (var, 0);
3756 if (TREE_CODE (var) == INDIRECT_REF
3757 || TREE_CODE (var) == ADDR_EXPR)
3758 var = TREE_OPERAND (var, 0);
3759 if (is_variable_sized (var))
3761 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3762 var = DECL_VALUE_EXPR (var);
3763 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3764 var = TREE_OPERAND (var, 0);
3765 gcc_assert (DECL_P (var));
3767 new_var = var;
3769 if (c_kind != OMP_CLAUSE_COPYIN)
3770 new_var = lookup_decl (var, ctx);
3772 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3774 if (pass != 0)
3775 continue;
3777 /* C/C++ array section reductions. */
3778 else if (c_kind == OMP_CLAUSE_REDUCTION
3779 && var != OMP_CLAUSE_DECL (c))
3781 if (pass == 0)
3782 continue;
3784 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3785 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3786 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3788 tree b = TREE_OPERAND (orig_var, 1);
3789 b = maybe_lookup_decl (b, ctx);
3790 if (b == NULL)
3792 b = TREE_OPERAND (orig_var, 1);
3793 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3795 if (integer_zerop (bias))
3796 bias = b;
3797 else
3799 bias = fold_convert_loc (clause_loc,
3800 TREE_TYPE (b), bias);
3801 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3802 TREE_TYPE (b), b, bias);
3804 orig_var = TREE_OPERAND (orig_var, 0);
3806 if (TREE_CODE (orig_var) == INDIRECT_REF
3807 || TREE_CODE (orig_var) == ADDR_EXPR)
3808 orig_var = TREE_OPERAND (orig_var, 0);
3809 tree d = OMP_CLAUSE_DECL (c);
3810 tree type = TREE_TYPE (d);
3811 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3812 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3813 const char *name = get_name (orig_var);
3814 if (TREE_CONSTANT (v))
3816 x = create_tmp_var_raw (type, name);
3817 gimple_add_tmp_var (x);
3818 TREE_ADDRESSABLE (x) = 1;
3819 x = build_fold_addr_expr_loc (clause_loc, x);
3821 else
3823 tree atmp
3824 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3825 tree t = maybe_lookup_decl (v, ctx);
3826 if (t)
3827 v = t;
3828 else
3829 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3830 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3831 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3832 TREE_TYPE (v), v,
3833 build_int_cst (TREE_TYPE (v), 1));
3834 t = fold_build2_loc (clause_loc, MULT_EXPR,
3835 TREE_TYPE (v), t,
3836 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3837 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3838 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3841 tree ptype = build_pointer_type (TREE_TYPE (type));
3842 x = fold_convert_loc (clause_loc, ptype, x);
3843 tree y = create_tmp_var (ptype, name);
3844 gimplify_assign (y, x, ilist);
3845 x = y;
3846 tree yb = y;
3848 if (!integer_zerop (bias))
3850 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3851 bias);
3852 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3853 yb);
3854 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3855 pointer_sized_int_node, yb, bias);
3856 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3857 yb = create_tmp_var (ptype, name);
3858 gimplify_assign (yb, x, ilist);
3859 x = yb;
3862 d = TREE_OPERAND (d, 0);
3863 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3864 d = TREE_OPERAND (d, 0);
3865 if (TREE_CODE (d) == ADDR_EXPR)
3867 if (orig_var != var)
3869 gcc_assert (is_variable_sized (orig_var));
3870 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3871 x);
3872 gimplify_assign (new_var, x, ilist);
3873 tree new_orig_var = lookup_decl (orig_var, ctx);
3874 tree t = build_fold_indirect_ref (new_var);
3875 DECL_IGNORED_P (new_var) = 0;
3876 TREE_THIS_NOTRAP (t) = 1;
3877 SET_DECL_VALUE_EXPR (new_orig_var, t);
3878 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3880 else
3882 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3883 build_int_cst (ptype, 0));
3884 SET_DECL_VALUE_EXPR (new_var, x);
3885 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3888 else
3890 gcc_assert (orig_var == var);
3891 if (TREE_CODE (d) == INDIRECT_REF)
3893 x = create_tmp_var (ptype, name);
3894 TREE_ADDRESSABLE (x) = 1;
3895 gimplify_assign (x, yb, ilist);
3896 x = build_fold_addr_expr_loc (clause_loc, x);
3898 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3899 gimplify_assign (new_var, x, ilist);
3901 tree y1 = create_tmp_var (ptype, NULL);
3902 gimplify_assign (y1, y, ilist);
3903 tree i2 = NULL_TREE, y2 = NULL_TREE;
3904 tree body2 = NULL_TREE, end2 = NULL_TREE;
3905 tree y3 = NULL_TREE, y4 = NULL_TREE;
3906 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3908 y2 = create_tmp_var (ptype, NULL);
3909 gimplify_assign (y2, y, ilist);
3910 tree ref = build_outer_var_ref (var, ctx);
3911 /* For references, build_outer_var_ref already performs the dereference. */
3912 if (TREE_CODE (d) == INDIRECT_REF)
3913 gcc_assert (omp_is_reference (var));
3914 else if (TREE_CODE (d) == ADDR_EXPR)
3915 ref = build_fold_addr_expr (ref);
3916 else if (omp_is_reference (var))
3917 ref = build_fold_addr_expr (ref);
3918 ref = fold_convert_loc (clause_loc, ptype, ref);
3919 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3920 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3922 y3 = create_tmp_var (ptype, NULL);
3923 gimplify_assign (y3, unshare_expr (ref), ilist);
3925 if (is_simd)
3927 y4 = create_tmp_var (ptype, NULL);
3928 gimplify_assign (y4, ref, dlist);
3931 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3932 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3933 tree body = create_artificial_label (UNKNOWN_LOCATION);
3934 tree end = create_artificial_label (UNKNOWN_LOCATION);
3935 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3936 if (y2)
3938 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3939 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3940 body2 = create_artificial_label (UNKNOWN_LOCATION);
3941 end2 = create_artificial_label (UNKNOWN_LOCATION);
3942 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3944 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3946 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3947 tree decl_placeholder
3948 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3949 SET_DECL_VALUE_EXPR (decl_placeholder,
3950 build_simple_mem_ref (y1));
3951 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3952 SET_DECL_VALUE_EXPR (placeholder,
3953 y3 ? build_simple_mem_ref (y3)
3954 : error_mark_node);
3955 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3956 x = lang_hooks.decls.omp_clause_default_ctor
3957 (c, build_simple_mem_ref (y1),
3958 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3959 if (x)
3960 gimplify_and_add (x, ilist);
3961 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3963 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3964 lower_omp (&tseq, ctx);
3965 gimple_seq_add_seq (ilist, tseq);
3967 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3968 if (is_simd)
3970 SET_DECL_VALUE_EXPR (decl_placeholder,
3971 build_simple_mem_ref (y2));
3972 SET_DECL_VALUE_EXPR (placeholder,
3973 build_simple_mem_ref (y4));
3974 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3975 lower_omp (&tseq, ctx);
3976 gimple_seq_add_seq (dlist, tseq);
3977 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3979 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3980 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3981 x = lang_hooks.decls.omp_clause_dtor
3982 (c, build_simple_mem_ref (y2));
3983 if (x)
3985 gimple_seq tseq = NULL;
3986 dtor = x;
3987 gimplify_stmt (&dtor, &tseq);
3988 gimple_seq_add_seq (dlist, tseq);
3991 else
3993 x = omp_reduction_init (c, TREE_TYPE (type));
3994 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3996 /* reduction(-:var) sums up the partial results, so it
3997 acts identically to reduction(+:var). */
3998 if (code == MINUS_EXPR)
3999 code = PLUS_EXPR;
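/* E.g. with reduction(-:s) each private copy is still initialized
   to 0 and the partial results are combined with +, exactly as for
   reduction(+:s).  */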
4001 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4002 if (is_simd)
4004 x = build2 (code, TREE_TYPE (type),
4005 build_simple_mem_ref (y4),
4006 build_simple_mem_ref (y2));
4007 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4010 gimple *g
4011 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4012 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4013 gimple_seq_add_stmt (ilist, g);
4014 if (y3)
4016 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4017 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4018 gimple_seq_add_stmt (ilist, g);
4020 g = gimple_build_assign (i, PLUS_EXPR, i,
4021 build_int_cst (TREE_TYPE (i), 1));
4022 gimple_seq_add_stmt (ilist, g);
4023 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4024 gimple_seq_add_stmt (ilist, g);
4025 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4026 if (y2)
4028 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4029 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4030 gimple_seq_add_stmt (dlist, g);
4031 if (y4)
4033 g = gimple_build_assign
4034 (y4, POINTER_PLUS_EXPR, y4,
4035 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4036 gimple_seq_add_stmt (dlist, g);
4038 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4039 build_int_cst (TREE_TYPE (i2), 1));
4040 gimple_seq_add_stmt (dlist, g);
4041 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4042 gimple_seq_add_stmt (dlist, g);
4043 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4045 continue;
4047 else if (is_variable_sized (var))
4049 /* For variable-sized types, we need to allocate the
4050 actual storage here. Call alloca and store the
4051 result in the pointer decl that we created elsewhere. */
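/* A sketch of the sequence emitted below (names illustrative):

       void *tmp = __builtin_alloca_with_align (sizeof (var),
                                                DECL_ALIGN (var));
       ptr = (<pointer type>) tmp;

   where PTR is the pointer decl standing in for the VLA.  */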
4052 if (pass == 0)
4053 continue;
4055 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4057 gcall *stmt;
4058 tree tmp, atmp;
4060 ptr = DECL_VALUE_EXPR (new_var);
4061 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4062 ptr = TREE_OPERAND (ptr, 0);
4063 gcc_assert (DECL_P (ptr));
4064 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4066 /* void *tmp = __builtin_alloca_with_align (size, align); */
4067 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4068 stmt = gimple_build_call (atmp, 2, x,
4069 size_int (DECL_ALIGN (var)));
4070 tmp = create_tmp_var_raw (ptr_type_node);
4071 gimple_add_tmp_var (tmp);
4072 gimple_call_set_lhs (stmt, tmp);
4074 gimple_seq_add_stmt (ilist, stmt);
4076 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4077 gimplify_assign (ptr, x, ilist);
4080 else if (omp_is_reference (var))
4082 /* For references that are being privatized for Fortran,
4083 allocate new backing storage for the new pointer
4084 variable. This allows us to avoid changing all the
4085 code that expects a pointer to something that expects
4086 a direct variable. */
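/* Conceptually, for a privatized reference VAR the emitted code is

       <pointee type> storage;   // or an alloca call when the size
                                 // is not a compile-time constant
       new_var = &storage;

   so all downstream uses can keep dereferencing NEW_VAR.  */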
4087 if (pass == 0)
4088 continue;
4090 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4091 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4093 x = build_receiver_ref (var, false, ctx);
4094 x = build_fold_addr_expr_loc (clause_loc, x);
4096 else if (TREE_CONSTANT (x))
4098 /* For reduction in SIMD loop, defer adding the
4099 initialization of the reference, because if we decide
4100 to use a SIMD array for it, the initialization could cause
4101 expansion ICE. */
4102 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4103 x = NULL_TREE;
4104 else
4106 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4107 get_name (var));
4108 gimple_add_tmp_var (x);
4109 TREE_ADDRESSABLE (x) = 1;
4110 x = build_fold_addr_expr_loc (clause_loc, x);
4113 else
4115 tree atmp
4116 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4117 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4118 tree al = size_int (TYPE_ALIGN (rtype));
4119 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4122 if (x)
4124 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4125 gimplify_assign (new_var, x, ilist);
4128 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4130 else if (c_kind == OMP_CLAUSE_REDUCTION
4131 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4133 if (pass == 0)
4134 continue;
4136 else if (pass != 0)
4137 continue;
4139 switch (OMP_CLAUSE_CODE (c))
4141 case OMP_CLAUSE_SHARED:
4142 /* Ignore shared directives in teams construct. */
4143 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4144 continue;
4145 /* Shared global vars are just accessed directly. */
4146 if (is_global_var (new_var))
4147 break;
4148 /* For taskloop firstprivate/lastprivate, represented
4149 as firstprivate and shared clause on the task, new_var
4150 is the firstprivate var. */
4151 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4152 break;
4153 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4154 needs to be delayed until after fixup_child_record_type so
4155 that we get the correct type during the dereference. */
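/* I.e. every use of VAR in the child function is redirected through
   the receiver structure, roughly

       var  ->  .omp_data_i->var      // or *.omp_data_i->var when
                                      // passed by reference  */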
4156 by_ref = use_pointer_for_field (var, ctx);
4157 x = build_receiver_ref (var, by_ref, ctx);
4158 SET_DECL_VALUE_EXPR (new_var, x);
4159 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4161 /* ??? If VAR is not passed by reference, and the variable
4162 hasn't been initialized yet, then we'll get a warning for
4163 the store into the omp_data_s structure. Ideally, we'd be
4164 able to notice this and not store anything at all, but
4165 we're generating code too early. Suppress the warning. */
4166 if (!by_ref)
4167 TREE_NO_WARNING (var) = 1;
4168 break;
4170 case OMP_CLAUSE_LASTPRIVATE:
4171 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4172 break;
4173 /* FALLTHRU */
4175 case OMP_CLAUSE_PRIVATE:
4176 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4177 x = build_outer_var_ref (var, ctx);
4178 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4180 if (is_task_ctx (ctx))
4181 x = build_receiver_ref (var, false, ctx);
4182 else
4183 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4185 else
4186 x = NULL;
4187 do_private:
4188 tree nx;
4189 nx = lang_hooks.decls.omp_clause_default_ctor
4190 (c, unshare_expr (new_var), x);
4191 if (is_simd)
4193 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4194 if ((TREE_ADDRESSABLE (new_var) || nx || y
4195 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4196 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4197 ivar, lvar))
4199 if (nx)
4200 x = lang_hooks.decls.omp_clause_default_ctor
4201 (c, unshare_expr (ivar), x);
4202 if (nx && x)
4203 gimplify_and_add (x, &llist[0]);
4204 if (y)
4206 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4207 if (y)
4209 gimple_seq tseq = NULL;
4211 dtor = y;
4212 gimplify_stmt (&dtor, &tseq);
4213 gimple_seq_add_seq (&llist[1], tseq);
4216 break;
4219 if (nx)
4220 gimplify_and_add (nx, ilist);
4221 /* FALLTHRU */
4223 do_dtor:
4224 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4225 if (x)
4227 gimple_seq tseq = NULL;
4229 dtor = x;
4230 gimplify_stmt (&dtor, &tseq);
4231 gimple_seq_add_seq (dlist, tseq);
4233 break;
4235 case OMP_CLAUSE_LINEAR:
4236 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4237 goto do_firstprivate;
4238 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4239 x = NULL;
4240 else
4241 x = build_outer_var_ref (var, ctx);
4242 goto do_private;
4244 case OMP_CLAUSE_FIRSTPRIVATE:
4245 if (is_task_ctx (ctx))
4247 if (omp_is_reference (var) || is_variable_sized (var))
4248 goto do_dtor;
4249 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4250 ctx))
4251 || use_pointer_for_field (var, NULL))
4253 x = build_receiver_ref (var, false, ctx);
4254 SET_DECL_VALUE_EXPR (new_var, x);
4255 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4256 goto do_dtor;
4259 do_firstprivate:
4260 x = build_outer_var_ref (var, ctx);
4261 if (is_simd)
4263 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4264 && gimple_omp_for_combined_into_p (ctx->stmt))
4266 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4267 tree stept = TREE_TYPE (t);
4268 tree ct = omp_find_clause (clauses,
4269 OMP_CLAUSE__LOOPTEMP_);
4270 gcc_assert (ct);
4271 tree l = OMP_CLAUSE_DECL (ct);
4272 tree n1 = fd->loop.n1;
4273 tree step = fd->loop.step;
4274 tree itype = TREE_TYPE (l);
4275 if (POINTER_TYPE_P (itype))
4276 itype = signed_type_for (itype);
4277 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4278 if (TYPE_UNSIGNED (itype)
4279 && fd->loop.cond_code == GT_EXPR)
4280 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4281 fold_build1 (NEGATE_EXPR, itype, l),
4282 fold_build1 (NEGATE_EXPR,
4283 itype, step));
4284 else
4285 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4286 t = fold_build2 (MULT_EXPR, stept,
4287 fold_convert (stept, l), t);
4289 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4291 x = lang_hooks.decls.omp_clause_linear_ctor
4292 (c, new_var, x, t);
4293 gimplify_and_add (x, ilist);
4294 goto do_dtor;
4297 if (POINTER_TYPE_P (TREE_TYPE (x)))
4298 x = fold_build2 (POINTER_PLUS_EXPR,
4299 TREE_TYPE (x), x, t);
4300 else
4301 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4304 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4305 || TREE_ADDRESSABLE (new_var))
4306 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4307 ivar, lvar))
4309 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4311 tree iv = create_tmp_var (TREE_TYPE (new_var));
4312 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4313 gimplify_and_add (x, ilist);
4314 gimple_stmt_iterator gsi
4315 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4316 gassign *g
4317 = gimple_build_assign (unshare_expr (lvar), iv);
4318 gsi_insert_before_without_update (&gsi, g,
4319 GSI_SAME_STMT);
4320 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4321 enum tree_code code = PLUS_EXPR;
4322 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4323 code = POINTER_PLUS_EXPR;
4324 g = gimple_build_assign (iv, code, iv, t);
4325 gsi_insert_before_without_update (&gsi, g,
4326 GSI_SAME_STMT);
4327 break;
4329 x = lang_hooks.decls.omp_clause_copy_ctor
4330 (c, unshare_expr (ivar), x);
4331 gimplify_and_add (x, &llist[0]);
4332 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4333 if (x)
4335 gimple_seq tseq = NULL;
4337 dtor = x;
4338 gimplify_stmt (&dtor, &tseq);
4339 gimple_seq_add_seq (&llist[1], tseq);
4341 break;
4344 x = lang_hooks.decls.omp_clause_copy_ctor
4345 (c, unshare_expr (new_var), x);
4346 gimplify_and_add (x, ilist);
4347 goto do_dtor;
4349 case OMP_CLAUSE__LOOPTEMP_:
4350 gcc_assert (is_taskreg_ctx (ctx));
4351 x = build_outer_var_ref (var, ctx);
4352 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4353 gimplify_and_add (x, ilist);
4354 break;
4356 case OMP_CLAUSE_COPYIN:
4357 by_ref = use_pointer_for_field (var, NULL);
4358 x = build_receiver_ref (var, by_ref, ctx);
4359 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4360 append_to_statement_list (x, &copyin_seq);
4361 copyin_by_ref |= by_ref;
4362 break;
4364 case OMP_CLAUSE_REDUCTION:
4365 /* OpenACC reductions are initialized using the
4366 GOACC_REDUCTION internal function. */
4367 if (is_gimple_omp_oacc (ctx->stmt))
4368 break;
4369 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4371 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4372 gimple *tseq;
4373 x = build_outer_var_ref (var, ctx);
4375 if (omp_is_reference (var)
4376 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4377 TREE_TYPE (x)))
4378 x = build_fold_addr_expr_loc (clause_loc, x);
4379 SET_DECL_VALUE_EXPR (placeholder, x);
4380 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4381 tree new_vard = new_var;
4382 if (omp_is_reference (var))
4384 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4385 new_vard = TREE_OPERAND (new_var, 0);
4386 gcc_assert (DECL_P (new_vard));
4388 if (is_simd
4389 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4390 ivar, lvar))
4392 if (new_vard == new_var)
4394 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4395 SET_DECL_VALUE_EXPR (new_var, ivar);
4397 else
4399 SET_DECL_VALUE_EXPR (new_vard,
4400 build_fold_addr_expr (ivar));
4401 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4403 x = lang_hooks.decls.omp_clause_default_ctor
4404 (c, unshare_expr (ivar),
4405 build_outer_var_ref (var, ctx));
4406 if (x)
4407 gimplify_and_add (x, &llist[0]);
4408 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4410 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4411 lower_omp (&tseq, ctx);
4412 gimple_seq_add_seq (&llist[0], tseq);
4414 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4415 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4416 lower_omp (&tseq, ctx);
4417 gimple_seq_add_seq (&llist[1], tseq);
4418 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4419 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4420 if (new_vard == new_var)
4421 SET_DECL_VALUE_EXPR (new_var, lvar);
4422 else
4423 SET_DECL_VALUE_EXPR (new_vard,
4424 build_fold_addr_expr (lvar));
4425 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4426 if (x)
4428 tseq = NULL;
4429 dtor = x;
4430 gimplify_stmt (&dtor, &tseq);
4431 gimple_seq_add_seq (&llist[1], tseq);
4433 break;
4435 /* If this is a reference to a constant-size reduction var
4436 with a placeholder, we haven't emitted the initializer
4437 for it because it is undesirable if SIMD arrays are used.
4438 But if they aren't used, we need to emit the deferred
4439 initialization now. */
4440 else if (omp_is_reference (var) && is_simd)
4441 handle_simd_reference (clause_loc, new_vard, ilist);
4442 x = lang_hooks.decls.omp_clause_default_ctor
4443 (c, unshare_expr (new_var),
4444 build_outer_var_ref (var, ctx));
4445 if (x)
4446 gimplify_and_add (x, ilist);
4447 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4449 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4450 lower_omp (&tseq, ctx);
4451 gimple_seq_add_seq (ilist, tseq);
4453 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4454 if (is_simd)
4456 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4457 lower_omp (&tseq, ctx);
4458 gimple_seq_add_seq (dlist, tseq);
4459 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4461 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4462 goto do_dtor;
4464 else
4466 x = omp_reduction_init (c, TREE_TYPE (new_var));
4467 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4468 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4470 /* reduction(-:var) sums up the partial results, so it
4471 acts identically to reduction(+:var). */
4472 if (code == MINUS_EXPR)
4473 code = PLUS_EXPR;
4475 tree new_vard = new_var;
4476 if (is_simd && omp_is_reference (var))
4478 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4479 new_vard = TREE_OPERAND (new_var, 0);
4480 gcc_assert (DECL_P (new_vard));
4482 if (is_simd
4483 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4484 ivar, lvar))
4486 tree ref = build_outer_var_ref (var, ctx);
4488 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4490 if (sctx.is_simt)
4492 if (!simt_lane)
4493 simt_lane = create_tmp_var (unsigned_type_node);
4494 x = build_call_expr_internal_loc
4495 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4496 TREE_TYPE (ivar), 2, ivar, simt_lane);
4497 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4498 gimplify_assign (ivar, x, &llist[2]);
4500 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4501 ref = build_outer_var_ref (var, ctx);
4502 gimplify_assign (ref, x, &llist[1]);
4504 if (new_vard != new_var)
4506 SET_DECL_VALUE_EXPR (new_vard,
4507 build_fold_addr_expr (lvar));
4508 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4511 else
4513 if (omp_is_reference (var) && is_simd)
4514 handle_simd_reference (clause_loc, new_vard, ilist);
4515 gimplify_assign (new_var, x, ilist);
4516 if (is_simd)
4518 tree ref = build_outer_var_ref (var, ctx);
4520 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4521 ref = build_outer_var_ref (var, ctx);
4522 gimplify_assign (ref, x, dlist);
4526 break;
4528 default:
4529 gcc_unreachable ();
4534 if (known_eq (sctx.max_vf, 1U))
4535 sctx.is_simt = false;
4537 if (sctx.lane || sctx.is_simt)
4539 uid = create_tmp_var (ptr_type_node, "simduid");
4540 /* We don't want uninit warnings on simduid; it is always uninitialized,
4541 as we use it only for its DECL_UID, not for its value. */
4542 TREE_NO_WARNING (uid) = 1;
4543 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4544 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4545 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4546 gimple_omp_for_set_clauses (ctx->stmt, c);
4548 /* Emit calls denoting privatized variables and initializing a pointer to
4549 the structure that holds private variables as fields; the ompdevlow pass expands these later. */
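/* The emitted sequence is roughly

       simduid = GOMP_SIMT_ENTER (simduid, <privatized vars>...);
       .omp_simt = GOMP_SIMT_ENTER_ALLOC (simduid);

   shown with the internal-function names used below.  */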
4550 if (sctx.is_simt)
4552 sctx.simt_eargs[0] = uid;
4553 gimple *g
4554 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4555 gimple_call_set_lhs (g, uid);
4556 gimple_seq_add_stmt (ilist, g);
4557 sctx.simt_eargs.release ();
4559 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4560 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4561 gimple_call_set_lhs (g, simtrec);
4562 gimple_seq_add_stmt (ilist, g);
4564 if (sctx.lane)
4566 gimple *g
4567 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4568 gimple_call_set_lhs (g, sctx.lane);
4569 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4570 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4571 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4572 build_int_cst (unsigned_type_node, 0));
4573 gimple_seq_add_stmt (ilist, g);
4574 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
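/* I.e. the loop built into DLIST below has the shape

       simt_lane = 1;
       while (simt_lane < GOMP_SIMT_VF ())
         {
           <llist[2]: x = GOMP_SIMT_XCHG_BFLY (priv, simt_lane);
                      priv = priv OP x;>
           simt_lane <<= 1;
         }  */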
4575 if (llist[2])
4577 tree simt_vf = create_tmp_var (unsigned_type_node);
4578 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4579 gimple_call_set_lhs (g, simt_vf);
4580 gimple_seq_add_stmt (dlist, g);
4582 tree t = build_int_cst (unsigned_type_node, 1);
4583 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4584 gimple_seq_add_stmt (dlist, g);
4586 t = build_int_cst (unsigned_type_node, 0);
4587 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4588 gimple_seq_add_stmt (dlist, g);
4590 tree body = create_artificial_label (UNKNOWN_LOCATION);
4591 tree header = create_artificial_label (UNKNOWN_LOCATION);
4592 tree end = create_artificial_label (UNKNOWN_LOCATION);
4593 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4594 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4596 gimple_seq_add_seq (dlist, llist[2]);
4598 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4599 gimple_seq_add_stmt (dlist, g);
4601 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4602 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4603 gimple_seq_add_stmt (dlist, g);
4605 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4607 for (int i = 0; i < 2; i++)
4608 if (llist[i])
4610 tree vf = create_tmp_var (unsigned_type_node);
4611 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4612 gimple_call_set_lhs (g, vf);
4613 gimple_seq *seq = i == 0 ? ilist : dlist;
4614 gimple_seq_add_stmt (seq, g);
4615 tree t = build_int_cst (unsigned_type_node, 0);
4616 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4617 gimple_seq_add_stmt (seq, g);
4618 tree body = create_artificial_label (UNKNOWN_LOCATION);
4619 tree header = create_artificial_label (UNKNOWN_LOCATION);
4620 tree end = create_artificial_label (UNKNOWN_LOCATION);
4621 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4622 gimple_seq_add_stmt (seq, gimple_build_label (body));
4623 gimple_seq_add_seq (seq, llist[i]);
4624 t = build_int_cst (unsigned_type_node, 1);
4625 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4626 gimple_seq_add_stmt (seq, g);
4627 gimple_seq_add_stmt (seq, gimple_build_label (header));
4628 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4629 gimple_seq_add_stmt (seq, g);
4630 gimple_seq_add_stmt (seq, gimple_build_label (end));
4633 if (sctx.is_simt)
4635 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4636 gimple *g
4637 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4638 gimple_seq_add_stmt (dlist, g);
4641 /* The copyin sequence is not to be executed by the main thread, since
4642 that would result in self-copies. Perhaps not visible to scalars,
4643 but it certainly is to C++ operator=. */
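/* I.e. the guard built below is simply

       if (__builtin_omp_get_thread_num () != 0)
         <copyin_seq>;  */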
4644 if (copyin_seq)
4646 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4647 0);
4648 x = build2 (NE_EXPR, boolean_type_node, x,
4649 build_int_cst (TREE_TYPE (x), 0));
4650 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4651 gimplify_and_add (x, ilist);
4654 /* If any copyin variable is passed by reference, we must ensure the
4655 master thread doesn't modify it before it is copied over in all
4656 threads. Similarly for variables in both firstprivate and
4657 lastprivate clauses we need to ensure the lastprivate copying
4658 happens after firstprivate copying in all threads. And similarly
4659 for UDRs if initializer expression refers to omp_orig. */
4660 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4662 /* Don't add any barrier for #pragma omp simd or
4663 #pragma omp distribute. */
4664 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4665 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4666 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4669 /* If max_vf is non-zero, then we can use only a vectorization factor
4670 up to the max_vf we chose. So stick it into the safelen clause. */
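/* E.g. if the loop had no safelen clause (or, say, safelen(64)) but we
   committed to SIMD arrays of max_vf == 16 elements, a safelen(16)
   clause is created so the vectorizer never exceeds the array size;
   the numbers here are illustrative only.  */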
4671 if (maybe_ne (sctx.max_vf, 0U))
4673 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4674 OMP_CLAUSE_SAFELEN);
4675 poly_uint64 safe_len;
4676 if (c == NULL_TREE
4677 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4678 && maybe_gt (safe_len, sctx.max_vf)))
4680 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4681 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4682 sctx.max_vf);
4683 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4684 gimple_omp_for_set_clauses (ctx->stmt, c);
4690 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4691 both parallel and workshare constructs. PREDICATE may be NULL if it's
4692 always true. */
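/* A sketch of the generated code for

       #pragma omp for lastprivate(x)

   is

       if (<this thread ran the sequentially last iteration>)
         x_orig = x_priv;

   where the guard corresponds to PREDICATE below.  */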
4694 static void
4695 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4696 omp_context *ctx)
4698 tree x, c, label = NULL, orig_clauses = clauses;
4699 bool par_clauses = false;
4700 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4702 /* Early exit if there are no lastprivate or linear clauses. */
4703 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4704 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4705 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4706 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4707 break;
4708 if (clauses == NULL)
4710 /* If this was a workshare clause, see if it had been combined
4711 with its parallel. In that case, look for the clauses on the
4712 parallel statement itself. */
4713 if (is_parallel_ctx (ctx))
4714 return;
4716 ctx = ctx->outer;
4717 if (ctx == NULL || !is_parallel_ctx (ctx))
4718 return;
4720 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4721 OMP_CLAUSE_LASTPRIVATE);
4722 if (clauses == NULL)
4723 return;
4724 par_clauses = true;
4727 bool maybe_simt = false;
4728 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4729 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4731 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4732 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4733 if (simduid)
4734 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4737 if (predicate)
4739 gcond *stmt;
4740 tree label_true, arm1, arm2;
4741 enum tree_code pred_code = TREE_CODE (predicate);
4743 label = create_artificial_label (UNKNOWN_LOCATION);
4744 label_true = create_artificial_label (UNKNOWN_LOCATION);
4745 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4747 arm1 = TREE_OPERAND (predicate, 0);
4748 arm2 = TREE_OPERAND (predicate, 1);
4749 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4750 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4752 else
4754 arm1 = predicate;
4755 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4756 arm2 = boolean_false_node;
4757 pred_code = NE_EXPR;
4759 if (maybe_simt)
4761 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4762 c = fold_convert (integer_type_node, c);
4763 simtcond = create_tmp_var (integer_type_node);
4764 gimplify_assign (simtcond, c, stmt_list);
4765 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4766 1, simtcond);
4767 c = create_tmp_var (integer_type_node);
4768 gimple_call_set_lhs (g, c);
4769 gimple_seq_add_stmt (stmt_list, g);
4770 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4771 label_true, label);
4773 else
4774 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4775 gimple_seq_add_stmt (stmt_list, stmt);
4776 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4779 for (c = clauses; c ;)
4781 tree var, new_var;
4782 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4784 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4785 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4786 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4788 var = OMP_CLAUSE_DECL (c);
4789 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4790 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4791 && is_taskloop_ctx (ctx))
4793 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4794 new_var = lookup_decl (var, ctx->outer);
4796 else
4798 new_var = lookup_decl (var, ctx);
4799 /* Avoid uninitialized warnings for lastprivate and
4800 for linear iterators. */
4801 if (predicate
4802 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4803 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4804 TREE_NO_WARNING (new_var) = 1;
4807 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4809 tree val = DECL_VALUE_EXPR (new_var);
4810 if (TREE_CODE (val) == ARRAY_REF
4811 && VAR_P (TREE_OPERAND (val, 0))
4812 && lookup_attribute ("omp simd array",
4813 DECL_ATTRIBUTES (TREE_OPERAND (val,
4814 0))))
4816 if (lastlane == NULL)
4818 lastlane = create_tmp_var (unsigned_type_node);
4819 gcall *g
4820 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4821 2, simduid,
4822 TREE_OPERAND (val, 1));
4823 gimple_call_set_lhs (g, lastlane);
4824 gimple_seq_add_stmt (stmt_list, g);
4826 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4827 TREE_OPERAND (val, 0), lastlane,
4828 NULL_TREE, NULL_TREE);
4831 else if (maybe_simt)
4833 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4834 ? DECL_VALUE_EXPR (new_var)
4835 : new_var);
4836 if (simtlast == NULL)
4838 simtlast = create_tmp_var (unsigned_type_node);
4839 gcall *g = gimple_build_call_internal
4840 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4841 gimple_call_set_lhs (g, simtlast);
4842 gimple_seq_add_stmt (stmt_list, g);
4844 x = build_call_expr_internal_loc
4845 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4846 TREE_TYPE (val), 2, val, simtlast);
4847 new_var = unshare_expr (new_var);
4848 gimplify_assign (new_var, x, stmt_list);
4849 new_var = unshare_expr (new_var);
4852 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4853 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4855 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4856 gimple_seq_add_seq (stmt_list,
4857 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4858 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4860 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4861 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4863 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4864 gimple_seq_add_seq (stmt_list,
4865 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4866 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4869 x = NULL_TREE;
4870 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4871 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4873 gcc_checking_assert (is_taskloop_ctx (ctx));
4874 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4875 ctx->outer->outer);
4876 if (is_global_var (ovar))
4877 x = ovar;
4879 if (!x)
4880 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4881 if (omp_is_reference (var))
4882 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4883 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4884 gimplify_and_add (x, stmt_list);
4886 c = OMP_CLAUSE_CHAIN (c);
4887 if (c == NULL && !par_clauses)
4889 /* If this was a workshare clause, see if it had been combined
4890 with its parallel. In that case, continue looking for the
4891 clauses also on the parallel statement itself. */
4892 if (is_parallel_ctx (ctx))
4893 break;
4895 ctx = ctx->outer;
4896 if (ctx == NULL || !is_parallel_ctx (ctx))
4897 break;
4899 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4900 OMP_CLAUSE_LASTPRIVATE);
4901 par_clauses = true;
4905 if (label)
4906 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4909 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4910 (which might be a placeholder). INNER is true if this is an inner
4911 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4912 join markers. Generate the before-loop forking sequence in
4913 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4914 general form of these sequences is
4916 GOACC_REDUCTION_SETUP
4917 GOACC_FORK
4918 GOACC_REDUCTION_INIT
4919 ...
4920 GOACC_REDUCTION_FINI
4921 GOACC_JOIN
4922 GOACC_REDUCTION_TEARDOWN. */
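/* Each reduction variable is threaded through those calls roughly as

       v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, off);
       GOACC_FORK
       v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, off);
       ...
       v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, off);
       GOACC_JOIN
       outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op, off);

   matching the temporaries created below.  */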
4924 static void
4925 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4926 gcall *fork, gcall *join, gimple_seq *fork_seq,
4927 gimple_seq *join_seq, omp_context *ctx)
4929 gimple_seq before_fork = NULL;
4930 gimple_seq after_fork = NULL;
4931 gimple_seq before_join = NULL;
4932 gimple_seq after_join = NULL;
4933 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4934 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4935 unsigned offset = 0;
4937 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4938 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4940 tree orig = OMP_CLAUSE_DECL (c);
4941 tree var = maybe_lookup_decl (orig, ctx);
4942 tree ref_to_res = NULL_TREE;
4943 tree incoming, outgoing, v1, v2, v3;
4944 bool is_private = false;
4946 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4947 if (rcode == MINUS_EXPR)
4948 rcode = PLUS_EXPR;
4949 else if (rcode == TRUTH_ANDIF_EXPR)
4950 rcode = BIT_AND_EXPR;
4951 else if (rcode == TRUTH_ORIF_EXPR)
4952 rcode = BIT_IOR_EXPR;
4953 tree op = build_int_cst (unsigned_type_node, rcode);
4955 if (!var)
4956 var = orig;
4958 incoming = outgoing = var;
4960 if (!inner)
4962 /* See if an outer construct also reduces this variable. */
4963 omp_context *outer = ctx;
4965 while (omp_context *probe = outer->outer)
4967 enum gimple_code type = gimple_code (probe->stmt);
4968 tree cls;
4970 switch (type)
4972 case GIMPLE_OMP_FOR:
4973 cls = gimple_omp_for_clauses (probe->stmt);
4974 break;
4976 case GIMPLE_OMP_TARGET:
4977 if (gimple_omp_target_kind (probe->stmt)
4978 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4979 goto do_lookup;
4981 cls = gimple_omp_target_clauses (probe->stmt);
4982 break;
4984 default:
4985 goto do_lookup;
4988 outer = probe;
4989 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4990 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4991 && orig == OMP_CLAUSE_DECL (cls))
4993 incoming = outgoing = lookup_decl (orig, probe);
4994 goto has_outer_reduction;
4996 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4997 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4998 && orig == OMP_CLAUSE_DECL (cls))
5000 is_private = true;
5001 goto do_lookup;
5005 do_lookup:
5006 /* This is the outermost construct with this reduction,
5007 see if there's a mapping for it. */
5008 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5009 && maybe_lookup_field (orig, outer) && !is_private)
5011 ref_to_res = build_receiver_ref (orig, false, outer);
5012 if (omp_is_reference (orig))
5013 ref_to_res = build_simple_mem_ref (ref_to_res);
5015 tree type = TREE_TYPE (var);
5016 if (POINTER_TYPE_P (type))
5017 type = TREE_TYPE (type);
5019 outgoing = var;
5020 incoming = omp_reduction_init_op (loc, rcode, type);
5022 else
5024 /* Try to look at enclosing contexts for reduction var,
5025 use original if no mapping found. */
5026 tree t = NULL_TREE;
5027 omp_context *c = ctx->outer;
5028 while (c && !t)
5030 t = maybe_lookup_decl (orig, c);
5031 c = c->outer;
5033 incoming = outgoing = (t ? t : orig);
5036 has_outer_reduction:;
5039 if (!ref_to_res)
5040 ref_to_res = integer_zero_node;
5042 if (omp_is_reference (orig))
5044 tree type = TREE_TYPE (var);
5045 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5047 if (!inner)
5049 tree x = create_tmp_var (TREE_TYPE (type), id);
5050 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5053 v1 = create_tmp_var (type, id);
5054 v2 = create_tmp_var (type, id);
5055 v3 = create_tmp_var (type, id);
5057 gimplify_assign (v1, var, fork_seq);
5058 gimplify_assign (v2, var, fork_seq);
5059 gimplify_assign (v3, var, fork_seq);
5061 var = build_simple_mem_ref (var);
5062 v1 = build_simple_mem_ref (v1);
5063 v2 = build_simple_mem_ref (v2);
5064 v3 = build_simple_mem_ref (v3);
5065 outgoing = build_simple_mem_ref (outgoing);
5067 if (!TREE_CONSTANT (incoming))
5068 incoming = build_simple_mem_ref (incoming);
5070 else
5071 v1 = v2 = v3 = var;
5073 /* Determine the position in the reduction buffer, which may be used
5074 by the target. The parser has ensured that this is not a
5075 variable-sized type. */
5076 fixed_size_mode mode
5077 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5078 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5079 offset = (offset + align - 1) & ~(align - 1);
5080 tree off = build_int_cst (sizetype, offset);
5081 offset += GET_MODE_SIZE (mode);
5083 if (!init_code)
5085 init_code = build_int_cst (integer_type_node,
5086 IFN_GOACC_REDUCTION_INIT);
5087 fini_code = build_int_cst (integer_type_node,
5088 IFN_GOACC_REDUCTION_FINI);
5089 setup_code = build_int_cst (integer_type_node,
5090 IFN_GOACC_REDUCTION_SETUP);
5091 teardown_code = build_int_cst (integer_type_node,
5092 IFN_GOACC_REDUCTION_TEARDOWN);
5095 tree setup_call
5096 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5097 TREE_TYPE (var), 6, setup_code,
5098 unshare_expr (ref_to_res),
5099 incoming, level, op, off);
5100 tree init_call
5101 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5102 TREE_TYPE (var), 6, init_code,
5103 unshare_expr (ref_to_res),
5104 v1, level, op, off);
5105 tree fini_call
5106 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5107 TREE_TYPE (var), 6, fini_code,
5108 unshare_expr (ref_to_res),
5109 v2, level, op, off);
5110 tree teardown_call
5111 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5112 TREE_TYPE (var), 6, teardown_code,
5113 ref_to_res, v3, level, op, off);
5115 gimplify_assign (v1, setup_call, &before_fork);
5116 gimplify_assign (v2, init_call, &after_fork);
5117 gimplify_assign (v3, fini_call, &before_join);
5118 gimplify_assign (outgoing, teardown_call, &after_join);
5121 /* Now stitch things together. */
5122 gimple_seq_add_seq (fork_seq, before_fork);
5123 if (fork)
5124 gimple_seq_add_stmt (fork_seq, fork);
5125 gimple_seq_add_seq (fork_seq, after_fork);
5127 gimple_seq_add_seq (join_seq, before_join);
5128 if (join)
5129 gimple_seq_add_stmt (join_seq, join);
5130 gimple_seq_add_seq (join_seq, after_join);
5133 /* Generate code to implement the REDUCTION clauses. */
5135 static void
5136 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5138 gimple_seq sub_seq = NULL;
5139 gimple *stmt;
5140 tree x, c;
5141 int count = 0;
5143 /* OpenACC loop reductions are handled elsewhere. */
5144 if (is_gimple_omp_oacc (ctx->stmt))
5145 return;
5147 /* SIMD reductions are handled in lower_rec_input_clauses. */
5148 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5149 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5150 return;
5152 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5153 update in that case, otherwise use a lock. */
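/* E.g. a lone reduction(+:s) becomes

       #pragma omp atomic
       s_orig = s_orig + s_priv;

   whereas two or more clauses (or any UDR/array-section reduction)
   become

       GOMP_atomic_start ();
       s_orig = s_orig + s_priv;  t_orig = t_orig * t_priv;  ...
       GOMP_atomic_end ();  */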
5154 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5155 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5157 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5158 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5160 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5161 count = -1;
5162 break;
5164 count++;
5167 if (count == 0)
5168 return;
5170 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5172 tree var, ref, new_var, orig_var;
5173 enum tree_code code;
5174 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5176 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5177 continue;
5179 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5180 orig_var = var = OMP_CLAUSE_DECL (c);
5181 if (TREE_CODE (var) == MEM_REF)
5183 var = TREE_OPERAND (var, 0);
5184 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5185 var = TREE_OPERAND (var, 0);
5186 if (TREE_CODE (var) == ADDR_EXPR)
5187 var = TREE_OPERAND (var, 0);
5188 else
5190 /* If this is a pointer- or reference-based array
5191 section, the var could be private in the outer
5192 context, e.g. on an orphaned loop construct. Pretend this
5193 is a private variable's outer reference. */
5194 ccode = OMP_CLAUSE_PRIVATE;
5195 if (TREE_CODE (var) == INDIRECT_REF)
5196 var = TREE_OPERAND (var, 0);
5198 orig_var = var;
5199 if (is_variable_sized (var))
5201 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5202 var = DECL_VALUE_EXPR (var);
5203 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5204 var = TREE_OPERAND (var, 0);
5205 gcc_assert (DECL_P (var));
5208 new_var = lookup_decl (var, ctx);
5209 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5210 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5211 ref = build_outer_var_ref (var, ctx, ccode);
5212 code = OMP_CLAUSE_REDUCTION_CODE (c);
5214 /* reduction(-:var) sums up the partial results, so it acts
5215 identically to reduction(+:var). */
5216 if (code == MINUS_EXPR)
5217 code = PLUS_EXPR;
5219 if (count == 1)
5221 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5223 addr = save_expr (addr);
5224 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5225 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5226 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5227 gimplify_and_add (x, stmt_seqp);
5228 return;
5230 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5232 tree d = OMP_CLAUSE_DECL (c);
5233 tree type = TREE_TYPE (d);
5234 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5235 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5236 tree ptype = build_pointer_type (TREE_TYPE (type));
5237 tree bias = TREE_OPERAND (d, 1);
5238 d = TREE_OPERAND (d, 0);
5239 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5241 tree b = TREE_OPERAND (d, 1);
5242 b = maybe_lookup_decl (b, ctx);
5243 if (b == NULL)
5245 b = TREE_OPERAND (d, 1);
5246 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5248 if (integer_zerop (bias))
5249 bias = b;
5250 else
5252 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5253 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5254 TREE_TYPE (b), b, bias);
5256 d = TREE_OPERAND (d, 0);
5258 /* For references, build_outer_var_ref already performs the dereference,
5259 so only new_var needs a dereference. */
5260 if (TREE_CODE (d) == INDIRECT_REF)
5262 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5263 gcc_assert (omp_is_reference (var) && var == orig_var);
5265 else if (TREE_CODE (d) == ADDR_EXPR)
5267 if (orig_var == var)
5269 new_var = build_fold_addr_expr (new_var);
5270 ref = build_fold_addr_expr (ref);
5273 else
5275 gcc_assert (orig_var == var);
5276 if (omp_is_reference (var))
5277 ref = build_fold_addr_expr (ref);
5279 if (DECL_P (v))
5281 tree t = maybe_lookup_decl (v, ctx);
5282 if (t)
5283 v = t;
5284 else
5285 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5286 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5288 if (!integer_zerop (bias))
5290 bias = fold_convert_loc (clause_loc, sizetype, bias);
5291 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5292 TREE_TYPE (new_var), new_var,
5293 unshare_expr (bias));
5294 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5295 TREE_TYPE (ref), ref, bias);
5297 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5298 ref = fold_convert_loc (clause_loc, ptype, ref);
5299 tree m = create_tmp_var (ptype, NULL);
5300 gimplify_assign (m, new_var, stmt_seqp);
5301 new_var = m;
5302 m = create_tmp_var (ptype, NULL);
5303 gimplify_assign (m, ref, stmt_seqp);
5304 ref = m;
5305 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5306 tree body = create_artificial_label (UNKNOWN_LOCATION);
5307 tree end = create_artificial_label (UNKNOWN_LOCATION);
5308 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5309 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5310 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5311 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5313 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5314 tree decl_placeholder
5315 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5316 SET_DECL_VALUE_EXPR (placeholder, out);
5317 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5318 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5319 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5320 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5321 gimple_seq_add_seq (&sub_seq,
5322 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5323 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5324 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5325 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5327 else
5329 x = build2 (code, TREE_TYPE (out), out, priv);
5330 out = unshare_expr (out);
5331 gimplify_assign (out, x, &sub_seq);
5333 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5334 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5335 gimple_seq_add_stmt (&sub_seq, g);
5336 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5337 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5338 gimple_seq_add_stmt (&sub_seq, g);
5339 g = gimple_build_assign (i, PLUS_EXPR, i,
5340 build_int_cst (TREE_TYPE (i), 1));
5341 gimple_seq_add_stmt (&sub_seq, g);
5342 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5343 gimple_seq_add_stmt (&sub_seq, g);
5344 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5346 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5348 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5350 if (omp_is_reference (var)
5351 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5352 TREE_TYPE (ref)))
5353 ref = build_fold_addr_expr_loc (clause_loc, ref);
5354 SET_DECL_VALUE_EXPR (placeholder, ref);
5355 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5356 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5357 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5358 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5359 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5361 else
5363 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5364 ref = build_outer_var_ref (var, ctx);
5365 gimplify_assign (ref, x, &sub_seq);
5369 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5370 0);
5371 gimple_seq_add_stmt (stmt_seqp, stmt);
5373 gimple_seq_add_seq (stmt_seqp, sub_seq);
5375 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5376 0);
5377 gimple_seq_add_stmt (stmt_seqp, stmt);
5381 /* Generate code to implement the COPYPRIVATE clauses. */
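/* A sketch: for

       #pragma omp single copyprivate(x)

   the thread that executed the single region publishes its value (or
   its address, when passed by reference) in SLIST, and every other
   thread copies it back out in RLIST:

       sender:    <copy struct>.x = x;     // or = &x
       receiver:  x = <copy struct>.x;     // or = *<copy struct>.x

   where <copy struct> stands for the records accessed through
   build_sender_ref/build_receiver_ref.  */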
5383 static void
5384 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5385 omp_context *ctx)
5387 tree c;
5389 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5391 tree var, new_var, ref, x;
5392 bool by_ref;
5393 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5395 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5396 continue;
5398 var = OMP_CLAUSE_DECL (c);
5399 by_ref = use_pointer_for_field (var, NULL);
5401 ref = build_sender_ref (var, ctx);
5402 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5403 if (by_ref)
5405 x = build_fold_addr_expr_loc (clause_loc, new_var);
5406 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5408 gimplify_assign (ref, x, slist);
5410 ref = build_receiver_ref (var, false, ctx);
5411 if (by_ref)
5413 ref = fold_convert_loc (clause_loc,
5414 build_pointer_type (TREE_TYPE (new_var)),
5415 ref);
5416 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5418 if (omp_is_reference (var))
5420 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5421 ref = build_simple_mem_ref_loc (clause_loc, ref);
5422 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5424 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5425 gimplify_and_add (x, rlist);
5430 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5431 and REDUCTION from the sender (aka parent) side. */
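/* I.e. for something like

       #pragma omp parallel for firstprivate(x) lastprivate(y)

   the parent emits, around the spawn of the child function, roughly

       .omp_data_o.x = x;       // "do_in" copies, into ILIST
       <child executes>
       y = .omp_data_o.y;       // "do_out" copies, into OLIST  */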
5433 static void
5434 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5435 omp_context *ctx)
5437 tree c, t;
5438 int ignored_looptemp = 0;
5439 bool is_taskloop = false;
5441 /* For taskloop, ignore the first two _looptemp_ clauses; those are
5442 initialized by GOMP_taskloop. */
5443 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5445 ignored_looptemp = 2;
5446 is_taskloop = true;
5449 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5451 tree val, ref, x, var;
5452 bool by_ref, do_in = false, do_out = false;
5453 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5455 switch (OMP_CLAUSE_CODE (c))
5457 case OMP_CLAUSE_PRIVATE:
5458 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5459 break;
5460 continue;
5461 case OMP_CLAUSE_FIRSTPRIVATE:
5462 case OMP_CLAUSE_COPYIN:
5463 case OMP_CLAUSE_LASTPRIVATE:
5464 case OMP_CLAUSE_REDUCTION:
5465 break;
5466 case OMP_CLAUSE_SHARED:
5467 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5468 break;
5469 continue;
5470 case OMP_CLAUSE__LOOPTEMP_:
5471 if (ignored_looptemp)
5473 ignored_looptemp--;
5474 continue;
5476 break;
5477 default:
5478 continue;
5481 val = OMP_CLAUSE_DECL (c);
5482 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5483 && TREE_CODE (val) == MEM_REF)
5485 val = TREE_OPERAND (val, 0);
5486 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5487 val = TREE_OPERAND (val, 0);
5488 if (TREE_CODE (val) == INDIRECT_REF
5489 || TREE_CODE (val) == ADDR_EXPR)
5490 val = TREE_OPERAND (val, 0);
5491 if (is_variable_sized (val))
5492 continue;
5495 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5496 outer taskloop region. */
5497 omp_context *ctx_for_o = ctx;
5498 if (is_taskloop
5499 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5500 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5501 ctx_for_o = ctx->outer;
5503 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5505 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5506 && is_global_var (var))
5507 continue;
5509 t = omp_member_access_dummy_var (var);
5510 if (t)
5512 var = DECL_VALUE_EXPR (var);
5513 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5514 if (o != t)
5515 var = unshare_and_remap (var, t, o);
5516 else
5517 var = unshare_expr (var);
5520 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5522 /* Handle taskloop firstprivate/lastprivate, where the
5523 lastprivate on GIMPLE_OMP_TASK is represented as
5524 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5525 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5526 x = omp_build_component_ref (ctx->sender_decl, f);
5527 if (use_pointer_for_field (val, ctx))
5528 var = build_fold_addr_expr (var);
5529 gimplify_assign (x, var, ilist);
5530 DECL_ABSTRACT_ORIGIN (f) = NULL;
5531 continue;
5534 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5535 || val == OMP_CLAUSE_DECL (c))
5536 && is_variable_sized (val))
5537 continue;
5538 by_ref = use_pointer_for_field (val, NULL);
5540 switch (OMP_CLAUSE_CODE (c))
5542 case OMP_CLAUSE_FIRSTPRIVATE:
5543 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5544 && !by_ref
5545 && is_task_ctx (ctx))
5546 TREE_NO_WARNING (var) = 1;
5547 do_in = true;
5548 break;
5550 case OMP_CLAUSE_PRIVATE:
5551 case OMP_CLAUSE_COPYIN:
5552 case OMP_CLAUSE__LOOPTEMP_:
5553 do_in = true;
5554 break;
5556 case OMP_CLAUSE_LASTPRIVATE:
5557 if (by_ref || omp_is_reference (val))
5559 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5560 continue;
5561 do_in = true;
5563 else
5565 do_out = true;
5566 if (lang_hooks.decls.omp_private_outer_ref (val))
5567 do_in = true;
5569 break;
5571 case OMP_CLAUSE_REDUCTION:
5572 do_in = true;
5573 if (val == OMP_CLAUSE_DECL (c))
5574 do_out = !(by_ref || omp_is_reference (val));
5575 else
5576 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5577 break;
5579 default:
5580 gcc_unreachable ();
5583 if (do_in)
5585 ref = build_sender_ref (val, ctx);
5586 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5587 gimplify_assign (ref, x, ilist);
5588 if (is_task_ctx (ctx))
5589 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5592 if (do_out)
5594 ref = build_sender_ref (val, ctx);
5595 gimplify_assign (var, ref, olist);
5600 /* Generate code to implement SHARED from the sender (aka parent)
5601 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5602 list things that got automatically shared. */
5604 static void
5605 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5607 tree var, ovar, nvar, t, f, x, record_type;
5609 if (ctx->record_type == NULL)
5610 return;
5612 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5613 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5615 ovar = DECL_ABSTRACT_ORIGIN (f);
5616 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5617 continue;
5619 nvar = maybe_lookup_decl (ovar, ctx);
5620 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5621 continue;
5623 /* If CTX is a nested parallel directive, find the immediately
5624 enclosing parallel or workshare construct that contains a
5625 mapping for OVAR. */
5626 var = lookup_decl_in_outer_ctx (ovar, ctx);
5628 t = omp_member_access_dummy_var (var);
5629 if (t)
5631 var = DECL_VALUE_EXPR (var);
5632 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5633 if (o != t)
5634 var = unshare_and_remap (var, t, o);
5635 else
5636 var = unshare_expr (var);
5639 if (use_pointer_for_field (ovar, ctx))
5641 x = build_sender_ref (ovar, ctx);
5642 var = build_fold_addr_expr (var);
5643 gimplify_assign (x, var, ilist);
5645 else
5647 x = build_sender_ref (ovar, ctx);
5648 gimplify_assign (x, var, ilist);
5650 if (!TREE_READONLY (var)
5651 /* We don't need to receive a new reference to a result
5652 or parm decl. In fact we may not store to it as we will
5653 invalidate any pending RSO and generate wrong gimple
5654 during inlining. */
5655 && !((TREE_CODE (var) == RESULT_DECL
5656 || TREE_CODE (var) == PARM_DECL)
5657 && DECL_BY_REFERENCE (var)))
5659 x = build_sender_ref (ovar, ctx);
5660 gimplify_assign (var, x, olist);
5666 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5667 other information that must be processed by the target compiler.
5668 Return the maximum number of dimensions the associated loop might
5669 be partitioned over. */
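/* The marker is an internal-function call of the form

       ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag
                           [, gang_static]);

   and survives until the OpenACC device-lowering pass consumes it.  */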
5671 static unsigned
5672 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5673 gimple_seq *seq, omp_context *ctx)
5675 unsigned levels = 0;
5676 unsigned tag = 0;
5677 tree gang_static = NULL_TREE;
5678 auto_vec<tree, 5> args;
5680 args.quick_push (build_int_cst
5681 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5682 args.quick_push (ddvar);
5683 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5685 switch (OMP_CLAUSE_CODE (c))
5687 case OMP_CLAUSE_GANG:
5688 tag |= OLF_DIM_GANG;
5689 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5690 /* static:* is represented by -1, and we can ignore it, as
5691 scheduling is always static. */
5692 if (gang_static && integer_minus_onep (gang_static))
5693 gang_static = NULL_TREE;
5694 levels++;
5695 break;
5697 case OMP_CLAUSE_WORKER:
5698 tag |= OLF_DIM_WORKER;
5699 levels++;
5700 break;
5702 case OMP_CLAUSE_VECTOR:
5703 tag |= OLF_DIM_VECTOR;
5704 levels++;
5705 break;
5707 case OMP_CLAUSE_SEQ:
5708 tag |= OLF_SEQ;
5709 break;
5711 case OMP_CLAUSE_AUTO:
5712 tag |= OLF_AUTO;
5713 break;
5715 case OMP_CLAUSE_INDEPENDENT:
5716 tag |= OLF_INDEPENDENT;
5717 break;
5719 case OMP_CLAUSE_TILE:
5720 tag |= OLF_TILE;
5721 break;
5723 default:
5724 continue;
5728 if (gang_static)
5730 if (DECL_P (gang_static))
5731 gang_static = build_outer_var_ref (gang_static, ctx);
5732 tag |= OLF_GANG_STATIC;
5735 /* In a parallel region, loops are implicitly INDEPENDENT. */
5736 omp_context *tgt = enclosing_target_ctx (ctx);
5737 if (!tgt || is_oacc_parallel (tgt))
5738 tag |= OLF_INDEPENDENT;
5740 if (tag & OLF_TILE)
5741 /* Tiling could use all 3 levels. */
5742 levels = 3;
5743 else
5745 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5746 Ensure at least one level, or 2 for possible auto
5747 partitioning. */
5748 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5749 << OLF_DIM_BASE) | OLF_SEQ));
5751 if (levels < 1u + maybe_auto)
5752 levels = 1u + maybe_auto;
5755 args.quick_push (build_int_cst (integer_type_node, levels));
5756 args.quick_push (build_int_cst (integer_type_node, tag));
5757 if (gang_static)
5758 args.quick_push (gang_static);
5760 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5761 gimple_set_location (call, loc);
5762 gimple_set_lhs (call, ddvar);
5763 gimple_seq_add_stmt (seq, call);
5765 return levels;
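As a concrete instance, '#pragma acc loop gang worker' inside an OpenACC parallel region yields a head-marker call roughly like the sketch below; argument order follows the quick_push sequence above, with the OLF_* bits OR'ed into the tag:

  .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep,
                          2,                       /* levels */
                          OLF_DIM_GANG | OLF_DIM_WORKER
                          | OLF_INDEPENDENT);      /* tag */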
5768 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW is the
5769 partitioning level of the enclosed region. */
5771 static void
5772 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5773 tree tofollow, gimple_seq *seq)
5775 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5776 : IFN_UNIQUE_OACC_TAIL_MARK);
5777 tree marker = build_int_cst (integer_type_node, marker_kind);
5778 int nargs = 2 + (tofollow != NULL_TREE);
5779 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5780 marker, ddvar, tofollow);
5781 gimple_set_location (call, loc);
5782 gimple_set_lhs (call, ddvar);
5783 gimple_seq_add_stmt (seq, call);
5786 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5787 the loop clauses, from which we extract reductions. Initialize
5788 HEAD and TAIL. */
5790 static void
5791 lower_oacc_head_tail (location_t loc, tree clauses,
5792 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5794 bool inner = false;
5795 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5796 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5798 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5799 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5800 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5802 gcc_assert (count);
5803 for (unsigned done = 1; count; count--, done++)
5805 gimple_seq fork_seq = NULL;
5806 gimple_seq join_seq = NULL;
5808 tree place = build_int_cst (integer_type_node, -1);
5809 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5810 fork_kind, ddvar, place);
5811 gimple_set_location (fork, loc);
5812 gimple_set_lhs (fork, ddvar);
5814 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5815 join_kind, ddvar, place);
5816 gimple_set_location (join, loc);
5817 gimple_set_lhs (join, ddvar);
5819 /* Mark the beginning of this level sequence. */
5820 if (inner)
5821 lower_oacc_loop_marker (loc, ddvar, true,
5822 build_int_cst (integer_type_node, count),
5823 &fork_seq);
5824 lower_oacc_loop_marker (loc, ddvar, false,
5825 build_int_cst (integer_type_node, done),
5826 &join_seq);
5828 lower_oacc_reductions (loc, clauses, place, inner,
5829 fork, join, &fork_seq, &join_seq, ctx);
5831 /* Append this level to head. */
5832 gimple_seq_add_seq (head, fork_seq);
5833 /* Prepend it to tail. */
5834 gimple_seq_add_seq (&join_seq, *tail);
5835 *tail = join_seq;
5837 inner = true;
5840 /* Mark the end of the sequence. */
5841 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5842 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
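For a loop partitioned over two levels, the bracketing this produces is, schematically (reduction code from lower_oacc_reductions elided; the trailing integer is the TOFOLLOW operand):

  head:  IFN_UNIQUE (OACC_HEAD_MARK, ddvar, 2, tag)  /* lower_oacc_head_mark */
         IFN_UNIQUE (OACC_FORK, ddvar, -1)           /* outer level */
         IFN_UNIQUE (OACC_HEAD_MARK, ddvar, 1)
         IFN_UNIQUE (OACC_FORK, ddvar, -1)           /* inner level */
         IFN_UNIQUE (OACC_HEAD_MARK, ddvar)          /* closes the head */
         ... loop body ...
  tail:  IFN_UNIQUE (OACC_TAIL_MARK, ddvar, 2)
         IFN_UNIQUE (OACC_JOIN, ddvar, -1)           /* inner level */
         IFN_UNIQUE (OACC_TAIL_MARK, ddvar, 1)
         IFN_UNIQUE (OACC_JOIN, ddvar, -1)           /* outer level */
         IFN_UNIQUE (OACC_TAIL_MARK, ddvar)          /* closes the tail */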
5845 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5846 catch handler and return it. This prevents programs from violating the
5847 structured block semantics with throws. */
5849 static gimple_seq
5850 maybe_catch_exception (gimple_seq body)
5852 gimple *g;
5853 tree decl;
5855 if (!flag_exceptions)
5856 return body;
5858 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5859 decl = lang_hooks.eh_protect_cleanup_actions ();
5860 else
5861 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5863 g = gimple_build_eh_must_not_throw (decl);
5864 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5865 GIMPLE_TRY_CATCH);
5867 return gimple_seq_alloc_with_stmt (g);
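In C++ terms the wrapping is roughly the sketch below; because the handler region is EH_MUST_NOT_THROW, an exception escaping BODY terminates the program instead of unwinding out of the structured block:

  try
    {
      /* BODY */
    }
  catch (...)
    {
      /* eh_protect_cleanup_actions () if the language supplies one
         (e.g. std::terminate for C++), otherwise __builtin_trap ().  */
    }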
5871 /* Routines to lower OMP directives into OMP-GIMPLE. */
5873 /* If CTX is a worksharing context inside a cancellable parallel
5874 region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
5875 and a conditional branch to the parallel's cancel_label to handle
5876 cancellation in the implicit barrier. */
5878 static void
5879 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5881 gimple *omp_return = gimple_seq_last_stmt (*body);
5882 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5883 if (gimple_omp_return_nowait_p (omp_return))
5884 return;
5885 if (ctx->outer
5886 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5887 && ctx->outer->cancellable)
5889 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5890 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5891 tree lhs = create_tmp_var (c_bool_type);
5892 gimple_omp_return_set_lhs (omp_return, lhs);
5893 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5894 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5895 fold_convert (c_bool_type,
5896 boolean_false_node),
5897 ctx->outer->cancel_label, fallthru_label);
5898 gimple_seq_add_stmt (body, g);
5899 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
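After expansion, the tail of such a cancellable worksharing region behaves roughly like the sketch below. That the LHS ends up holding the result of a *_cancel barrier entry point (e.g. GOMP_barrier_cancel) is an expansion-time detail assumed here for illustration:

  bool cancelled = GOMP_barrier_cancel ();  /* implicit barrier */
  if (cancelled)
    goto cancel_label;    /* the enclosing parallel's cancellation label */
  fallthru_label:;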
5903 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5904 CTX is the enclosing OMP context for the current statement. */
5906 static void
5907 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5909 tree block, control;
5910 gimple_stmt_iterator tgsi;
5911 gomp_sections *stmt;
5912 gimple *t;
5913 gbind *new_stmt, *bind;
5914 gimple_seq ilist, dlist, olist, new_body;
5916 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5918 push_gimplify_context ();
5920 dlist = NULL;
5921 ilist = NULL;
5922 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5923 &ilist, &dlist, ctx, NULL);
5925 new_body = gimple_omp_body (stmt);
5926 gimple_omp_set_body (stmt, NULL);
5927 tgsi = gsi_start (new_body);
5928 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5930 omp_context *sctx;
5931 gimple *sec_start;
5933 sec_start = gsi_stmt (tgsi);
5934 sctx = maybe_lookup_ctx (sec_start);
5935 gcc_assert (sctx);
5937 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5938 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5939 GSI_CONTINUE_LINKING);
5940 gimple_omp_set_body (sec_start, NULL);
5942 if (gsi_one_before_end_p (tgsi))
5944 gimple_seq l = NULL;
5945 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5946 &l, ctx);
5947 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5948 gimple_omp_section_set_last (sec_start);
5951 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5952 GSI_CONTINUE_LINKING);
5955 block = make_node (BLOCK);
5956 bind = gimple_build_bind (NULL, new_body, block);
5958 olist = NULL;
5959 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5961 block = make_node (BLOCK);
5962 new_stmt = gimple_build_bind (NULL, NULL, block);
5963 gsi_replace (gsi_p, new_stmt, true);
5965 pop_gimplify_context (new_stmt);
5966 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5967 BLOCK_VARS (block) = gimple_bind_vars (bind);
5968 if (BLOCK_VARS (block))
5969 TREE_USED (block) = 1;
5971 new_body = NULL;
5972 gimple_seq_add_seq (&new_body, ilist);
5973 gimple_seq_add_stmt (&new_body, stmt);
5974 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5975 gimple_seq_add_stmt (&new_body, bind);
5977 control = create_tmp_var (unsigned_type_node, ".section");
5978 t = gimple_build_omp_continue (control, control);
5979 gimple_omp_sections_set_control (stmt, control);
5980 gimple_seq_add_stmt (&new_body, t);
5982 gimple_seq_add_seq (&new_body, olist);
5983 if (ctx->cancellable)
5984 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5985 gimple_seq_add_seq (&new_body, dlist);
5987 new_body = maybe_catch_exception (new_body);
5989 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5990 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5991 t = gimple_build_omp_return (nowait);
5992 gimple_seq_add_stmt (&new_body, t);
5993 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5995 gimple_bind_set_body (new_stmt, new_body);
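The NEW_BODY assembled above therefore has this overall shape, schematically:

  /* <ilist>  -- private/firstprivate setup                           */
  /* GIMPLE_OMP_SECTIONS <clauses, control = .section>                */
  /* GIMPLE_OMP_SECTIONS_SWITCH                                       */
  /* bind { lowered section bodies; lastprivate in the last section } */
  /* GIMPLE_OMP_CONTINUE (.section, .section)                         */
  /* <olist> -- reductions; cancel_label if cancellable; <dlist>      */
  /* GIMPLE_OMP_RETURN (nowait) + implicit-barrier cancel check       */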
5999 /* A subroutine of lower_omp_single. Expand the simple form of
6000 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6002 if (GOMP_single_start ())
6003 BODY;
6004 [ GOMP_barrier (); ] -> unless 'nowait' is present.
6006 FIXME. It may be better to delay expanding the logic of this until
6007 pass_expand_omp. The expanded logic may make the job more difficult
6008 for a synchronization analysis pass. */
6010 static void
6011 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6013 location_t loc = gimple_location (single_stmt);
6014 tree tlabel = create_artificial_label (loc);
6015 tree flabel = create_artificial_label (loc);
6016 gimple *call, *cond;
6017 tree lhs, decl;
6019 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6020 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6021 call = gimple_build_call (decl, 0);
6022 gimple_call_set_lhs (call, lhs);
6023 gimple_seq_add_stmt (pre_p, call);
6025 cond = gimple_build_cond (EQ_EXPR, lhs,
6026 fold_convert_loc (loc, TREE_TYPE (lhs),
6027 boolean_true_node),
6028 tlabel, flabel);
6029 gimple_seq_add_stmt (pre_p, cond);
6030 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6031 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6032 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6036 /* A subroutine of lower_omp_single. Expand the simple form of
6037 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6039 #pragma omp single copyprivate (a, b, c)
6041 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6044 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6046 BODY;
6047 copyout.a = a;
6048 copyout.b = b;
6049 copyout.c = c;
6050 GOMP_single_copy_end (&copyout);
6052 else
6054 a = copyout_p->a;
6055 b = copyout_p->b;
6056 c = copyout_p->c;
6058 GOMP_barrier ();
6061 FIXME. It may be better to delay expanding the logic of this until
6062 pass_expand_omp. The expanded logic may make the job more difficult
6063 for a synchronization analysis pass. */
6065 static void
6066 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6067 omp_context *ctx)
6069 tree ptr_type, t, l0, l1, l2, bfn_decl;
6070 gimple_seq copyin_seq;
6071 location_t loc = gimple_location (single_stmt);
6073 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6075 ptr_type = build_pointer_type (ctx->record_type);
6076 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6078 l0 = create_artificial_label (loc);
6079 l1 = create_artificial_label (loc);
6080 l2 = create_artificial_label (loc);
6082 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6083 t = build_call_expr_loc (loc, bfn_decl, 0);
6084 t = fold_convert_loc (loc, ptr_type, t);
6085 gimplify_assign (ctx->receiver_decl, t, pre_p);
6087 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6088 build_int_cst (ptr_type, 0));
6089 t = build3 (COND_EXPR, void_type_node, t,
6090 build_and_jump (&l0), build_and_jump (&l1));
6091 gimplify_and_add (t, pre_p);
6093 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6095 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6097 copyin_seq = NULL;
6098 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6099 &copyin_seq, ctx);
6101 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6102 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6103 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6104 gimplify_and_add (t, pre_p);
6106 t = build_and_jump (&l2);
6107 gimplify_and_add (t, pre_p);
6109 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6111 gimple_seq_add_seq (pre_p, copyin_seq);
6113 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6117 /* Expand code for an OpenMP single directive. */
6119 static void
6120 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6122 tree block;
6123 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6124 gbind *bind;
6125 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6127 push_gimplify_context ();
6129 block = make_node (BLOCK);
6130 bind = gimple_build_bind (NULL, NULL, block);
6131 gsi_replace (gsi_p, bind, true);
6132 bind_body = NULL;
6133 dlist = NULL;
6134 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6135 &bind_body, &dlist, ctx, NULL);
6136 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6138 gimple_seq_add_stmt (&bind_body, single_stmt);
6140 if (ctx->record_type)
6141 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6142 else
6143 lower_omp_single_simple (single_stmt, &bind_body);
6145 gimple_omp_set_body (single_stmt, NULL);
6147 gimple_seq_add_seq (&bind_body, dlist);
6149 bind_body = maybe_catch_exception (bind_body);
6151 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6152 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6153 gimple *g = gimple_build_omp_return (nowait);
6154 gimple_seq_add_stmt (&bind_body_tail, g);
6155 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6156 if (ctx->record_type)
6158 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6159 tree clobber = build_constructor (ctx->record_type, NULL);
6160 TREE_THIS_VOLATILE (clobber) = 1;
6161 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6162 clobber), GSI_SAME_STMT);
6164 gimple_seq_add_seq (&bind_body, bind_body_tail);
6165 gimple_bind_set_body (bind, bind_body);
6167 pop_gimplify_context (bind);
6169 gimple_bind_append_vars (bind, ctx->block_vars);
6170 BLOCK_VARS (block) = ctx->block_vars;
6171 if (BLOCK_VARS (block))
6172 TREE_USED (block) = 1;
6176 /* Expand code for an OpenMP master directive. */
6178 static void
6179 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6181 tree block, lab = NULL, x, bfn_decl;
6182 gimple *stmt = gsi_stmt (*gsi_p);
6183 gbind *bind;
6184 location_t loc = gimple_location (stmt);
6185 gimple_seq tseq;
6187 push_gimplify_context ();
6189 block = make_node (BLOCK);
6190 bind = gimple_build_bind (NULL, NULL, block);
6191 gsi_replace (gsi_p, bind, true);
6192 gimple_bind_add_stmt (bind, stmt);
6194 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6195 x = build_call_expr_loc (loc, bfn_decl, 0);
6196 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6197 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6198 tseq = NULL;
6199 gimplify_and_add (x, &tseq);
6200 gimple_bind_add_seq (bind, tseq);
6202 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6203 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6204 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6205 gimple_omp_set_body (stmt, NULL);
6207 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6209 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6211 pop_gimplify_context (bind);
6213 gimple_bind_append_vars (bind, ctx->block_vars);
6214 BLOCK_VARS (block) = ctx->block_vars;
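Semantically, the bind built here is equivalent to this C sketch:

  if (omp_get_thread_num () != 0)
    goto lab;
  /* BODY, wrapped by maybe_catch_exception  */
  lab:;
  /* GIMPLE_OMP_RETURN (true): master implies no barrier.  */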
6218 /* Expand code for an OpenMP taskgroup directive. */
6220 static void
6221 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6223 gimple *stmt = gsi_stmt (*gsi_p);
6224 gcall *x;
6225 gbind *bind;
6226 tree block = make_node (BLOCK);
6228 bind = gimple_build_bind (NULL, NULL, block);
6229 gsi_replace (gsi_p, bind, true);
6230 gimple_bind_add_stmt (bind, stmt);
6232 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6233 0);
6234 gimple_bind_add_stmt (bind, x);
6236 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6237 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6238 gimple_omp_set_body (stmt, NULL);
6240 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6242 gimple_bind_append_vars (bind, ctx->block_vars);
6243 BLOCK_VARS (block) = ctx->block_vars;
6247 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6249 static void
6250 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6251 omp_context *ctx)
6253 struct omp_for_data fd;
6254 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6255 return;
6257 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6258 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6259 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6260 if (!fd.ordered)
6261 return;
6263 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6264 tree c = gimple_omp_ordered_clauses (ord_stmt);
6265 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6266 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6268 /* Merge depend clauses from multiple adjacent
6269 #pragma omp ordered depend(sink:...) constructs
6270 into one #pragma omp ordered depend(sink:...), so that
6271 we can optimize them together. */
6272 gimple_stmt_iterator gsi = *gsi_p;
6273 gsi_next (&gsi);
6274 while (!gsi_end_p (gsi))
6276 gimple *stmt = gsi_stmt (gsi);
6277 if (is_gimple_debug (stmt)
6278 || gimple_code (stmt) == GIMPLE_NOP)
6280 gsi_next (&gsi);
6281 continue;
6283 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6284 break;
6285 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6286 c = gimple_omp_ordered_clauses (ord_stmt2);
6287 if (c == NULL_TREE
6288 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6289 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6290 break;
6291 while (*list_p)
6292 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6293 *list_p = c;
6294 gsi_remove (&gsi, true);
6298 /* Canonicalize sink dependence clauses into one folded clause if
6299 possible.
6301 The basic algorithm is to create a sink vector whose first
6302 element is the GCD of all the first elements, and whose remaining
6303 elements are the minimum of the subsequent columns.
6305 We ignore dependence vectors whose first element is zero because
6306 such dependencies are known to be executed by the same thread.
6308 We take into account the direction of the loop, so a minimum
6309 becomes a maximum if the loop is iterating forwards. We also
6310 ignore sink clauses where the loop direction is unknown, or where
6311 the offsets are clearly invalid because they are not a multiple
6312 of the loop increment.
6314 For example:
6316 #pragma omp for ordered(2)
6317 for (i=0; i < N; ++i)
6318 for (j=0; j < M; ++j)
6320 #pragma omp ordered \
6321 depend(sink:i-8,j-2) \
6322 depend(sink:i,j-1) \ // Completely ignored because i+0.
6323 depend(sink:i-4,j-3) \
6324 depend(sink:i-6,j-4)
6325 #pragma omp ordered depend(source)
6328 Folded clause is:
6330 depend(sink:-gcd(8,4,6),-min(2,3,4))
6331 -or-
6332 depend(sink:-2,-2)
6335 /* FIXME: Computing GCDs where the first element is zero is
6336 non-trivial in the presence of collapsed loops. Do this later. */
6337 if (fd.collapse > 1)
6338 return;
6340 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6342 /* wide_int is not a POD so it must be default-constructed. */
6343 for (unsigned i = 0; i != 2 * len - 1; ++i)
6344 new (static_cast<void*>(folded_deps + i)) wide_int ();
6346 tree folded_dep = NULL_TREE;
6347 /* TRUE if the first dimension's offset is negative. */
6348 bool neg_offset_p = false;
6350 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6351 unsigned int i;
6352 while ((c = *list_p) != NULL)
6354 bool remove = false;
6356 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6357 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6358 goto next_ordered_clause;
6360 tree vec;
6361 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6362 vec && TREE_CODE (vec) == TREE_LIST;
6363 vec = TREE_CHAIN (vec), ++i)
6365 gcc_assert (i < len);
6367 /* omp_extract_for_data has canonicalized the condition. */
6368 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6369 || fd.loops[i].cond_code == GT_EXPR);
6370 bool forward = fd.loops[i].cond_code == LT_EXPR;
6371 bool maybe_lexically_later = true;
6373 /* While the committee makes up its mind, bail if we have any
6374 non-constant steps. */
6375 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6376 goto lower_omp_ordered_ret;
6378 tree itype = TREE_TYPE (TREE_VALUE (vec));
6379 if (POINTER_TYPE_P (itype))
6380 itype = sizetype;
6381 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6382 TYPE_PRECISION (itype),
6383 TYPE_SIGN (itype));
6385 /* Ignore invalid offsets that are not multiples of the step. */
6386 if (!wi::multiple_of_p (wi::abs (offset),
6387 wi::abs (wi::to_wide (fd.loops[i].step)),
6388 UNSIGNED))
6390 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6391 "ignoring sink clause with offset that is not "
6392 "a multiple of the loop step");
6393 remove = true;
6394 goto next_ordered_clause;
6397 /* Calculate the first dimension. The first dimension of
6398 the folded dependency vector is the GCD of the first
6399 elements, while ignoring any first elements whose offset
6400 is 0. */
6401 if (i == 0)
6403 /* Ignore dependence vectors whose first dimension is 0. */
6404 if (offset == 0)
6406 remove = true;
6407 goto next_ordered_clause;
6409 else
6411 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6413 error_at (OMP_CLAUSE_LOCATION (c),
6414 "first offset must be in opposite direction "
6415 "of loop iterations");
6416 goto lower_omp_ordered_ret;
6418 if (forward)
6419 offset = -offset;
6420 neg_offset_p = forward;
6421 /* Initialize the first time around. */
6422 if (folded_dep == NULL_TREE)
6424 folded_dep = c;
6425 folded_deps[0] = offset;
6427 else
6428 folded_deps[0] = wi::gcd (folded_deps[0],
6429 offset, UNSIGNED);
6432 /* Calculate minimum for the remaining dimensions. */
6433 else
6435 folded_deps[len + i - 1] = offset;
6436 if (folded_dep == c)
6437 folded_deps[i] = offset;
6438 else if (maybe_lexically_later
6439 && !wi::eq_p (folded_deps[i], offset))
6441 if (forward ^ wi::gts_p (folded_deps[i], offset))
6443 unsigned int j;
6444 folded_dep = c;
6445 for (j = 1; j <= i; j++)
6446 folded_deps[j] = folded_deps[len + j - 1];
6448 else
6449 maybe_lexically_later = false;
6453 gcc_assert (i == len);
6455 remove = true;
6457 next_ordered_clause:
6458 if (remove)
6459 *list_p = OMP_CLAUSE_CHAIN (c);
6460 else
6461 list_p = &OMP_CLAUSE_CHAIN (c);
6464 if (folded_dep)
6466 if (neg_offset_p)
6467 folded_deps[0] = -folded_deps[0];
6469 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6470 if (POINTER_TYPE_P (itype))
6471 itype = sizetype;
6473 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6474 = wide_int_to_tree (itype, folded_deps[0]);
6475 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6476 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6479 lower_omp_ordered_ret:
6481 /* Ordered without clauses is #pragma omp ordered threads, while we want
6482 a nop instead if we remove all clauses. */
6483 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6484 gsi_replace (gsi_p, gimple_build_nop (), true);
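A minimal standalone illustration of the folding rule, using the example from the big comment above (hypothetical helpers, not part of this file; the i+0 vector has already been dropped and the first elements negated before folding):

  static int gcd (int a, int b) { return b ? gcd (b, a % b) : a; }
  static int min2 (int a, int b) { return a < b ? a : b; }

  int
  folded_example (void)
  {
    int first = gcd (8, gcd (4, 6));     /* GCD of the first column: 2 */
    int second = min2 (2, min2 (3, 4));  /* minimum of the second: 2 */
    return first == 2 && second == 2;    /* i.e. depend(sink:-2,-2) */
  }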
6488 /* Expand code for an OpenMP ordered directive. */
6490 static void
6491 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6493 tree block;
6494 gimple *stmt = gsi_stmt (*gsi_p), *g;
6495 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6496 gcall *x;
6497 gbind *bind;
6498 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6499 OMP_CLAUSE_SIMD);
6500 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6501 loop. */
6502 bool maybe_simt
6503 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6504 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6505 OMP_CLAUSE_THREADS);
6507 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6508 OMP_CLAUSE_DEPEND))
6510 /* FIXME: This needs to be moved to the expansion to verify various
6511 conditions only testable on a cfg with dominators computed, and also
6512 all the depend clauses to be merged still might need to be available
6513 for the runtime checks. */
6514 if (0)
6515 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6516 return;
6519 push_gimplify_context ();
6521 block = make_node (BLOCK);
6522 bind = gimple_build_bind (NULL, NULL, block);
6523 gsi_replace (gsi_p, bind, true);
6524 gimple_bind_add_stmt (bind, stmt);
6526 if (simd)
6528 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6529 build_int_cst (NULL_TREE, threads));
6530 cfun->has_simduid_loops = true;
6532 else
6533 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6534 0);
6535 gimple_bind_add_stmt (bind, x);
6537 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6538 if (maybe_simt)
6540 counter = create_tmp_var (integer_type_node);
6541 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6542 gimple_call_set_lhs (g, counter);
6543 gimple_bind_add_stmt (bind, g);
6545 body = create_artificial_label (UNKNOWN_LOCATION);
6546 test = create_artificial_label (UNKNOWN_LOCATION);
6547 gimple_bind_add_stmt (bind, gimple_build_label (body));
6549 tree simt_pred = create_tmp_var (integer_type_node);
6550 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6551 gimple_call_set_lhs (g, simt_pred);
6552 gimple_bind_add_stmt (bind, g);
6554 tree t = create_artificial_label (UNKNOWN_LOCATION);
6555 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6556 gimple_bind_add_stmt (bind, g);
6558 gimple_bind_add_stmt (bind, gimple_build_label (t));
6560 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6561 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6562 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6563 gimple_omp_set_body (stmt, NULL);
6565 if (maybe_simt)
6567 gimple_bind_add_stmt (bind, gimple_build_label (test));
6568 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6569 gimple_bind_add_stmt (bind, g);
6571 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6572 tree nonneg = create_tmp_var (integer_type_node);
6573 gimple_seq tseq = NULL;
6574 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6575 gimple_bind_add_seq (bind, tseq);
6577 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6578 gimple_call_set_lhs (g, nonneg);
6579 gimple_bind_add_stmt (bind, g);
6581 tree end = create_artificial_label (UNKNOWN_LOCATION);
6582 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6583 gimple_bind_add_stmt (bind, g);
6585 gimple_bind_add_stmt (bind, gimple_build_label (end));
6587 if (simd)
6588 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6589 build_int_cst (NULL_TREE, threads));
6590 else
6591 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6592 0);
6593 gimple_bind_add_stmt (bind, x);
6595 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6597 pop_gimplify_context (bind);
6599 gimple_bind_append_vars (bind, ctx->block_vars);
6600 BLOCK_VARS (block) = gimple_bind_vars (bind);
6604 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6605 substitution of a couple of function calls. But in the NAMED case, it
6606 requires that languages coordinate a symbol name. It is therefore
6607 best put here in common code. */
6609 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6611 static void
6612 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6614 tree block;
6615 tree name, lock, unlock;
6616 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6617 gbind *bind;
6618 location_t loc = gimple_location (stmt);
6619 gimple_seq tbody;
6621 name = gimple_omp_critical_name (stmt);
6622 if (name)
6624 tree decl;
6626 if (!critical_name_mutexes)
6627 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6629 tree *n = critical_name_mutexes->get (name);
6630 if (n == NULL)
6632 char *new_str;
6634 decl = create_tmp_var_raw (ptr_type_node);
6636 new_str = ACONCAT ((".gomp_critical_user_",
6637 IDENTIFIER_POINTER (name), NULL));
6638 DECL_NAME (decl) = get_identifier (new_str);
6639 TREE_PUBLIC (decl) = 1;
6640 TREE_STATIC (decl) = 1;
6641 DECL_COMMON (decl) = 1;
6642 DECL_ARTIFICIAL (decl) = 1;
6643 DECL_IGNORED_P (decl) = 1;
6645 varpool_node::finalize_decl (decl);
6647 critical_name_mutexes->put (name, decl);
6649 else
6650 decl = *n;
6652 /* If '#pragma omp critical' is inside offloaded region or
6653 inside function marked as offloadable, the symbol must be
6654 marked as offloadable too. */
6655 omp_context *octx;
6656 if (cgraph_node::get (current_function_decl)->offloadable)
6657 varpool_node::get_create (decl)->offloadable = 1;
6658 else
6659 for (octx = ctx->outer; octx; octx = octx->outer)
6660 if (is_gimple_omp_offloaded (octx->stmt))
6662 varpool_node::get_create (decl)->offloadable = 1;
6663 break;
6666 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6667 lock = build_call_expr_loc (loc, lock, 1,
6668 build_fold_addr_expr_loc (loc, decl));
6670 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6671 unlock = build_call_expr_loc (loc, unlock, 1,
6672 build_fold_addr_expr_loc (loc, decl));
6674 else
6676 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6677 lock = build_call_expr_loc (loc, lock, 0);
6679 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6680 unlock = build_call_expr_loc (loc, unlock, 0);
6683 push_gimplify_context ();
6685 block = make_node (BLOCK);
6686 bind = gimple_build_bind (NULL, NULL, block);
6687 gsi_replace (gsi_p, bind, true);
6688 gimple_bind_add_stmt (bind, stmt);
6690 tbody = gimple_bind_body (bind);
6691 gimplify_and_add (lock, &tbody);
6692 gimple_bind_set_body (bind, tbody);
6694 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6695 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6696 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6697 gimple_omp_set_body (stmt, NULL);
6699 tbody = gimple_bind_body (bind);
6700 gimplify_and_add (unlock, &tbody);
6701 gimple_bind_set_body (bind, tbody);
6703 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6705 pop_gimplify_context (bind);
6706 gimple_bind_append_vars (bind, ctx->block_vars);
6707 BLOCK_VARS (block) = gimple_bind_vars (bind);
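The net effect for a named critical is the sketch below; the mutex is the COMMON symbol created above, so every translation unit that uses the same name shares one lock:

  /* #pragma omp critical (foo)  */
  GOMP_critical_name_start (&.gomp_critical_user_foo);
  /* BODY, maybe_catch_exception-wrapped  */
  GOMP_critical_name_end (&.gomp_critical_user_foo);
  /* Unnamed critical uses GOMP_critical_start ()/GOMP_critical_end ().  */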
6710 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6711 for a lastprivate clause. Given a loop control predicate of (V
6712 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6713 is appended to *DLIST; iterator initialization is appended to
6714 *BODY_P. */
6716 static void
6717 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6718 gimple_seq *dlist, struct omp_context *ctx)
6720 tree clauses, cond, vinit;
6721 enum tree_code cond_code;
6722 gimple_seq stmts;
6724 cond_code = fd->loop.cond_code;
6725 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6727 /* When possible, use a strict equality expression. This can let
6728 VRP-type optimizations deduce the value and remove a copy. */
6729 if (tree_fits_shwi_p (fd->loop.step))
6731 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6732 if (step == 1 || step == -1)
6733 cond_code = EQ_EXPR;
6736 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6737 || gimple_omp_for_grid_phony (fd->for_stmt))
6738 cond = omp_grid_lastprivate_predicate (fd);
6739 else
6741 tree n2 = fd->loop.n2;
6742 if (fd->collapse > 1
6743 && TREE_CODE (n2) != INTEGER_CST
6744 && gimple_omp_for_combined_into_p (fd->for_stmt))
6746 struct omp_context *taskreg_ctx = NULL;
6747 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6749 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6750 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6751 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6753 if (gimple_omp_for_combined_into_p (gfor))
6755 gcc_assert (ctx->outer->outer
6756 && is_parallel_ctx (ctx->outer->outer));
6757 taskreg_ctx = ctx->outer->outer;
6759 else
6761 struct omp_for_data outer_fd;
6762 omp_extract_for_data (gfor, &outer_fd, NULL);
6763 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6766 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6767 taskreg_ctx = ctx->outer->outer;
6769 else if (is_taskreg_ctx (ctx->outer))
6770 taskreg_ctx = ctx->outer;
6771 if (taskreg_ctx)
6773 int i;
6774 tree taskreg_clauses
6775 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6776 tree innerc = omp_find_clause (taskreg_clauses,
6777 OMP_CLAUSE__LOOPTEMP_);
6778 gcc_assert (innerc);
6779 for (i = 0; i < fd->collapse; i++)
6781 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6782 OMP_CLAUSE__LOOPTEMP_);
6783 gcc_assert (innerc);
6785 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6786 OMP_CLAUSE__LOOPTEMP_);
6787 if (innerc)
6788 n2 = fold_convert (TREE_TYPE (n2),
6789 lookup_decl (OMP_CLAUSE_DECL (innerc),
6790 taskreg_ctx));
6793 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6796 clauses = gimple_omp_for_clauses (fd->for_stmt);
6797 stmts = NULL;
6798 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6799 if (!gimple_seq_empty_p (stmts))
6801 gimple_seq_add_seq (&stmts, *dlist);
6802 *dlist = stmts;
6804 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6805 vinit = fd->loop.n1;
6806 if (cond_code == EQ_EXPR
6807 && tree_fits_shwi_p (fd->loop.n2)
6808 && ! integer_zerop (fd->loop.n2))
6809 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6810 else
6811 vinit = unshare_expr (vinit);
6813 /* Initialize the iterator variable, so that threads that don't execute
6814 any iterations don't execute the lastprivate clauses by accident. */
6815 gimplify_assign (fd->loop.v, vinit, body_p);
6820 /* Lower code for an OMP loop directive. */
6822 static void
6823 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6825 tree *rhs_p, block;
6826 struct omp_for_data fd, *fdp = NULL;
6827 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6828 gbind *new_stmt;
6829 gimple_seq omp_for_body, body, dlist;
6830 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6831 size_t i;
6833 push_gimplify_context ();
6835 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6837 block = make_node (BLOCK);
6838 new_stmt = gimple_build_bind (NULL, NULL, block);
6839 /* Replace at gsi right away, so that 'stmt' is no longer a member
6840 of a sequence, as we're going to add it to a different
6841 one below. */
6842 gsi_replace (gsi_p, new_stmt, true);
6844 /* Move declaration of temporaries in the loop body before we make
6845 it go away. */
6846 omp_for_body = gimple_omp_body (stmt);
6847 if (!gimple_seq_empty_p (omp_for_body)
6848 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6850 gbind *inner_bind
6851 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6852 tree vars = gimple_bind_vars (inner_bind);
6853 gimple_bind_append_vars (new_stmt, vars);
6854 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6855 keep them on the inner_bind and its block. */
6856 gimple_bind_set_vars (inner_bind, NULL_TREE);
6857 if (gimple_bind_block (inner_bind))
6858 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6861 if (gimple_omp_for_combined_into_p (stmt))
6863 omp_extract_for_data (stmt, &fd, NULL);
6864 fdp = &fd;
6866 /* We need two temporaries with fd.loop.v type (istart/iend)
6867 and then (fd.collapse - 1) temporaries with the same
6868 type for count2 ... countN-1 vars if not constant. */
6869 size_t count = 2;
6870 tree type = fd.iter_type;
6871 if (fd.collapse > 1
6872 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6873 count += fd.collapse - 1;
6874 bool taskreg_for
6875 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6876 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6877 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6878 tree simtc = NULL;
6879 tree clauses = *pc;
6880 if (taskreg_for)
6881 outerc
6882 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6883 OMP_CLAUSE__LOOPTEMP_);
6884 if (ctx->simt_stmt)
6885 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6886 OMP_CLAUSE__LOOPTEMP_);
6887 for (i = 0; i < count; i++)
6889 tree temp;
6890 if (taskreg_for)
6892 gcc_assert (outerc);
6893 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6894 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6895 OMP_CLAUSE__LOOPTEMP_);
6897 else
6899 /* If there are 2 adjacent SIMD stmts, one with _simt_
6900 clause, another without, make sure they have the same
6901 decls in _looptemp_ clauses, because the outer stmt
6902 they are combined into will look up just one inner_stmt. */
6903 if (ctx->simt_stmt)
6904 temp = OMP_CLAUSE_DECL (simtc);
6905 else
6906 temp = create_tmp_var (type);
6907 insert_decl_map (&ctx->outer->cb, temp, temp);
6909 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6910 OMP_CLAUSE_DECL (*pc) = temp;
6911 pc = &OMP_CLAUSE_CHAIN (*pc);
6912 if (ctx->simt_stmt)
6913 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6914 OMP_CLAUSE__LOOPTEMP_);
6916 *pc = clauses;
6919 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6920 dlist = NULL;
6921 body = NULL;
6922 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6923 fdp);
6924 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6926 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6928 /* Lower the header expressions. At this point, we can assume that
6929 the header is of the form:
6931 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6933 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6934 using the .omp_data_s mapping, if needed. */
6935 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6937 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6938 if (!is_gimple_min_invariant (*rhs_p))
6939 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6940 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6941 recompute_tree_invariant_for_addr_expr (*rhs_p);
6943 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6944 if (!is_gimple_min_invariant (*rhs_p))
6945 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6946 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6947 recompute_tree_invariant_for_addr_expr (*rhs_p);
6949 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6950 if (!is_gimple_min_invariant (*rhs_p))
6951 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6954 /* Once lowered, extract the bounds and clauses. */
6955 omp_extract_for_data (stmt, &fd, NULL);
6957 if (is_gimple_omp_oacc (ctx->stmt)
6958 && !ctx_in_oacc_kernels_region (ctx))
6959 lower_oacc_head_tail (gimple_location (stmt),
6960 gimple_omp_for_clauses (stmt),
6961 &oacc_head, &oacc_tail, ctx);
6963 /* Add OpenACC partitioning and reduction markers just before the loop. */
6964 if (oacc_head)
6965 gimple_seq_add_seq (&body, oacc_head);
6967 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6969 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6970 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6971 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6972 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6974 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6975 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6976 OMP_CLAUSE_LINEAR_STEP (c)
6977 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6978 ctx);
6981 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6982 && gimple_omp_for_grid_phony (stmt));
6983 if (!phony_loop)
6984 gimple_seq_add_stmt (&body, stmt);
6985 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6987 if (!phony_loop)
6988 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6989 fd.loop.v));
6991 /* After the loop, add exit clauses. */
6992 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6994 if (ctx->cancellable)
6995 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6997 gimple_seq_add_seq (&body, dlist);
6999 body = maybe_catch_exception (body);
7001 if (!phony_loop)
7003 /* Region exit marker goes at the end of the loop body. */
7004 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
7005 maybe_add_implicit_barrier_cancel (ctx, &body);
7008 /* Add OpenACC joining and reduction markers just after the loop. */
7009 if (oacc_tail)
7010 gimple_seq_add_seq (&body, oacc_tail);
7012 pop_gimplify_context (new_stmt);
7014 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7015 maybe_remove_omp_member_access_dummy_vars (new_stmt);
7016 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
7017 if (BLOCK_VARS (block))
7018 TREE_USED (block) = 1;
7020 gimple_bind_set_body (new_stmt, body);
7021 gimple_omp_set_body (stmt, NULL);
7022 gimple_omp_for_set_pre_body (stmt, NULL);
7025 /* Callback for walk_stmts. Check if the current statement only contains
7026 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
7028 static tree
7029 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7030 bool *handled_ops_p,
7031 struct walk_stmt_info *wi)
7033 int *info = (int *) wi->info;
7034 gimple *stmt = gsi_stmt (*gsi_p);
7036 *handled_ops_p = true;
7037 switch (gimple_code (stmt))
7039 WALK_SUBSTMTS;
7041 case GIMPLE_DEBUG:
7042 break;
7043 case GIMPLE_OMP_FOR:
7044 case GIMPLE_OMP_SECTIONS:
7045 *info = *info == 0 ? 1 : -1;
7046 break;
7047 default:
7048 *info = -1;
7049 break;
7051 return NULL;
7054 struct omp_taskcopy_context
7056 /* This field must be at the beginning, as we do "inheritance": Some
7057 callback functions for tree-inline.c (e.g., omp_copy_decl)
7058 receive a copy_body_data pointer that is up-casted to an
7059 omp_context pointer. */
7060 copy_body_data cb;
7061 omp_context *ctx;
7064 static tree
7065 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7067 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7069 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7070 return create_tmp_var (TREE_TYPE (var));
7072 return var;
7075 static tree
7076 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7078 tree name, new_fields = NULL, type, f;
7080 type = lang_hooks.types.make_type (RECORD_TYPE);
7081 name = DECL_NAME (TYPE_NAME (orig_type));
7082 name = build_decl (gimple_location (tcctx->ctx->stmt),
7083 TYPE_DECL, name, type);
7084 TYPE_NAME (type) = name;
7086 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7088 tree new_f = copy_node (f);
7089 DECL_CONTEXT (new_f) = type;
7090 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7091 TREE_CHAIN (new_f) = new_fields;
7092 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7093 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7094 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7095 &tcctx->cb, NULL);
7096 new_fields = new_f;
7097 tcctx->cb.decl_map->put (f, new_f);
7099 TYPE_FIELDS (type) = nreverse (new_fields);
7100 layout_type (type);
7101 return type;
7104 /* Create task copyfn. */
7106 static void
7107 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7109 struct function *child_cfun;
7110 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7111 tree record_type, srecord_type, bind, list;
7112 bool record_needs_remap = false, srecord_needs_remap = false;
7113 splay_tree_node n;
7114 struct omp_taskcopy_context tcctx;
7115 location_t loc = gimple_location (task_stmt);
7116 size_t looptempno = 0;
7118 child_fn = gimple_omp_task_copy_fn (task_stmt);
7119 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7120 gcc_assert (child_cfun->cfg == NULL);
7121 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7123 /* Reset DECL_CONTEXT on function arguments. */
7124 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7125 DECL_CONTEXT (t) = child_fn;
7127 /* Populate the function. */
7128 push_gimplify_context ();
7129 push_cfun (child_cfun);
7131 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7132 TREE_SIDE_EFFECTS (bind) = 1;
7133 list = NULL;
7134 DECL_SAVED_TREE (child_fn) = bind;
7135 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7137 /* Remap src and dst argument types if needed. */
7138 record_type = ctx->record_type;
7139 srecord_type = ctx->srecord_type;
7140 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7141 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7143 record_needs_remap = true;
7144 break;
7146 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7147 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7149 srecord_needs_remap = true;
7150 break;
7153 if (record_needs_remap || srecord_needs_remap)
7155 memset (&tcctx, '\0', sizeof (tcctx));
7156 tcctx.cb.src_fn = ctx->cb.src_fn;
7157 tcctx.cb.dst_fn = child_fn;
7158 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7159 gcc_checking_assert (tcctx.cb.src_node);
7160 tcctx.cb.dst_node = tcctx.cb.src_node;
7161 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7162 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7163 tcctx.cb.eh_lp_nr = 0;
7164 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7165 tcctx.cb.decl_map = new hash_map<tree, tree>;
7166 tcctx.ctx = ctx;
7168 if (record_needs_remap)
7169 record_type = task_copyfn_remap_type (&tcctx, record_type);
7170 if (srecord_needs_remap)
7171 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7173 else
7174 tcctx.cb.decl_map = NULL;
7176 arg = DECL_ARGUMENTS (child_fn);
7177 TREE_TYPE (arg) = build_pointer_type (record_type);
7178 sarg = DECL_CHAIN (arg);
7179 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7181 /* First pass: initialize temporaries used in record_type and srecord_type
7182 sizes and field offsets. */
7183 if (tcctx.cb.decl_map)
7184 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7185 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7187 tree *p;
7189 decl = OMP_CLAUSE_DECL (c);
7190 p = tcctx.cb.decl_map->get (decl);
7191 if (p == NULL)
7192 continue;
7193 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7194 sf = (tree) n->value;
7195 sf = *tcctx.cb.decl_map->get (sf);
7196 src = build_simple_mem_ref_loc (loc, sarg);
7197 src = omp_build_component_ref (src, sf);
7198 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7199 append_to_statement_list (t, &list);
7202 /* Second pass: copy shared var pointers and copy-construct non-VLA
7203 firstprivate vars. */
7204 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7205 switch (OMP_CLAUSE_CODE (c))
7207 splay_tree_key key;
7208 case OMP_CLAUSE_SHARED:
7209 decl = OMP_CLAUSE_DECL (c);
7210 key = (splay_tree_key) decl;
7211 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7212 key = (splay_tree_key) &DECL_UID (decl);
7213 n = splay_tree_lookup (ctx->field_map, key);
7214 if (n == NULL)
7215 break;
7216 f = (tree) n->value;
7217 if (tcctx.cb.decl_map)
7218 f = *tcctx.cb.decl_map->get (f);
7219 n = splay_tree_lookup (ctx->sfield_map, key);
7220 sf = (tree) n->value;
7221 if (tcctx.cb.decl_map)
7222 sf = *tcctx.cb.decl_map->get (sf);
7223 src = build_simple_mem_ref_loc (loc, sarg);
7224 src = omp_build_component_ref (src, sf);
7225 dst = build_simple_mem_ref_loc (loc, arg);
7226 dst = omp_build_component_ref (dst, f);
7227 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7228 append_to_statement_list (t, &list);
7229 break;
7230 case OMP_CLAUSE__LOOPTEMP_:
7231 /* Fields for the first two _looptemp_ clauses are initialized by
7232 GOMP_taskloop*; the rest are handled like firstprivate. */
7233 if (looptempno < 2)
7235 looptempno++;
7236 break;
7238 /* FALLTHRU */
7239 case OMP_CLAUSE_FIRSTPRIVATE:
7240 decl = OMP_CLAUSE_DECL (c);
7241 if (is_variable_sized (decl))
7242 break;
7243 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7244 if (n == NULL)
7245 break;
7246 f = (tree) n->value;
7247 if (tcctx.cb.decl_map)
7248 f = *tcctx.cb.decl_map->get (f);
7249 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7250 if (n != NULL)
7252 sf = (tree) n->value;
7253 if (tcctx.cb.decl_map)
7254 sf = *tcctx.cb.decl_map->get (sf);
7255 src = build_simple_mem_ref_loc (loc, sarg);
7256 src = omp_build_component_ref (src, sf);
7257 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7258 src = build_simple_mem_ref_loc (loc, src);
7260 else
7261 src = decl;
7262 dst = build_simple_mem_ref_loc (loc, arg);
7263 dst = omp_build_component_ref (dst, f);
7264 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__LOOPTEMP_)
7265 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7266 else
7267 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7268 append_to_statement_list (t, &list);
7269 break;
7270 case OMP_CLAUSE_PRIVATE:
7271 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7272 break;
7273 decl = OMP_CLAUSE_DECL (c);
7274 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7275 f = (tree) n->value;
7276 if (tcctx.cb.decl_map)
7277 f = *tcctx.cb.decl_map->get (f);
7278 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7279 if (n != NULL)
7281 sf = (tree) n->value;
7282 if (tcctx.cb.decl_map)
7283 sf = *tcctx.cb.decl_map->get (sf);
7284 src = build_simple_mem_ref_loc (loc, sarg);
7285 src = omp_build_component_ref (src, sf);
7286 if (use_pointer_for_field (decl, NULL))
7287 src = build_simple_mem_ref_loc (loc, src);
7289 else
7290 src = decl;
7291 dst = build_simple_mem_ref_loc (loc, arg);
7292 dst = omp_build_component_ref (dst, f);
7293 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7294 append_to_statement_list (t, &list);
7295 break;
7296 default:
7297 break;
7300 /* Last pass: handle VLA firstprivates. */
7301 if (tcctx.cb.decl_map)
7302 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7303 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7305 tree ind, ptr, df;
7307 decl = OMP_CLAUSE_DECL (c);
7308 if (!is_variable_sized (decl))
7309 continue;
7310 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7311 if (n == NULL)
7312 continue;
7313 f = (tree) n->value;
7314 f = *tcctx.cb.decl_map->get (f);
7315 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7316 ind = DECL_VALUE_EXPR (decl);
7317 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7318 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7319 n = splay_tree_lookup (ctx->sfield_map,
7320 (splay_tree_key) TREE_OPERAND (ind, 0));
7321 sf = (tree) n->value;
7322 sf = *tcctx.cb.decl_map->get (sf);
7323 src = build_simple_mem_ref_loc (loc, sarg);
7324 src = omp_build_component_ref (src, sf);
7325 src = build_simple_mem_ref_loc (loc, src);
7326 dst = build_simple_mem_ref_loc (loc, arg);
7327 dst = omp_build_component_ref (dst, f);
7328 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7329 append_to_statement_list (t, &list);
7330 n = splay_tree_lookup (ctx->field_map,
7331 (splay_tree_key) TREE_OPERAND (ind, 0));
7332 df = (tree) n->value;
7333 df = *tcctx.cb.decl_map->get (df);
7334 ptr = build_simple_mem_ref_loc (loc, arg);
7335 ptr = omp_build_component_ref (ptr, df);
7336 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7337 build_fold_addr_expr_loc (loc, dst));
7338 append_to_statement_list (t, &list);
7341 t = build1 (RETURN_EXPR, void_type_node, NULL);
7342 append_to_statement_list (t, &list);
7344 if (tcctx.cb.decl_map)
7345 delete tcctx.cb.decl_map;
7346 pop_gimplify_context (NULL);
7347 BIND_EXPR_BODY (bind) = list;
7348 pop_cfun ();
7351 static void
7352 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7354 tree c, clauses;
7355 gimple *g;
7356 size_t n_in = 0, n_out = 0, idx = 2, i;
7358 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7359 gcc_assert (clauses);
7360 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7361 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7362 switch (OMP_CLAUSE_DEPEND_KIND (c))
7364 case OMP_CLAUSE_DEPEND_IN:
7365 n_in++;
7366 break;
7367 case OMP_CLAUSE_DEPEND_OUT:
7368 case OMP_CLAUSE_DEPEND_INOUT:
7369 n_out++;
7370 break;
7371 case OMP_CLAUSE_DEPEND_SOURCE:
7372 case OMP_CLAUSE_DEPEND_SINK:
7373 /* FALLTHRU */
7374 default:
7375 gcc_unreachable ();
7377 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7378 tree array = create_tmp_var (type);
7379 TREE_ADDRESSABLE (array) = 1;
7380 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7381 NULL_TREE);
7382 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7383 gimple_seq_add_stmt (iseq, g);
7384 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7385 NULL_TREE);
7386 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7387 gimple_seq_add_stmt (iseq, g);
7388 for (i = 0; i < 2; i++)
7390 if ((i ? n_in : n_out) == 0)
7391 continue;
7392 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7393 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7394 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7396 tree t = OMP_CLAUSE_DECL (c);
7397 t = fold_convert (ptr_type_node, t);
7398 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7399 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7400 NULL_TREE, NULL_TREE);
7401 g = gimple_build_assign (r, t);
7402 gimple_seq_add_stmt (iseq, g);
7405 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7406 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7407 OMP_CLAUSE_CHAIN (c) = *pclauses;
7408 *pclauses = c;
7409 tree clobber = build_constructor (type, NULL);
7410 TREE_THIS_VOLATILE (clobber) = 1;
7411 g = gimple_build_assign (array, clobber);
7412 gimple_seq_add_stmt (oseq, g);
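The depend array handed to the runtime thus has the layout sketched below (an illustration; the counts are smuggled through ptr_type_node values as shown above, and out/inout addresses precede in addresses):

  void *array[2 + n_out + n_in];
  array[0] = (void *) (uintptr_t) (n_in + n_out);  /* total addresses */
  array[1] = (void *) (uintptr_t) n_out;           /* leading out/inout */
  /* array[2] .. array[1 + n_out]:           &out_0, &out_1, ...  */
  /* array[2 + n_out] .. array[1 + total]:   &in_0, &in_1, ...    */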
7415 /* Lower the OpenMP parallel or task directive in the current statement
7416 in GSI_P. CTX holds context information for the directive. */
7418 static void
7419 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7421 tree clauses;
7422 tree child_fn, t;
7423 gimple *stmt = gsi_stmt (*gsi_p);
7424 gbind *par_bind, *bind, *dep_bind = NULL;
7425 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7426 location_t loc = gimple_location (stmt);
7428 clauses = gimple_omp_taskreg_clauses (stmt);
7429 par_bind
7430 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7431 par_body = gimple_bind_body (par_bind);
7432 child_fn = ctx->cb.dst_fn;
7433 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7434 && !gimple_omp_parallel_combined_p (stmt))
7436 struct walk_stmt_info wi;
7437 int ws_num = 0;
7439 memset (&wi, 0, sizeof (wi));
7440 wi.info = &ws_num;
7441 wi.val_only = true;
7442 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7443 if (ws_num == 1)
7444 gimple_omp_parallel_set_combined_p (stmt, true);
7446 gimple_seq dep_ilist = NULL;
7447 gimple_seq dep_olist = NULL;
7448 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7449 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7451 push_gimplify_context ();
7452 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7453 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7454 &dep_ilist, &dep_olist);
7457 if (ctx->srecord_type)
7458 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7460 push_gimplify_context ();
7462 par_olist = NULL;
7463 par_ilist = NULL;
7464 par_rlist = NULL;
7465 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7466 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7467 if (phony_construct && ctx->record_type)
7469 gcc_checking_assert (!ctx->receiver_decl);
7470 ctx->receiver_decl = create_tmp_var
7471 (build_reference_type (ctx->record_type), ".omp_rec");
7473 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7474 lower_omp (&par_body, ctx);
7475 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7476 lower_reduction_clauses (clauses, &par_rlist, ctx);
7478 /* Declare all the variables created by mapping and the variables
7479 declared in the scope of the parallel body. */
7480 record_vars_into (ctx->block_vars, child_fn);
7481 maybe_remove_omp_member_access_dummy_vars (par_bind);
7482 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7484 if (ctx->record_type)
7486 ctx->sender_decl
7487 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7488 : ctx->record_type, ".omp_data_o");
7489 DECL_NAMELESS (ctx->sender_decl) = 1;
7490 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7491 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7494 olist = NULL;
7495 ilist = NULL;
7496 lower_send_clauses (clauses, &ilist, &olist, ctx);
7497 lower_send_shared_vars (&ilist, &olist, ctx);
7499 if (ctx->record_type)
7501 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7502 TREE_THIS_VOLATILE (clobber) = 1;
7503 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7504 clobber));
7507 /* Once all the expansions are done, sequence all the different
7508 fragments inside gimple_omp_body. */
7510 new_body = NULL;
7512 if (ctx->record_type)
7514 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7515 /* fixup_child_record_type might have changed receiver_decl's type. */
7516 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7517 gimple_seq_add_stmt (&new_body,
7518 gimple_build_assign (ctx->receiver_decl, t));
7521 gimple_seq_add_seq (&new_body, par_ilist);
7522 gimple_seq_add_seq (&new_body, par_body);
7523 gimple_seq_add_seq (&new_body, par_rlist);
7524 if (ctx->cancellable)
7525 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7526 gimple_seq_add_seq (&new_body, par_olist);
7527 new_body = maybe_catch_exception (new_body);
7528 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7529 gimple_seq_add_stmt (&new_body,
7530 gimple_build_omp_continue (integer_zero_node,
7531 integer_zero_node));
7532 if (!phony_construct)
7534 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7535 gimple_omp_set_body (stmt, new_body);
7538 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7539 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7540 gimple_bind_add_seq (bind, ilist);
7541 if (!phony_construct)
7542 gimple_bind_add_stmt (bind, stmt);
7543 else
7544 gimple_bind_add_seq (bind, new_body);
7545 gimple_bind_add_seq (bind, olist);
7547 pop_gimplify_context (NULL);
7549 if (dep_bind)
7551 gimple_bind_add_seq (dep_bind, dep_ilist);
7552 gimple_bind_add_stmt (dep_bind, bind);
7553 gimple_bind_add_seq (dep_bind, dep_olist);
7554 pop_gimplify_context (dep_bind);
7558 /* Lower the GIMPLE_OMP_TARGET in the current statement
7559 in GSI_P. CTX holds context information for the directive. */
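/* Editorial sketch (illustrative, not from the original source): for
   an offloaded region such as

     #pragma omp target map(tofrom: a)
       ...

   the code below describes every mapped object to the runtime via
   three parallel arrays: .omp_data_arr (host addresses),
   .omp_data_sizes (byte sizes) and .omp_data_kinds (the GOMP_MAP_*
   kind in the low bits, the log2 alignment above talign_shift), all
   indexed by the same map_idx.  */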
7561 static void
7562 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7564 tree clauses;
7565 tree child_fn, t, c;
7566 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7567 gbind *tgt_bind, *bind, *dep_bind = NULL;
7568 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7569 location_t loc = gimple_location (stmt);
7570 bool offloaded, data_region;
7571 unsigned int map_cnt = 0;
7573 offloaded = is_gimple_omp_offloaded (stmt);
7574 switch (gimple_omp_target_kind (stmt))
7576 case GF_OMP_TARGET_KIND_REGION:
7577 case GF_OMP_TARGET_KIND_UPDATE:
7578 case GF_OMP_TARGET_KIND_ENTER_DATA:
7579 case GF_OMP_TARGET_KIND_EXIT_DATA:
7580 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7581 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7582 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7583 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7584 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7585 data_region = false;
7586 break;
7587 case GF_OMP_TARGET_KIND_DATA:
7588 case GF_OMP_TARGET_KIND_OACC_DATA:
7589 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7590 data_region = true;
7591 break;
7592 default:
7593 gcc_unreachable ();
7596 clauses = gimple_omp_target_clauses (stmt);
7598 gimple_seq dep_ilist = NULL;
7599 gimple_seq dep_olist = NULL;
7600 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7602 push_gimplify_context ();
7603 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7604 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7605 &dep_ilist, &dep_olist);
7608 tgt_bind = NULL;
7609 tgt_body = NULL;
7610 if (offloaded)
7612 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7613 tgt_body = gimple_bind_body (tgt_bind);
7615 else if (data_region)
7616 tgt_body = gimple_omp_body (stmt);
7617 child_fn = ctx->cb.dst_fn;
7619 push_gimplify_context ();
7620 fplist = NULL;
7622 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7623 switch (OMP_CLAUSE_CODE (c))
7625 tree var, x;
7627 default:
7628 break;
7629 case OMP_CLAUSE_MAP:
7630 #if CHECKING_P
7631 /* First check what we're prepared to handle in the following. */
7632 switch (OMP_CLAUSE_MAP_KIND (c))
7634 case GOMP_MAP_ALLOC:
7635 case GOMP_MAP_TO:
7636 case GOMP_MAP_FROM:
7637 case GOMP_MAP_TOFROM:
7638 case GOMP_MAP_POINTER:
7639 case GOMP_MAP_TO_PSET:
7640 case GOMP_MAP_DELETE:
7641 case GOMP_MAP_RELEASE:
7642 case GOMP_MAP_ALWAYS_TO:
7643 case GOMP_MAP_ALWAYS_FROM:
7644 case GOMP_MAP_ALWAYS_TOFROM:
7645 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7646 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7647 case GOMP_MAP_STRUCT:
7648 case GOMP_MAP_ALWAYS_POINTER:
7649 break;
7650 case GOMP_MAP_FORCE_ALLOC:
7651 case GOMP_MAP_FORCE_TO:
7652 case GOMP_MAP_FORCE_FROM:
7653 case GOMP_MAP_FORCE_TOFROM:
7654 case GOMP_MAP_FORCE_PRESENT:
7655 case GOMP_MAP_FORCE_DEVICEPTR:
7656 case GOMP_MAP_DEVICE_RESIDENT:
7657 case GOMP_MAP_LINK:
7658 gcc_assert (is_gimple_omp_oacc (stmt));
7659 break;
7660 default:
7661 gcc_unreachable ();
7663 #endif
7664 /* FALLTHRU */
7665 case OMP_CLAUSE_TO:
7666 case OMP_CLAUSE_FROM:
7667 oacc_firstprivate:
7668 var = OMP_CLAUSE_DECL (c);
7669 if (!DECL_P (var))
7671 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7672 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7673 && (OMP_CLAUSE_MAP_KIND (c)
7674 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7675 map_cnt++;
7676 continue;
7679 if (DECL_SIZE (var)
7680 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7682 tree var2 = DECL_VALUE_EXPR (var);
7683 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7684 var2 = TREE_OPERAND (var2, 0);
7685 gcc_assert (DECL_P (var2));
7686 var = var2;
7689 if (offloaded
7690 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7691 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7692 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7694 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7696 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7697 && varpool_node::get_create (var)->offloadable)
7698 continue;
7700 tree type = build_pointer_type (TREE_TYPE (var));
7701 tree new_var = lookup_decl (var, ctx);
7702 x = create_tmp_var_raw (type, get_name (new_var));
7703 gimple_add_tmp_var (x);
7704 x = build_simple_mem_ref (x);
7705 SET_DECL_VALUE_EXPR (new_var, x);
7706 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7708 continue;
7711 if (!maybe_lookup_field (var, ctx))
7712 continue;
7714 /* Don't remap oacc parallel reduction variables, because the
7715 intermediate result must be local to each gang. */
7716 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7717 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7719 x = build_receiver_ref (var, true, ctx);
7720 tree new_var = lookup_decl (var, ctx);
7722 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7723 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7724 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7725 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7726 x = build_simple_mem_ref (x);
7727 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7729 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7730 if (omp_is_reference (new_var))
7732 /* Create a local object to hold the instance
7733 value. */
7734 tree type = TREE_TYPE (TREE_TYPE (new_var));
7735 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7736 tree inst = create_tmp_var (type, id);
7737 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7738 x = build_fold_addr_expr (inst);
7740 gimplify_assign (new_var, x, &fplist);
7742 else if (DECL_P (new_var))
7744 SET_DECL_VALUE_EXPR (new_var, x);
7745 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7747 else
7748 gcc_unreachable ();
7750 map_cnt++;
7751 break;
7753 case OMP_CLAUSE_FIRSTPRIVATE:
7754 if (is_oacc_parallel (ctx))
7755 goto oacc_firstprivate;
7756 map_cnt++;
7757 var = OMP_CLAUSE_DECL (c);
7758 if (!omp_is_reference (var)
7759 && !is_gimple_reg_type (TREE_TYPE (var)))
7761 tree new_var = lookup_decl (var, ctx);
7762 if (is_variable_sized (var))
7764 tree pvar = DECL_VALUE_EXPR (var);
7765 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7766 pvar = TREE_OPERAND (pvar, 0);
7767 gcc_assert (DECL_P (pvar));
7768 tree new_pvar = lookup_decl (pvar, ctx);
7769 x = build_fold_indirect_ref (new_pvar);
7770 TREE_THIS_NOTRAP (x) = 1;
7772 else
7773 x = build_receiver_ref (var, true, ctx);
7774 SET_DECL_VALUE_EXPR (new_var, x);
7775 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7777 break;
7779 case OMP_CLAUSE_PRIVATE:
7780 if (is_gimple_omp_oacc (ctx->stmt))
7781 break;
7782 var = OMP_CLAUSE_DECL (c);
7783 if (is_variable_sized (var))
7785 tree new_var = lookup_decl (var, ctx);
7786 tree pvar = DECL_VALUE_EXPR (var);
7787 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7788 pvar = TREE_OPERAND (pvar, 0);
7789 gcc_assert (DECL_P (pvar));
7790 tree new_pvar = lookup_decl (pvar, ctx);
7791 x = build_fold_indirect_ref (new_pvar);
7792 TREE_THIS_NOTRAP (x) = 1;
7793 SET_DECL_VALUE_EXPR (new_var, x);
7794 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7796 break;
7798 case OMP_CLAUSE_USE_DEVICE_PTR:
7799 case OMP_CLAUSE_IS_DEVICE_PTR:
7800 var = OMP_CLAUSE_DECL (c);
7801 map_cnt++;
7802 if (is_variable_sized (var))
7804 tree new_var = lookup_decl (var, ctx);
7805 tree pvar = DECL_VALUE_EXPR (var);
7806 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7807 pvar = TREE_OPERAND (pvar, 0);
7808 gcc_assert (DECL_P (pvar));
7809 tree new_pvar = lookup_decl (pvar, ctx);
7810 x = build_fold_indirect_ref (new_pvar);
7811 TREE_THIS_NOTRAP (x) = 1;
7812 SET_DECL_VALUE_EXPR (new_var, x);
7813 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7815 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7817 tree new_var = lookup_decl (var, ctx);
7818 tree type = build_pointer_type (TREE_TYPE (var));
7819 x = create_tmp_var_raw (type, get_name (new_var));
7820 gimple_add_tmp_var (x);
7821 x = build_simple_mem_ref (x);
7822 SET_DECL_VALUE_EXPR (new_var, x);
7823 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7825 else
7827 tree new_var = lookup_decl (var, ctx);
7828 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7829 gimple_add_tmp_var (x);
7830 SET_DECL_VALUE_EXPR (new_var, x);
7831 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7833 break;
7836 if (offloaded)
7838 target_nesting_level++;
7839 lower_omp (&tgt_body, ctx);
7840 target_nesting_level--;
7842 else if (data_region)
7843 lower_omp (&tgt_body, ctx);
7845 if (offloaded)
7847 /* Declare all the variables created by mapping and the variables
7848 declared in the scope of the target body. */
7849 record_vars_into (ctx->block_vars, child_fn);
7850 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
7851 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7854 olist = NULL;
7855 ilist = NULL;
7856 if (ctx->record_type)
7858 ctx->sender_decl
7859 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7860 DECL_NAMELESS (ctx->sender_decl) = 1;
7861 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7862 t = make_tree_vec (3);
7863 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7864 TREE_VEC_ELT (t, 1)
7865 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7866 ".omp_data_sizes");
7867 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7868 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7869 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7870 tree tkind_type = short_unsigned_type_node;
7871 int talign_shift = 8;
7872 TREE_VEC_ELT (t, 2)
7873 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7874 ".omp_data_kinds");
7875 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7876 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7877 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7878 gimple_omp_target_set_data_arg (stmt, t);
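	  /* Editorial note: the TREE_VEC installed above is the
	     (addresses, sizes, kinds) triple that later expansion hands
	     to the GOMP_target family of runtime entry points; the sizes
	     and kinds arrays start out TREE_STATIC and are demoted below
	     to runtime initialization only if some size is not a
	     compile-time constant.  */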
7880 vec<constructor_elt, va_gc> *vsize;
7881 vec<constructor_elt, va_gc> *vkind;
7882 vec_alloc (vsize, map_cnt);
7883 vec_alloc (vkind, map_cnt);
7884 unsigned int map_idx = 0;
7886 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7887 switch (OMP_CLAUSE_CODE (c))
7889 tree ovar, nc, s, purpose, var, x, type;
7890 unsigned int talign;
7892 default:
7893 break;
7895 case OMP_CLAUSE_MAP:
7896 case OMP_CLAUSE_TO:
7897 case OMP_CLAUSE_FROM:
7898 oacc_firstprivate_map:
7899 nc = c;
7900 ovar = OMP_CLAUSE_DECL (c);
7901 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7902 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7903 || (OMP_CLAUSE_MAP_KIND (c)
7904 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7905 break;
7906 if (!DECL_P (ovar))
7908 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7909 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7911 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7912 == get_base_address (ovar));
7913 nc = OMP_CLAUSE_CHAIN (c);
7914 ovar = OMP_CLAUSE_DECL (nc);
7916 else
7918 tree x = build_sender_ref (ovar, ctx);
7919 tree v
7920 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7921 gimplify_assign (x, v, &ilist);
7922 nc = NULL_TREE;
7925 else
7927 if (DECL_SIZE (ovar)
7928 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7930 tree ovar2 = DECL_VALUE_EXPR (ovar);
7931 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7932 ovar2 = TREE_OPERAND (ovar2, 0);
7933 gcc_assert (DECL_P (ovar2));
7934 ovar = ovar2;
7936 if (!maybe_lookup_field (ovar, ctx))
7937 continue;
7940 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7941 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7942 talign = DECL_ALIGN_UNIT (ovar);
7943 if (nc)
7945 var = lookup_decl_in_outer_ctx (ovar, ctx);
7946 x = build_sender_ref (ovar, ctx);
7948 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7949 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7950 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7951 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7953 gcc_assert (offloaded);
7954 tree avar
7955 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7956 mark_addressable (avar);
7957 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7958 talign = DECL_ALIGN_UNIT (avar);
7959 avar = build_fold_addr_expr (avar);
7960 gimplify_assign (x, avar, &ilist);
7962 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7964 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7965 if (!omp_is_reference (var))
7967 if (is_gimple_reg (var)
7968 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7969 TREE_NO_WARNING (var) = 1;
7970 var = build_fold_addr_expr (var);
7972 else
7973 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7974 gimplify_assign (x, var, &ilist);
7976 else if (is_gimple_reg (var))
7978 gcc_assert (offloaded);
7979 tree avar = create_tmp_var (TREE_TYPE (var));
7980 mark_addressable (avar);
7981 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7982 if (GOMP_MAP_COPY_TO_P (map_kind)
7983 || map_kind == GOMP_MAP_POINTER
7984 || map_kind == GOMP_MAP_TO_PSET
7985 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7987 /* If we need to initialize a temporary
7988 with VAR because it is not addressable, and
7989 the variable hasn't been initialized yet, then
7990 we'll get a warning for the store to avar.
7991 Don't warn in that case; the mapping might
7992 be implicit. */
7993 TREE_NO_WARNING (var) = 1;
7994 gimplify_assign (avar, var, &ilist);
7996 avar = build_fold_addr_expr (avar);
7997 gimplify_assign (x, avar, &ilist);
7998 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7999 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
8000 && !TYPE_READONLY (TREE_TYPE (var)))
8002 x = unshare_expr (x);
8003 x = build_simple_mem_ref (x);
8004 gimplify_assign (var, x, &olist);
8007 else
8009 var = build_fold_addr_expr (var);
8010 gimplify_assign (x, var, &ilist);
8013 s = NULL_TREE;
8014 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8016 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8017 s = TREE_TYPE (ovar);
8018 if (TREE_CODE (s) == REFERENCE_TYPE)
8019 s = TREE_TYPE (s);
8020 s = TYPE_SIZE_UNIT (s);
8022 else
8023 s = OMP_CLAUSE_SIZE (c);
8024 if (s == NULL_TREE)
8025 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8026 s = fold_convert (size_type_node, s);
8027 purpose = size_int (map_idx++);
8028 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8029 if (TREE_CODE (s) != INTEGER_CST)
8030 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8032 unsigned HOST_WIDE_INT tkind, tkind_zero;
8033 switch (OMP_CLAUSE_CODE (c))
8035 case OMP_CLAUSE_MAP:
8036 tkind = OMP_CLAUSE_MAP_KIND (c);
8037 tkind_zero = tkind;
8038 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
8039 switch (tkind)
8041 case GOMP_MAP_ALLOC:
8042 case GOMP_MAP_TO:
8043 case GOMP_MAP_FROM:
8044 case GOMP_MAP_TOFROM:
8045 case GOMP_MAP_ALWAYS_TO:
8046 case GOMP_MAP_ALWAYS_FROM:
8047 case GOMP_MAP_ALWAYS_TOFROM:
8048 case GOMP_MAP_RELEASE:
8049 case GOMP_MAP_FORCE_TO:
8050 case GOMP_MAP_FORCE_FROM:
8051 case GOMP_MAP_FORCE_TOFROM:
8052 case GOMP_MAP_FORCE_PRESENT:
8053 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8054 break;
8055 case GOMP_MAP_DELETE:
8056 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
8057 default:
8058 break;
8060 if (tkind_zero != tkind)
8062 if (integer_zerop (s))
8063 tkind = tkind_zero;
8064 else if (integer_nonzerop (s))
8065 tkind_zero = tkind;
8067 break;
8068 case OMP_CLAUSE_FIRSTPRIVATE:
8069 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8070 tkind = GOMP_MAP_TO;
8071 tkind_zero = tkind;
8072 break;
8073 case OMP_CLAUSE_TO:
8074 tkind = GOMP_MAP_TO;
8075 tkind_zero = tkind;
8076 break;
8077 case OMP_CLAUSE_FROM:
8078 tkind = GOMP_MAP_FROM;
8079 tkind_zero = tkind;
8080 break;
8081 default:
8082 gcc_unreachable ();
8084 gcc_checking_assert (tkind
8085 < (HOST_WIDE_INT_C (1U) << talign_shift));
8086 gcc_checking_assert (tkind_zero
8087 < (HOST_WIDE_INT_C (1U) << talign_shift));
8088 talign = ceil_log2 (talign);
8089 tkind |= talign << talign_shift;
8090 tkind_zero |= talign << talign_shift;
8091 gcc_checking_assert (tkind
8092 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8093 gcc_checking_assert (tkind_zero
8094 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8095 if (tkind == tkind_zero)
8096 x = build_int_cstu (tkind_type, tkind);
8097 else
8099 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8100 x = build3 (COND_EXPR, tkind_type,
8101 fold_build2 (EQ_EXPR, boolean_type_node,
8102 unshare_expr (s), size_zero_node),
8103 build_int_cstu (tkind_type, tkind_zero),
8104 build_int_cstu (tkind_type, tkind));
8106 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8107 if (nc && nc != c)
8108 c = nc;
8109 break;
8111 case OMP_CLAUSE_FIRSTPRIVATE:
8112 if (is_oacc_parallel (ctx))
8113 goto oacc_firstprivate_map;
8114 ovar = OMP_CLAUSE_DECL (c);
8115 if (omp_is_reference (ovar))
8116 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8117 else
8118 talign = DECL_ALIGN_UNIT (ovar);
8119 var = lookup_decl_in_outer_ctx (ovar, ctx);
8120 x = build_sender_ref (ovar, ctx);
8121 tkind = GOMP_MAP_FIRSTPRIVATE;
8122 type = TREE_TYPE (ovar);
8123 if (omp_is_reference (ovar))
8124 type = TREE_TYPE (type);
8125 if ((INTEGRAL_TYPE_P (type)
8126 && TYPE_PRECISION (type) <= POINTER_SIZE)
8127 || TREE_CODE (type) == POINTER_TYPE)
8129 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8130 tree t = var;
8131 if (omp_is_reference (var))
8132 t = build_simple_mem_ref (var);
8133 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8134 TREE_NO_WARNING (var) = 1;
8135 if (TREE_CODE (type) != POINTER_TYPE)
8136 t = fold_convert (pointer_sized_int_node, t);
8137 t = fold_convert (TREE_TYPE (x), t);
8138 gimplify_assign (x, t, &ilist);
8140 else if (omp_is_reference (var))
8141 gimplify_assign (x, var, &ilist);
8142 else if (is_gimple_reg (var))
8144 tree avar = create_tmp_var (TREE_TYPE (var));
8145 mark_addressable (avar);
8146 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8147 TREE_NO_WARNING (var) = 1;
8148 gimplify_assign (avar, var, &ilist);
8149 avar = build_fold_addr_expr (avar);
8150 gimplify_assign (x, avar, &ilist);
8152 else
8154 var = build_fold_addr_expr (var);
8155 gimplify_assign (x, var, &ilist);
8157 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8158 s = size_int (0);
8159 else if (omp_is_reference (ovar))
8160 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8161 else
8162 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8163 s = fold_convert (size_type_node, s);
8164 purpose = size_int (map_idx++);
8165 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8166 if (TREE_CODE (s) != INTEGER_CST)
8167 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8169 gcc_checking_assert (tkind
8170 < (HOST_WIDE_INT_C (1U) << talign_shift));
8171 talign = ceil_log2 (talign);
8172 tkind |= talign << talign_shift;
8173 gcc_checking_assert (tkind
8174 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8175 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8176 build_int_cstu (tkind_type, tkind));
8177 break;
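	/* Editorial note: GOMP_MAP_FIRSTPRIVATE_INT above is a by-value
	   shortcut for small scalars; the value is converted to a
	   pointer-sized integer and passed in the address slot with size
	   0, so nothing is actually mapped, and the receiver side simply
	   converts the bits back.  */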
8179 case OMP_CLAUSE_USE_DEVICE_PTR:
8180 case OMP_CLAUSE_IS_DEVICE_PTR:
8181 ovar = OMP_CLAUSE_DECL (c);
8182 var = lookup_decl_in_outer_ctx (ovar, ctx);
8183 x = build_sender_ref (ovar, ctx);
8184 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8185 tkind = GOMP_MAP_USE_DEVICE_PTR;
8186 else
8187 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8188 type = TREE_TYPE (ovar);
8189 if (TREE_CODE (type) == ARRAY_TYPE)
8190 var = build_fold_addr_expr (var);
8191 else
8193 if (omp_is_reference (ovar))
8195 type = TREE_TYPE (type);
8196 if (TREE_CODE (type) != ARRAY_TYPE)
8197 var = build_simple_mem_ref (var);
8198 var = fold_convert (TREE_TYPE (x), var);
8201 gimplify_assign (x, var, &ilist);
8202 s = size_int (0);
8203 purpose = size_int (map_idx++);
8204 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8205 gcc_checking_assert (tkind
8206 < (HOST_WIDE_INT_C (1U) << talign_shift));
8207 gcc_checking_assert (tkind
8208 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8209 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8210 build_int_cstu (tkind_type, tkind));
8211 break;
8214 gcc_assert (map_idx == map_cnt);
8216 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8217 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8218 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8219 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8220 for (int i = 1; i <= 2; i++)
8221 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8223 gimple_seq initlist = NULL;
8224 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8225 TREE_VEC_ELT (t, i)),
8226 &initlist, true, NULL_TREE);
8227 gimple_seq_add_seq (&ilist, initlist);
8229 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8230 NULL);
8231 TREE_THIS_VOLATILE (clobber) = 1;
8232 gimple_seq_add_stmt (&olist,
8233 gimple_build_assign (TREE_VEC_ELT (t, i),
8234 clobber));
8237 tree clobber = build_constructor (ctx->record_type, NULL);
8238 TREE_THIS_VOLATILE (clobber) = 1;
8239 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8240 clobber));
8243 /* Once all the expansions are done, sequence all the different
8244 fragments inside gimple_omp_body. */
8246 new_body = NULL;
8248 if (offloaded
8249 && ctx->record_type)
8251 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8252 /* fixup_child_record_type might have changed receiver_decl's type. */
8253 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8254 gimple_seq_add_stmt (&new_body,
8255 gimple_build_assign (ctx->receiver_decl, t));
8257 gimple_seq_add_seq (&new_body, fplist);
8259 if (offloaded || data_region)
8261 tree prev = NULL_TREE;
8262 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8263 switch (OMP_CLAUSE_CODE (c))
8265 tree var, x;
8266 default:
8267 break;
8268 case OMP_CLAUSE_FIRSTPRIVATE:
8269 if (is_gimple_omp_oacc (ctx->stmt))
8270 break;
8271 var = OMP_CLAUSE_DECL (c);
8272 if (omp_is_reference (var)
8273 || is_gimple_reg_type (TREE_TYPE (var)))
8275 tree new_var = lookup_decl (var, ctx);
8276 tree type;
8277 type = TREE_TYPE (var);
8278 if (omp_is_reference (var))
8279 type = TREE_TYPE (type);
8280 if ((INTEGRAL_TYPE_P (type)
8281 && TYPE_PRECISION (type) <= POINTER_SIZE)
8282 || TREE_CODE (type) == POINTER_TYPE)
8284 x = build_receiver_ref (var, false, ctx);
8285 if (TREE_CODE (type) != POINTER_TYPE)
8286 x = fold_convert (pointer_sized_int_node, x);
8287 x = fold_convert (type, x);
8288 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8289 fb_rvalue);
8290 if (omp_is_reference (var))
8292 tree v = create_tmp_var_raw (type, get_name (var));
8293 gimple_add_tmp_var (v);
8294 TREE_ADDRESSABLE (v) = 1;
8295 gimple_seq_add_stmt (&new_body,
8296 gimple_build_assign (v, x));
8297 x = build_fold_addr_expr (v);
8299 gimple_seq_add_stmt (&new_body,
8300 gimple_build_assign (new_var, x));
8302 else
8304 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8305 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8306 fb_rvalue);
8307 gimple_seq_add_stmt (&new_body,
8308 gimple_build_assign (new_var, x));
8311 else if (is_variable_sized (var))
8313 tree pvar = DECL_VALUE_EXPR (var);
8314 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8315 pvar = TREE_OPERAND (pvar, 0);
8316 gcc_assert (DECL_P (pvar));
8317 tree new_var = lookup_decl (pvar, ctx);
8318 x = build_receiver_ref (var, false, ctx);
8319 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8320 gimple_seq_add_stmt (&new_body,
8321 gimple_build_assign (new_var, x));
8323 break;
8324 case OMP_CLAUSE_PRIVATE:
8325 if (is_gimple_omp_oacc (ctx->stmt))
8326 break;
8327 var = OMP_CLAUSE_DECL (c);
8328 if (omp_is_reference (var))
8330 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8331 tree new_var = lookup_decl (var, ctx);
8332 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8333 if (TREE_CONSTANT (x))
8335 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8336 get_name (var));
8337 gimple_add_tmp_var (x);
8338 TREE_ADDRESSABLE (x) = 1;
8339 x = build_fold_addr_expr_loc (clause_loc, x);
8341 else
8342 break;
8344 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8345 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8346 gimple_seq_add_stmt (&new_body,
8347 gimple_build_assign (new_var, x));
8349 break;
8350 case OMP_CLAUSE_USE_DEVICE_PTR:
8351 case OMP_CLAUSE_IS_DEVICE_PTR:
8352 var = OMP_CLAUSE_DECL (c);
8353 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8354 x = build_sender_ref (var, ctx);
8355 else
8356 x = build_receiver_ref (var, false, ctx);
8357 if (is_variable_sized (var))
8359 tree pvar = DECL_VALUE_EXPR (var);
8360 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8361 pvar = TREE_OPERAND (pvar, 0);
8362 gcc_assert (DECL_P (pvar));
8363 tree new_var = lookup_decl (pvar, ctx);
8364 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8365 gimple_seq_add_stmt (&new_body,
8366 gimple_build_assign (new_var, x));
8368 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8370 tree new_var = lookup_decl (var, ctx);
8371 new_var = DECL_VALUE_EXPR (new_var);
8372 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8373 new_var = TREE_OPERAND (new_var, 0);
8374 gcc_assert (DECL_P (new_var));
8375 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8376 gimple_seq_add_stmt (&new_body,
8377 gimple_build_assign (new_var, x));
8379 else
8381 tree type = TREE_TYPE (var);
8382 tree new_var = lookup_decl (var, ctx);
8383 if (omp_is_reference (var))
8385 type = TREE_TYPE (type);
8386 if (TREE_CODE (type) != ARRAY_TYPE)
8388 tree v = create_tmp_var_raw (type, get_name (var));
8389 gimple_add_tmp_var (v);
8390 TREE_ADDRESSABLE (v) = 1;
8391 x = fold_convert (type, x);
8392 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8393 fb_rvalue);
8394 gimple_seq_add_stmt (&new_body,
8395 gimple_build_assign (v, x));
8396 x = build_fold_addr_expr (v);
8399 new_var = DECL_VALUE_EXPR (new_var);
8400 x = fold_convert (TREE_TYPE (new_var), x);
8401 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8402 gimple_seq_add_stmt (&new_body,
8403 gimple_build_assign (new_var, x));
8405 break;
8407 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8408 so that firstprivate vars referenced by OMP_CLAUSE_SIZE, if any,
8409 have already been handled, and similarly OMP_CLAUSE_PRIVATE for
8410 VLAs or references to VLAs. */
8411 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8412 switch (OMP_CLAUSE_CODE (c))
8414 tree var;
8415 default:
8416 break;
8417 case OMP_CLAUSE_MAP:
8418 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8419 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8421 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8422 poly_int64 offset = 0;
8423 gcc_assert (prev);
8424 var = OMP_CLAUSE_DECL (c);
8425 if (DECL_P (var)
8426 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8427 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8428 ctx))
8429 && varpool_node::get_create (var)->offloadable)
8430 break;
8431 if (TREE_CODE (var) == INDIRECT_REF
8432 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8433 var = TREE_OPERAND (var, 0);
8434 if (TREE_CODE (var) == COMPONENT_REF)
8436 var = get_addr_base_and_unit_offset (var, &offset);
8437 gcc_assert (var != NULL_TREE && DECL_P (var));
8439 else if (DECL_SIZE (var)
8440 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8442 tree var2 = DECL_VALUE_EXPR (var);
8443 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8444 var2 = TREE_OPERAND (var2, 0);
8445 gcc_assert (DECL_P (var2));
8446 var = var2;
8448 tree new_var = lookup_decl (var, ctx), x;
8449 tree type = TREE_TYPE (new_var);
8450 bool is_ref;
8451 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8452 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8453 == COMPONENT_REF))
8455 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8456 is_ref = true;
8457 new_var = build2 (MEM_REF, type,
8458 build_fold_addr_expr (new_var),
8459 build_int_cst (build_pointer_type (type),
8460 offset));
8462 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8464 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8465 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8466 new_var = build2 (MEM_REF, type,
8467 build_fold_addr_expr (new_var),
8468 build_int_cst (build_pointer_type (type),
8469 offset));
8471 else
8472 is_ref = omp_is_reference (var);
8473 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8474 is_ref = false;
8475 bool ref_to_array = false;
8476 if (is_ref)
8478 type = TREE_TYPE (type);
8479 if (TREE_CODE (type) == ARRAY_TYPE)
8481 type = build_pointer_type (type);
8482 ref_to_array = true;
8485 else if (TREE_CODE (type) == ARRAY_TYPE)
8487 tree decl2 = DECL_VALUE_EXPR (new_var);
8488 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8489 decl2 = TREE_OPERAND (decl2, 0);
8490 gcc_assert (DECL_P (decl2));
8491 new_var = decl2;
8492 type = TREE_TYPE (new_var);
8494 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8495 x = fold_convert_loc (clause_loc, type, x);
8496 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8498 tree bias = OMP_CLAUSE_SIZE (c);
8499 if (DECL_P (bias))
8500 bias = lookup_decl (bias, ctx);
8501 bias = fold_convert_loc (clause_loc, sizetype, bias);
8502 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8503 bias);
8504 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8505 TREE_TYPE (x), x, bias);
8507 if (ref_to_array)
8508 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8509 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8510 if (is_ref && !ref_to_array)
8512 tree t = create_tmp_var_raw (type, get_name (var));
8513 gimple_add_tmp_var (t);
8514 TREE_ADDRESSABLE (t) = 1;
8515 gimple_seq_add_stmt (&new_body,
8516 gimple_build_assign (t, x));
8517 x = build_fold_addr_expr_loc (clause_loc, t);
8519 gimple_seq_add_stmt (&new_body,
8520 gimple_build_assign (new_var, x));
8521 prev = NULL_TREE;
8523 else if (OMP_CLAUSE_CHAIN (c)
8524 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8525 == OMP_CLAUSE_MAP
8526 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8527 == GOMP_MAP_FIRSTPRIVATE_POINTER
8528 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8529 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8530 prev = c;
8531 break;
8532 case OMP_CLAUSE_PRIVATE:
8533 var = OMP_CLAUSE_DECL (c);
8534 if (is_variable_sized (var))
8536 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8537 tree new_var = lookup_decl (var, ctx);
8538 tree pvar = DECL_VALUE_EXPR (var);
8539 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8540 pvar = TREE_OPERAND (pvar, 0);
8541 gcc_assert (DECL_P (pvar));
8542 tree new_pvar = lookup_decl (pvar, ctx);
8543 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8544 tree al = size_int (DECL_ALIGN (var));
8545 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8546 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8547 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8548 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8549 gimple_seq_add_stmt (&new_body,
8550 gimple_build_assign (new_pvar, x));
8552 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8554 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8555 tree new_var = lookup_decl (var, ctx);
8556 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8557 if (TREE_CONSTANT (x))
8558 break;
8559 else
8561 tree atmp
8562 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8563 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8564 tree al = size_int (TYPE_ALIGN (rtype));
8565 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8568 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8569 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8570 gimple_seq_add_stmt (&new_body,
8571 gimple_build_assign (new_var, x));
8573 break;
8576 gimple_seq fork_seq = NULL;
8577 gimple_seq join_seq = NULL;
8579 if (is_oacc_parallel (ctx))
8581 /* If there are reductions on the offloaded region itself, treat
8582 them as a dummy GANG loop. */
8583 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8585 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8586 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8589 gimple_seq_add_seq (&new_body, fork_seq);
8590 gimple_seq_add_seq (&new_body, tgt_body);
8591 gimple_seq_add_seq (&new_body, join_seq);
8593 if (offloaded)
8594 new_body = maybe_catch_exception (new_body);
8596 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8597 gimple_omp_set_body (stmt, new_body);
8600 bind = gimple_build_bind (NULL, NULL,
8601 tgt_bind ? gimple_bind_block (tgt_bind)
8602 : NULL_TREE);
8603 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8604 gimple_bind_add_seq (bind, ilist);
8605 gimple_bind_add_stmt (bind, stmt);
8606 gimple_bind_add_seq (bind, olist);
8608 pop_gimplify_context (NULL);
8610 if (dep_bind)
8612 gimple_bind_add_seq (dep_bind, dep_ilist);
8613 gimple_bind_add_stmt (dep_bind, bind);
8614 gimple_bind_add_seq (dep_bind, dep_olist);
8615 pop_gimplify_context (dep_bind);
8619 /* Expand code for an OpenMP teams directive. */
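/* Editorial sketch (illustrative, not from the original source):

     #pragma omp teams num_teams (4) thread_limit (8)

   lowers to a runtime call of the form

     __builtin_GOMP_teams (4, 8);

   followed by the lowered teams body; an absent clause is passed as
   0, which the runtime treats as "use the implementation
   default".  */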
8621 static void
8622 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8624 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8625 push_gimplify_context ();
8627 tree block = make_node (BLOCK);
8628 gbind *bind = gimple_build_bind (NULL, NULL, block);
8629 gsi_replace (gsi_p, bind, true);
8630 gimple_seq bind_body = NULL;
8631 gimple_seq dlist = NULL;
8632 gimple_seq olist = NULL;
8634 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8635 OMP_CLAUSE_NUM_TEAMS);
8636 if (num_teams == NULL_TREE)
8637 num_teams = build_int_cst (unsigned_type_node, 0);
8638 else
8640 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8641 num_teams = fold_convert (unsigned_type_node, num_teams);
8642 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8644 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8645 OMP_CLAUSE_THREAD_LIMIT);
8646 if (thread_limit == NULL_TREE)
8647 thread_limit = build_int_cst (unsigned_type_node, 0);
8648 else
8650 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8651 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8652 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8653 fb_rvalue);
8656 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8657 &bind_body, &dlist, ctx, NULL);
8658 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8659 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8660 if (!gimple_omp_teams_grid_phony (teams_stmt))
8662 gimple_seq_add_stmt (&bind_body, teams_stmt);
8663 location_t loc = gimple_location (teams_stmt);
8664 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8665 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8666 gimple_set_location (call, loc);
8667 gimple_seq_add_stmt (&bind_body, call);
8670 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8671 gimple_omp_set_body (teams_stmt, NULL);
8672 gimple_seq_add_seq (&bind_body, olist);
8673 gimple_seq_add_seq (&bind_body, dlist);
8674 if (!gimple_omp_teams_grid_phony (teams_stmt))
8675 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8676 gimple_bind_set_body (bind, bind_body);
8678 pop_gimplify_context (bind);
8680 gimple_bind_append_vars (bind, ctx->block_vars);
8681 BLOCK_VARS (block) = ctx->block_vars;
8682 if (BLOCK_VARS (block))
8683 TREE_USED (block) = 1;
8686 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8688 static void
8689 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8690 {
8691 gimple *stmt = gsi_stmt (*gsi_p);
8692 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8693 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8694 gimple_build_omp_return (false));
8695 }
8698 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8699 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8700 of OMP context, but with task_shared_vars set. */
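/* Editorial note: lowering rewrote many privatized variables via
   SET_DECL_VALUE_EXPR, so a statement still mentioning such a decl
   must be regimplified for the value-expr (often a pointer
   dereference) to expand into valid GIMPLE operands; the walker
   below detects exactly those statements.  */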
8702 static tree
8703 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8704 void *data)
8706 tree t = *tp;
8708 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8709 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8710 return t;
8712 if (task_shared_vars
8713 && DECL_P (t)
8714 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8715 return t;
8717 /* If a global variable has been privatized, TREE_CONSTANT on
8718 ADDR_EXPR might be wrong. */
8719 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8720 recompute_tree_invariant_for_addr_expr (t);
8722 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8723 return NULL_TREE;
8726 /* Data to be communicated between lower_omp_regimplify_operands and
8727 lower_omp_regimplify_operands_p. */
8729 struct lower_omp_regimplify_operands_data
8731 omp_context *ctx;
8732 vec<tree> *decls;
8735 /* Helper function for lower_omp_regimplify_operands. Find
8736 omp_member_access_dummy_var vars and adjust temporarily their
8737 DECL_VALUE_EXPRs if needed. */
8739 static tree
8740 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8741 void *data)
8743 tree t = omp_member_access_dummy_var (*tp);
8744 if (t)
8746 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8747 lower_omp_regimplify_operands_data *ldata
8748 = (lower_omp_regimplify_operands_data *) wi->info;
8749 tree o = maybe_lookup_decl (t, ldata->ctx);
8750 if (o != t)
8752 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8753 ldata->decls->safe_push (*tp);
8754 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8755 SET_DECL_VALUE_EXPR (*tp, v);
8758 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8759 return NULL_TREE;
8762 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8763 of omp_member_access_dummy_var vars during regimplification. */
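/* Editorial note: lower_omp_regimplify_operands_p pushes
   (saved value-expr, decl) pairs onto DECLS while installing the
   remapped value-exprs; once gimple_regimplify_operands has run, the
   pop loop below restores each original DECL_VALUE_EXPR.  */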
8765 static void
8766 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8767 gimple_stmt_iterator *gsi_p)
8769 auto_vec<tree, 10> decls;
8770 if (ctx)
8772 struct walk_stmt_info wi;
8773 memset (&wi, '\0', sizeof (wi));
8774 struct lower_omp_regimplify_operands_data data;
8775 data.ctx = ctx;
8776 data.decls = &decls;
8777 wi.info = &data;
8778 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8780 gimple_regimplify_operands (stmt, gsi_p);
8781 while (!decls.is_empty ())
8783 tree t = decls.pop ();
8784 tree v = decls.pop ();
8785 SET_DECL_VALUE_EXPR (t, v);
8789 static void
8790 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8792 gimple *stmt = gsi_stmt (*gsi_p);
8793 struct walk_stmt_info wi;
8794 gcall *call_stmt;
8796 if (gimple_has_location (stmt))
8797 input_location = gimple_location (stmt);
8799 if (task_shared_vars)
8800 memset (&wi, '\0', sizeof (wi));
8802 /* If we have issued syntax errors, avoid doing any heavy lifting.
8803 Just replace the OMP directives with a NOP to avoid
8804 confusing RTL expansion. */
8805 if (seen_error () && is_gimple_omp (stmt))
8807 gsi_replace (gsi_p, gimple_build_nop (), true);
8808 return;
8811 switch (gimple_code (stmt))
8813 case GIMPLE_COND:
8815 gcond *cond_stmt = as_a <gcond *> (stmt);
8816 if ((ctx || task_shared_vars)
8817 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8818 lower_omp_regimplify_p,
8819 ctx ? NULL : &wi, NULL)
8820 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8821 lower_omp_regimplify_p,
8822 ctx ? NULL : &wi, NULL)))
8823 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8825 break;
8826 case GIMPLE_CATCH:
8827 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8828 break;
8829 case GIMPLE_EH_FILTER:
8830 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8831 break;
8832 case GIMPLE_TRY:
8833 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8834 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8835 break;
8836 case GIMPLE_TRANSACTION:
8837 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8838 ctx);
8839 break;
8840 case GIMPLE_BIND:
8841 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8842 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
8843 break;
8844 case GIMPLE_OMP_PARALLEL:
8845 case GIMPLE_OMP_TASK:
8846 ctx = maybe_lookup_ctx (stmt);
8847 gcc_assert (ctx);
8848 if (ctx->cancellable)
8849 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8850 lower_omp_taskreg (gsi_p, ctx);
8851 break;
8852 case GIMPLE_OMP_FOR:
8853 ctx = maybe_lookup_ctx (stmt);
8854 gcc_assert (ctx);
8855 if (ctx->cancellable)
8856 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8857 lower_omp_for (gsi_p, ctx);
8858 break;
8859 case GIMPLE_OMP_SECTIONS:
8860 ctx = maybe_lookup_ctx (stmt);
8861 gcc_assert (ctx);
8862 if (ctx->cancellable)
8863 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8864 lower_omp_sections (gsi_p, ctx);
8865 break;
8866 case GIMPLE_OMP_SINGLE:
8867 ctx = maybe_lookup_ctx (stmt);
8868 gcc_assert (ctx);
8869 lower_omp_single (gsi_p, ctx);
8870 break;
8871 case GIMPLE_OMP_MASTER:
8872 ctx = maybe_lookup_ctx (stmt);
8873 gcc_assert (ctx);
8874 lower_omp_master (gsi_p, ctx);
8875 break;
8876 case GIMPLE_OMP_TASKGROUP:
8877 ctx = maybe_lookup_ctx (stmt);
8878 gcc_assert (ctx);
8879 lower_omp_taskgroup (gsi_p, ctx);
8880 break;
8881 case GIMPLE_OMP_ORDERED:
8882 ctx = maybe_lookup_ctx (stmt);
8883 gcc_assert (ctx);
8884 lower_omp_ordered (gsi_p, ctx);
8885 break;
8886 case GIMPLE_OMP_CRITICAL:
8887 ctx = maybe_lookup_ctx (stmt);
8888 gcc_assert (ctx);
8889 lower_omp_critical (gsi_p, ctx);
8890 break;
8891 case GIMPLE_OMP_ATOMIC_LOAD:
8892 if ((ctx || task_shared_vars)
8893 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8894 as_a <gomp_atomic_load *> (stmt)),
8895 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8896 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8897 break;
8898 case GIMPLE_OMP_TARGET:
8899 ctx = maybe_lookup_ctx (stmt);
8900 gcc_assert (ctx);
8901 lower_omp_target (gsi_p, ctx);
8902 break;
8903 case GIMPLE_OMP_TEAMS:
8904 ctx = maybe_lookup_ctx (stmt);
8905 gcc_assert (ctx);
8906 lower_omp_teams (gsi_p, ctx);
8907 break;
8908 case GIMPLE_OMP_GRID_BODY:
8909 ctx = maybe_lookup_ctx (stmt);
8910 gcc_assert (ctx);
8911 lower_omp_grid_body (gsi_p, ctx);
8912 break;
8913 case GIMPLE_CALL:
8914 tree fndecl;
8915 call_stmt = as_a <gcall *> (stmt);
8916 fndecl = gimple_call_fndecl (call_stmt);
8917 if (fndecl
8918 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8919 switch (DECL_FUNCTION_CODE (fndecl))
8921 case BUILT_IN_GOMP_BARRIER:
8922 if (ctx == NULL)
8923 break;
8924 /* FALLTHRU */
8925 case BUILT_IN_GOMP_CANCEL:
8926 case BUILT_IN_GOMP_CANCELLATION_POINT:
8927 omp_context *cctx;
8928 cctx = ctx;
8929 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8930 cctx = cctx->outer;
8931 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8932 if (!cctx->cancellable)
8934 if (DECL_FUNCTION_CODE (fndecl)
8935 == BUILT_IN_GOMP_CANCELLATION_POINT)
8937 stmt = gimple_build_nop ();
8938 gsi_replace (gsi_p, stmt, false);
8940 break;
8942 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8944 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8945 gimple_call_set_fndecl (call_stmt, fndecl);
8946 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8948 tree lhs;
8949 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8950 gimple_call_set_lhs (call_stmt, lhs);
8951 tree fallthru_label;
8952 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8953 gimple *g;
8954 g = gimple_build_label (fallthru_label);
8955 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8956 g = gimple_build_cond (NE_EXPR, lhs,
8957 fold_convert (TREE_TYPE (lhs),
8958 boolean_false_node),
8959 cctx->cancel_label, fallthru_label);
8960 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8961 break;
8962 default:
8963 break;
8965 /* FALLTHRU */
8966 default:
8967 if ((ctx || task_shared_vars)
8968 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8969 ctx ? NULL : &wi))
8971 /* Just remove clobbers; this should happen only if we have
8972 "privatized" local addressable variables in SIMD regions.
8973 The clobber isn't needed in that case, and gimplifying the address
8974 of the ARRAY_REF into a pointer and creating a MEM_REF based
8975 clobber would create worse code than we get with the clobber
8976 dropped. */
8977 if (gimple_clobber_p (stmt))
8979 gsi_replace (gsi_p, gimple_build_nop (), true);
8980 break;
8982 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8984 break;
8988 static void
8989 lower_omp (gimple_seq *body, omp_context *ctx)
8990 {
8991 location_t saved_location = input_location;
8992 gimple_stmt_iterator gsi;
8993 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8994 lower_omp_1 (&gsi, ctx);
8995 /* During gimplification, we haven't folded statements inside offloading
8996 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8997 if (target_nesting_level || taskreg_nesting_level)
8998 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8999 fold_stmt (&gsi);
9000 input_location = saved_location;
9001 }
9003 /* Main entry point. */
9005 static unsigned int
9006 execute_lower_omp (void)
9008 gimple_seq body;
9009 int i;
9010 omp_context *ctx;
9012 /* This pass always runs, to provide PROP_gimple_lomp.
9013 But often, there is nothing to do. */
9014 if (flag_openacc == 0 && flag_openmp == 0
9015 && flag_openmp_simd == 0)
9016 return 0;
9018 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
9019 delete_omp_context);
9021 body = gimple_body (current_function_decl);
9023 if (hsa_gen_requested_p ())
9024 omp_grid_gridify_all_targets (&body);
9026 scan_omp (&body, NULL);
9027 gcc_assert (taskreg_nesting_level == 0);
9028 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
9029 finish_taskreg_scan (ctx);
9030 taskreg_contexts.release ();
9032 if (all_contexts->root)
9034 if (task_shared_vars)
9035 push_gimplify_context ();
9036 lower_omp (&body, NULL);
9037 if (task_shared_vars)
9038 pop_gimplify_context (NULL);
9041 if (all_contexts)
9043 splay_tree_delete (all_contexts);
9044 all_contexts = NULL;
9046 BITMAP_FREE (task_shared_vars);
9048 /* If the current function is a method, remove the artificial dummy
9049 VAR_DECLs created for non-static data member privatization: they
9050 aren't needed for debuginfo or anything else, have already been
9051 replaced everywhere in the IL, and cause problems with LTO. */
9052 if (DECL_ARGUMENTS (current_function_decl)
9053 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
9054 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
9055 == POINTER_TYPE))
9056 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
9057 return 0;
9060 namespace {
9062 const pass_data pass_data_lower_omp =
9064 GIMPLE_PASS, /* type */
9065 "omplower", /* name */
9066 OPTGROUP_OMP, /* optinfo_flags */
9067 TV_NONE, /* tv_id */
9068 PROP_gimple_any, /* properties_required */
9069 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9070 0, /* properties_destroyed */
9071 0, /* todo_flags_start */
9072 0, /* todo_flags_finish */
9075 class pass_lower_omp : public gimple_opt_pass
9077 public:
9078 pass_lower_omp (gcc::context *ctxt)
9079 : gimple_opt_pass (pass_data_lower_omp, ctxt)
9082 /* opt_pass methods: */
9083 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9085 }; // class pass_lower_omp
9087 } // anon namespace
9089 gimple_opt_pass *
9090 make_pass_lower_omp (gcc::context *ctxt)
9091 {
9092 return new pass_lower_omp (ctxt);
9093 }
9095 /* The following is a utility to diagnose structured block violations.
9096 It is not part of the "omplower" pass, as that's invoked too late. It
9097 should be invoked by the respective front ends after gimplification. */
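/* Editorial sketch (illustrative, not from the original source): a
   branch crossing a structured block boundary, e.g.

     goto l;
     #pragma omp parallel
     { l:; }

   is diagnosed below as "invalid entry to OpenMP structured block"
   (or the vaguer to/from variant when both ends lie in some OMP
   context).  */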
9099 static splay_tree all_labels;
9101 /* Check for mismatched contexts and generate an error if needed. Return
9102 true if an error is detected. */
9104 static bool
9105 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9106 gimple *branch_ctx, gimple *label_ctx)
9108 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9109 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9111 if (label_ctx == branch_ctx)
9112 return false;
9114 const char* kind = NULL;
9116 if (flag_openacc)
9118 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9119 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9121 gcc_checking_assert (kind == NULL);
9122 kind = "OpenACC";
9125 if (kind == NULL)
9127 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9128 kind = "OpenMP";
9131 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9132 so we could traverse it and issue a correct "exit" or "enter" error
9133 message upon a structured block violation.
9135 We used to build that context as a list via tree_cons, but there is
9136 no easy counterpart in gimple tuples. It seems like far too much work
9137 for issuing exit/enter error messages. If someone really misses the
9138 distinct error message... patches welcome. */
9140 #if 0
9141 /* Try to avoid confusing the user by producing an error message
9142 with correct "exit" or "enter" verbiage. We prefer "exit"
9143 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9144 if (branch_ctx == NULL)
9145 exit_p = false;
9146 else
9148 while (label_ctx)
9150 if (TREE_VALUE (label_ctx) == branch_ctx)
9152 exit_p = false;
9153 break;
9155 label_ctx = TREE_CHAIN (label_ctx);
9159 if (exit_p)
9160 error ("invalid exit from %s structured block", kind);
9161 else
9162 error ("invalid entry to %s structured block", kind);
9163 #endif
9165 /* If it's obvious we have an invalid entry, be specific about the error. */
9166 if (branch_ctx == NULL)
9167 error ("invalid entry to %s structured block", kind);
9168 else
9170 /* Otherwise, be vague and lazy, but efficient. */
9171 error ("invalid branch to/from %s structured block", kind);
9174 gsi_replace (gsi_p, gimple_build_nop (), false);
9175 return true;
9178 /* Pass 1: Create a minimal tree of structured blocks, and record
9179 where each label is found. */
9181 static tree
9182 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9183 struct walk_stmt_info *wi)
9185 gimple *context = (gimple *) wi->info;
9186 gimple *inner_context;
9187 gimple *stmt = gsi_stmt (*gsi_p);
9189 *handled_ops_p = true;
9191 switch (gimple_code (stmt))
9193 WALK_SUBSTMTS;
9195 case GIMPLE_OMP_PARALLEL:
9196 case GIMPLE_OMP_TASK:
9197 case GIMPLE_OMP_SECTIONS:
9198 case GIMPLE_OMP_SINGLE:
9199 case GIMPLE_OMP_SECTION:
9200 case GIMPLE_OMP_MASTER:
9201 case GIMPLE_OMP_ORDERED:
9202 case GIMPLE_OMP_CRITICAL:
9203 case GIMPLE_OMP_TARGET:
9204 case GIMPLE_OMP_TEAMS:
9205 case GIMPLE_OMP_TASKGROUP:
9206 /* The minimal context here is just the current OMP construct. */
9207 inner_context = stmt;
9208 wi->info = inner_context;
9209 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9210 wi->info = context;
9211 break;
9213 case GIMPLE_OMP_FOR:
9214 inner_context = stmt;
9215 wi->info = inner_context;
9216 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9217 walk them. */
9218 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9219 diagnose_sb_1, NULL, wi);
9220 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9221 wi->info = context;
9222 break;
9224 case GIMPLE_LABEL:
9225 splay_tree_insert (all_labels,
9226 (splay_tree_key) gimple_label_label (
9227 as_a <glabel *> (stmt)),
9228 (splay_tree_value) context);
9229 break;
9231 default:
9232 break;
9235 return NULL_TREE;
9238 /* Pass 2: Check each branch and see if its context differs from that of
9239 the destination label's context. */
9241 static tree
9242 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9243 struct walk_stmt_info *wi)
9245 gimple *context = (gimple *) wi->info;
9246 splay_tree_node n;
9247 gimple *stmt = gsi_stmt (*gsi_p);
9249 *handled_ops_p = true;
9251 switch (gimple_code (stmt))
9253 WALK_SUBSTMTS;
9255 case GIMPLE_OMP_PARALLEL:
9256 case GIMPLE_OMP_TASK:
9257 case GIMPLE_OMP_SECTIONS:
9258 case GIMPLE_OMP_SINGLE:
9259 case GIMPLE_OMP_SECTION:
9260 case GIMPLE_OMP_MASTER:
9261 case GIMPLE_OMP_ORDERED:
9262 case GIMPLE_OMP_CRITICAL:
9263 case GIMPLE_OMP_TARGET:
9264 case GIMPLE_OMP_TEAMS:
9265 case GIMPLE_OMP_TASKGROUP:
9266 wi->info = stmt;
9267 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9268 wi->info = context;
9269 break;
9271 case GIMPLE_OMP_FOR:
9272 wi->info = stmt;
9273 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9274 walk them. */
9275 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9276 diagnose_sb_2, NULL, wi);
9277 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9278 wi->info = context;
9279 break;
9281 case GIMPLE_COND:
9283 gcond *cond_stmt = as_a <gcond *> (stmt);
9284 tree lab = gimple_cond_true_label (cond_stmt);
9285 if (lab)
9287 n = splay_tree_lookup (all_labels,
9288 (splay_tree_key) lab);
9289 diagnose_sb_0 (gsi_p, context,
9290 n ? (gimple *) n->value : NULL);
9292 lab = gimple_cond_false_label (cond_stmt);
9293 if (lab)
9295 n = splay_tree_lookup (all_labels,
9296 (splay_tree_key) lab);
9297 diagnose_sb_0 (gsi_p, context,
9298 n ? (gimple *) n->value : NULL);
9301 break;
9303 case GIMPLE_GOTO:
9305 tree lab = gimple_goto_dest (stmt);
9306 if (TREE_CODE (lab) != LABEL_DECL)
9307 break;
9309 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9310 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9312 break;
9314 case GIMPLE_SWITCH:
9316 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9317 unsigned int i;
9318 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9320 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9321 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9322 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9323 break;
9326 break;
9328 case GIMPLE_RETURN:
9329 diagnose_sb_0 (gsi_p, context, NULL);
9330 break;
9332 default:
9333 break;
9336 return NULL_TREE;
9339 static unsigned int
9340 diagnose_omp_structured_block_errors (void)
9341 {
9342 struct walk_stmt_info wi;
9343 gimple_seq body = gimple_body (current_function_decl);
9345 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9347 memset (&wi, 0, sizeof (wi));
9348 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9350 memset (&wi, 0, sizeof (wi));
9351 wi.want_locations = true;
9352 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9354 gimple_set_body (current_function_decl, body);
9356 splay_tree_delete (all_labels);
9357 all_labels = NULL;
9359 return 0;
9360 }
9362 namespace {
9364 const pass_data pass_data_diagnose_omp_blocks =
9366 GIMPLE_PASS, /* type */
9367 "*diagnose_omp_blocks", /* name */
9368 OPTGROUP_OMP, /* optinfo_flags */
9369 TV_NONE, /* tv_id */
9370 PROP_gimple_any, /* properties_required */
9371 0, /* properties_provided */
9372 0, /* properties_destroyed */
9373 0, /* todo_flags_start */
9374 0, /* todo_flags_finish */
9377 class pass_diagnose_omp_blocks : public gimple_opt_pass
9379 public:
9380 pass_diagnose_omp_blocks (gcc::context *ctxt)
9381 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9384 /* opt_pass methods: */
9385 virtual bool gate (function *)
9387 return flag_openacc || flag_openmp || flag_openmp_simd;
9389 virtual unsigned int execute (function *)
9391 return diagnose_omp_structured_block_errors ();
9394 }; // class pass_diagnose_omp_blocks
9396 } // anon namespace
9398 gimple_opt_pass *
9399 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9400 {
9401 return new pass_diagnose_omp_blocks (ctxt);
9402 }
9405 #include "gt-omp-low.h"