/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
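
/* As an informal sketch (the generated GIMPLE and the clause handling
   are more involved than this), a construct such as

       #pragma omp parallel shared (x)
       x++;

   ends up split into a child function that receives the shared state
   through a .omp_data_s record,

       struct .omp_data_s { int *x; };

       void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       { (*.omp_data_i->x)++; }

   which the runtime then invokes via the libgomp entry point
   GOMP_parallel.  */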

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label that GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump to during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages
     regarding invalid gotos.  The outermost ctx is depth 1, with depth 0
     being reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}
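
/* For example (a sketch; the front ends create the actual decls): in a
   C++ member function containing

       #pragma omp parallel private (m)

   where M is a non-static data member, the clause names an artificial
   VAR_DECL whose DECL_VALUE_EXPR is this->m; the function above walks
   that value expression back to the artificial "this" PARM_DECL.  */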

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" forms allow the variable not to have
   been entered; otherwise we assert that it must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in a different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}
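
/* Concretely (a sketch of the policy above): for an "int x" shared on a
   parallel whose address is never observable, the communication field can
   be a plain "int" copied in and out; once X is TREE_ADDRESSABLE, or is
   shared with a task that may outlive the spawning code, the field must
   instead be an "int *" pointing at the original variable.  */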

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because a task needs
     to take its address.  But we don't need to take the address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
        x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
        = splay_tree_lookup (ctx->outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
            x = var;
          else
            x = lookup_decl (var, ctx->outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, ctx->outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (ctx->outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, ctx->outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
        {
          outer = outer->outer;
          gcc_assert (outer
                      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
        }
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If VAR is a reference,
       it is possibly shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
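/* MASK is a bit mask; as inferred from the uses below: bit 0 (value 1)
   installs the field in CTX->FIELD_MAP / CTX->RECORD_TYPE, bit 1
   (value 2) in CTX->SFIELD_MAP / CTX->SRECORD_TYPE, bit 2 (value 4)
   declares an array VAR through a pointer-to-pointer field, and bit 3
   (value 8) keys the maps by &DECL_UID (VAR) rather than VAR itself.  */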

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          /* Ignore shared directives in teams construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                  && !is_variable_sized (t))
                {
                  by_ref = use_pointer_for_field (t, ctx);
                  install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 3, ctx);
          else
            install_var_field (decl, false, 3, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target link"
             attribute need to be copied.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE__GRIDDIM_:
          if (ctx->outer)
            {
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;

        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__GRIDDIM_:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
                              task_copy ? "_omp_cpyfn" : "_omp_fn");
}
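
/* For a function foo this yields names such as foo._omp_fn.0 or, for the
   task copy function, foo._omp_cpyfn.1 (the numbering is assigned by
   clone_function_name).  */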

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
                                     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
                       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
        a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
        if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
          *p = TREE_CHAIN (*p);
        else
          {
            tree chain = TREE_CHAIN (*p);
            *p = copy_node (*p);
            p = &TREE_CHAIN (*p);
            *p = chain;
          }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
        g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                            DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
                                 ? "omp target entrypoint"
                                 : "omp declare target");
      DECL_ATTRIBUTES (decl)
        = tree_cons (get_identifier (target_attr),
                     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
                  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
                  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier (".omp_data_o"),
                      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}

/* Callback for walk_gimple_seq.  Check whether a combined parallel
   contains an OMP_FOR for which gimple_omp_for_combined_into_p is true.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
                       bool *handled_ops_p,
                       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
          && gimple_omp_for_kind (stmt)
             == *(const enum gf_mask *) (wi->info))
        {
          wi->info = stmt;
          return integer_zero_node;
        }
      break;
    default:
      break;
    }
  return NULL;
}

/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
                              omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
         and then (fd.collapse - 1) temporaries with the same
         type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
          && TREE_CODE (fd.loop.n2) != INTEGER_CST)
        {
          count += fd.collapse - 1;
          /* If there are lastprivate clauses on the inner
             GIMPLE_OMP_FOR, add one more temporary for the total number
             of iterations (product of count1 ... countN-1).  */
          if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
                               OMP_CLAUSE_LASTPRIVATE))
            count++;
          else if (msk == GF_OMP_FOR_KIND_FOR
                   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
                                       OMP_CLAUSE_LASTPRIVATE))
            count++;
        }
      for (i = 0; i < count; i++)
        {
          tree temp = create_tmp_var (type);
          tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
          insert_decl_map (&outer_ctx->cb, temp, temp);
          OMP_CLAUSE_DECL (c) = temp;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
          gimple_omp_taskreg_set_clauses (stmt, c);
        }
    }
}
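
/* For instance (a sketch; see the count computation above): a combined
   "parallel for" with collapse (2) and a non-constant outer bound gets
   three _LOOPTEMP_ clauses (istart, iend, and one count temporary), plus
   one more when a lastprivate clause is present.  */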

/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
                          OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
                         TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}

/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
        {
          if (DECL_HAS_VALUE_EXPR_P (t))
            t = unshare_expr (DECL_VALUE_EXPR (t));
          *tp = t;
        }
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
1886 /* If any decls have been made addressable during scan_omp,
1887 adjust their fields if needed, and layout record types
1888 of parallel/task constructs. */
1890 static void
1891 finish_taskreg_scan (omp_context *ctx)
1893 if (ctx->record_type == NULL_TREE)
1894 return;
1896 /* If any task_shared_vars were needed, verify all
1897 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1898 statements if use_pointer_for_field hasn't changed
1899 because of that. If it did, update field types now. */
1900 if (task_shared_vars)
1902 tree c;
1904 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1905 c; c = OMP_CLAUSE_CHAIN (c))
1906 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1907 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1909 tree decl = OMP_CLAUSE_DECL (c);
1911 /* Global variables don't need to be copied,
1912 the receiver side will use them directly. */
1913 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1914 continue;
1915 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1916 || !use_pointer_for_field (decl, ctx))
1917 continue;
1918 tree field = lookup_field (decl, ctx);
1919 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1920 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1921 continue;
1922 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1923 TREE_THIS_VOLATILE (field) = 0;
1924 DECL_USER_ALIGN (field) = 0;
1925 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1926 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1927 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1928 if (ctx->srecord_type)
1930 tree sfield = lookup_sfield (decl, ctx);
1931 TREE_TYPE (sfield) = TREE_TYPE (field);
1932 TREE_THIS_VOLATILE (sfield) = 0;
1933 DECL_USER_ALIGN (sfield) = 0;
1934 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1935 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1936 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1941 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1943 layout_type (ctx->record_type);
1944 fixup_child_record_type (ctx);
1946 else
1948 location_t loc = gimple_location (ctx->stmt);
1949 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1950 /* Move VLA fields to the end. */
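/* Rationale (a sketch): the record must end up looking like

     struct .omp_data_s { long n; ... fixed fields ...; char vla[n]; };

   with variable-length fields last, so that every fixed field keeps
   a compile-time-constant offset and only the total size of the
   record becomes non-constant.  */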
1951 p = &TYPE_FIELDS (ctx->record_type);
1952 while (*p)
1953 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1954 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1956 *q = *p;
1957 *p = TREE_CHAIN (*p);
1958 TREE_CHAIN (*q) = NULL_TREE;
1959 q = &TREE_CHAIN (*q);
1961 else
1962 p = &DECL_CHAIN (*p);
1963 *p = vla_fields;
1964 if (gimple_omp_task_taskloop_p (ctx->stmt))
1966 /* Move the fields corresponding to the first and second _looptemp_
1967 clauses to the front. These are filled in by GOMP_taskloop
1968 and thus need to be at specific positions. */
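/* Sketch of the intended layout, with F1 and F2 the fields of the
   two _looptemp_ clauses:

     struct .omp_data_s { long f1; long f2; ... other fields ...; };

   GOMP_taskloop stores the iteration-range bounds of each generated
   task through the first two fields, hence the fixed positions.  */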
1969 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1970 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1971 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1972 OMP_CLAUSE__LOOPTEMP_);
1973 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1974 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1975 p = &TYPE_FIELDS (ctx->record_type);
1976 while (*p)
1977 if (*p == f1 || *p == f2)
1978 *p = DECL_CHAIN (*p);
1979 else
1980 p = &DECL_CHAIN (*p);
1981 DECL_CHAIN (f1) = f2;
1982 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1983 TYPE_FIELDS (ctx->record_type) = f1;
1984 if (ctx->srecord_type)
1986 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1987 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1988 p = &TYPE_FIELDS (ctx->srecord_type);
1989 while (*p)
1990 if (*p == f1 || *p == f2)
1991 *p = DECL_CHAIN (*p);
1992 else
1993 p = &DECL_CHAIN (*p);
1994 DECL_CHAIN (f1) = f2;
1995 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
1996 TYPE_FIELDS (ctx->srecord_type) = f1;
1999 layout_type (ctx->record_type);
2000 fixup_child_record_type (ctx);
2001 if (ctx->srecord_type)
2002 layout_type (ctx->srecord_type);
2003 tree t = fold_convert_loc (loc, long_integer_type_node,
2004 TYPE_SIZE_UNIT (ctx->record_type));
2005 if (TREE_CODE (t) != INTEGER_CST)
2007 t = unshare_expr (t);
2008 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2010 gimple_omp_task_set_arg_size (ctx->stmt, t);
2011 t = build_int_cst (long_integer_type_node,
2012 TYPE_ALIGN_UNIT (ctx->record_type));
2013 gimple_omp_task_set_arg_align (ctx->stmt, t);
2017 /* Return the enclosing target context, or NULL if there is none. */
2019 static omp_context *
2020 enclosing_target_ctx (omp_context *ctx)
2022 for (; ctx; ctx = ctx->outer)
2023 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2024 break;
2026 return ctx;
2029 /* Return true if ctx is part of an oacc kernels region. */
2031 static bool
2032 ctx_in_oacc_kernels_region (omp_context *ctx)
2034 for (; ctx != NULL; ctx = ctx->outer)
2036 gimple *stmt = ctx->stmt;
2037 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2038 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2039 return true;
2042 return false;
2045 /* Check the parallelism clauses inside a kernels region.
2046 Until kernels handling moves to use the same loop indirection
2047 scheme as parallel, we need to do this checking early. */
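/* For example (a sketch), the inner loop here is diagnosed below,
   since gang parallelism is already taken by the containing loop:

     #pragma acc kernels
     #pragma acc loop gang
     for (int i = 0; i < n; i++)
       #pragma acc loop gang     <-- "inner loop uses same OpenACC
       for (int j = 0; j < m; j++)    parallelism as containing loop"
         body;  */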
2049 static unsigned
2050 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2052 bool checking = true;
2053 unsigned outer_mask = 0;
2054 unsigned this_mask = 0;
2055 bool has_seq = false, has_auto = false;
2057 if (ctx->outer)
2058 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2059 if (!stmt)
2061 checking = false;
2062 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2063 return outer_mask;
2064 stmt = as_a <gomp_for *> (ctx->stmt);
2067 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2069 switch (OMP_CLAUSE_CODE (c))
2071 case OMP_CLAUSE_GANG:
2072 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2073 break;
2074 case OMP_CLAUSE_WORKER:
2075 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2076 break;
2077 case OMP_CLAUSE_VECTOR:
2078 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2079 break;
2080 case OMP_CLAUSE_SEQ:
2081 has_seq = true;
2082 break;
2083 case OMP_CLAUSE_AUTO:
2084 has_auto = true;
2085 break;
2086 default:
2087 break;
2091 if (checking)
2093 if (has_seq && (this_mask || has_auto))
2094 error_at (gimple_location (stmt), "%<seq%> overrides other"
2095 " OpenACC loop specifiers");
2096 else if (has_auto && this_mask)
2097 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2098 " OpenACC loop specifiers");
2100 if (this_mask & outer_mask)
2101 error_at (gimple_location (stmt), "inner loop uses same"
2102 " OpenACC parallelism as containing loop");
2105 return outer_mask | this_mask;
2108 /* Scan a GIMPLE_OMP_FOR. */
2110 static omp_context *
2111 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2113 omp_context *ctx;
2114 size_t i;
2115 tree clauses = gimple_omp_for_clauses (stmt);
2117 ctx = new_omp_context (stmt, outer_ctx);
2119 if (is_gimple_omp_oacc (stmt))
2121 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2123 if (!tgt || is_oacc_parallel (tgt))
2124 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2126 char const *check = NULL;
2128 switch (OMP_CLAUSE_CODE (c))
2130 case OMP_CLAUSE_GANG:
2131 check = "gang";
2132 break;
2134 case OMP_CLAUSE_WORKER:
2135 check = "worker";
2136 break;
2138 case OMP_CLAUSE_VECTOR:
2139 check = "vector";
2140 break;
2142 default:
2143 break;
2146 if (check && OMP_CLAUSE_OPERAND (c, 0))
2147 error_at (gimple_location (stmt),
2148 "argument not permitted on %qs clause in"
2149 " OpenACC %<parallel%>", check);
2152 if (tgt && is_oacc_kernels (tgt))
2154 /* Strip out reductions, as they are not handled yet. */
2155 tree *prev_ptr = &clauses;
2157 while (tree probe = *prev_ptr)
2159 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2161 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2162 *prev_ptr = *next_ptr;
2163 else
2164 prev_ptr = next_ptr;
2167 gimple_omp_for_set_clauses (stmt, clauses);
2168 check_oacc_kernel_gwv (stmt, ctx);
2172 scan_sharing_clauses (clauses, ctx);
2174 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2175 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2177 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2178 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2179 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2180 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2182 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2183 return ctx;
2186 /* Duplicate #pragma omp simd, producing one copy for SIMT and another for SIMD. */
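/* Roughly (a sketch), the construct is rewritten as

     if (GOMP_USE_SIMT ())
       <copy of the loop with an artificial _simt_ clause>
     else
       <the original simd loop>

   and both copies are scanned; which branch survives is decided
   later, once it is known whether the code runs on a SIMT target.  */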
2188 static void
2189 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2190 omp_context *outer_ctx)
2192 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2193 gsi_replace (gsi, bind, false);
2194 gimple_seq seq = NULL;
2195 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2196 tree cond = create_tmp_var_raw (integer_type_node);
2197 DECL_CONTEXT (cond) = current_function_decl;
2198 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2199 gimple_bind_set_vars (bind, cond);
2200 gimple_call_set_lhs (g, cond);
2201 gimple_seq_add_stmt (&seq, g);
2202 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2203 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2204 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2205 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2206 gimple_seq_add_stmt (&seq, g);
2207 g = gimple_build_label (lab1);
2208 gimple_seq_add_stmt (&seq, g);
2209 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2210 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2211 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2212 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2213 gimple_omp_for_set_clauses (new_stmt, clause);
2214 gimple_seq_add_stmt (&seq, new_stmt);
2215 g = gimple_build_goto (lab3);
2216 gimple_seq_add_stmt (&seq, g);
2217 g = gimple_build_label (lab2);
2218 gimple_seq_add_stmt (&seq, g);
2219 gimple_seq_add_stmt (&seq, stmt);
2220 g = gimple_build_label (lab3);
2221 gimple_seq_add_stmt (&seq, g);
2222 gimple_bind_set_body (bind, seq);
2223 update_stmt (bind);
2224 scan_omp_for (new_stmt, outer_ctx);
2225 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2228 /* Scan an OpenMP sections directive. */
2230 static void
2231 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2233 omp_context *ctx;
2235 ctx = new_omp_context (stmt, outer_ctx);
2236 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2237 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2240 /* Scan an OpenMP single directive. */
2242 static void
2243 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2245 omp_context *ctx;
2246 tree name;
2248 ctx = new_omp_context (stmt, outer_ctx);
2249 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2250 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2251 name = create_tmp_var_name (".omp_copy_s");
2252 name = build_decl (gimple_location (stmt),
2253 TYPE_DECL, name, ctx->record_type);
2254 TYPE_NAME (ctx->record_type) = name;
2256 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2257 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2259 if (TYPE_FIELDS (ctx->record_type) == NULL)
2260 ctx->record_type = NULL;
2261 else
2262 layout_type (ctx->record_type);
2265 /* Scan a GIMPLE_OMP_TARGET. */
2267 static void
2268 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2270 omp_context *ctx;
2271 tree name;
2272 bool offloaded = is_gimple_omp_offloaded (stmt);
2273 tree clauses = gimple_omp_target_clauses (stmt);
2275 ctx = new_omp_context (stmt, outer_ctx);
2276 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2277 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2278 name = create_tmp_var_name (".omp_data_t");
2279 name = build_decl (gimple_location (stmt),
2280 TYPE_DECL, name, ctx->record_type);
2281 DECL_ARTIFICIAL (name) = 1;
2282 DECL_NAMELESS (name) = 1;
2283 TYPE_NAME (ctx->record_type) = name;
2284 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2286 if (offloaded)
2288 create_omp_child_function (ctx, false);
2289 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2292 scan_sharing_clauses (clauses, ctx);
2293 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2295 if (TYPE_FIELDS (ctx->record_type) == NULL)
2296 ctx->record_type = ctx->receiver_decl = NULL;
2297 else
2299 TYPE_FIELDS (ctx->record_type)
2300 = nreverse (TYPE_FIELDS (ctx->record_type));
2301 if (flag_checking)
2303 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2304 for (tree field = TYPE_FIELDS (ctx->record_type);
2305 field;
2306 field = DECL_CHAIN (field))
2307 gcc_assert (DECL_ALIGN (field) == align);
2309 layout_type (ctx->record_type);
2310 if (offloaded)
2311 fixup_child_record_type (ctx);
2315 /* Scan an OpenMP teams directive. */
2317 static void
2318 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2320 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2321 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2322 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2325 /* Check nesting restrictions. */
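/* For example (a sketch), the barrier below is rejected, because a
   barrier may not be closely nested inside a worksharing region:

     #pragma omp parallel
     #pragma omp for
     for (int i = 0; i < n; i++)
       {
         #pragma omp barrier    <-- error
       }  */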
2326 static bool
2327 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2329 tree c;
2331 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2332 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2333 the original copy of its contents. */
2334 return true;
2336 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2337 inside an OpenACC CTX. */
2338 if (!(is_gimple_omp (stmt)
2339 && is_gimple_omp_oacc (stmt))
2340 /* Except for atomic codes that we share with OpenMP. */
2341 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2342 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2344 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2346 error_at (gimple_location (stmt),
2347 "non-OpenACC construct inside of OpenACC routine");
2348 return false;
2350 else
2351 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2352 if (is_gimple_omp (octx->stmt)
2353 && is_gimple_omp_oacc (octx->stmt))
2355 error_at (gimple_location (stmt),
2356 "non-OpenACC construct inside of OpenACC region");
2357 return false;
2361 if (ctx != NULL)
2363 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2364 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2366 c = NULL_TREE;
2367 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2369 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2370 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2372 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2373 && (ctx->outer == NULL
2374 || !gimple_omp_for_combined_into_p (ctx->stmt)
2375 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2376 || (gimple_omp_for_kind (ctx->outer->stmt)
2377 != GF_OMP_FOR_KIND_FOR)
2378 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2380 error_at (gimple_location (stmt),
2381 "%<ordered simd threads%> must be closely "
2382 "nested inside of %<for simd%> region");
2383 return false;
2385 return true;
2388 error_at (gimple_location (stmt),
2389 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2390 " may not be nested inside %<simd%> region");
2391 return false;
2393 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2395 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2396 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2397 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2398 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2400 error_at (gimple_location (stmt),
2401 "only %<distribute%> or %<parallel%> regions are "
2402 "allowed to be strictly nested inside %<teams%> "
2403 "region");
2404 return false;
2408 switch (gimple_code (stmt))
2410 case GIMPLE_OMP_FOR:
2411 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2412 return true;
2413 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2415 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2417 error_at (gimple_location (stmt),
2418 "%<distribute%> region must be strictly nested "
2419 "inside %<teams%> construct");
2420 return false;
2422 return true;
2424 /* We split taskloop into a task with a nested taskloop in it. */
2425 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2426 return true;
2427 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2429 bool ok = false;
2431 if (ctx)
2432 switch (gimple_code (ctx->stmt))
2434 case GIMPLE_OMP_FOR:
2435 ok = (gimple_omp_for_kind (ctx->stmt)
2436 == GF_OMP_FOR_KIND_OACC_LOOP);
2437 break;
2439 case GIMPLE_OMP_TARGET:
2440 switch (gimple_omp_target_kind (ctx->stmt))
2442 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2443 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2444 ok = true;
2445 break;
2447 default:
2448 break;
2451 default:
2452 break;
2454 else if (oacc_get_fn_attrib (current_function_decl))
2455 ok = true;
2456 if (!ok)
2458 error_at (gimple_location (stmt),
2459 "OpenACC loop directive must be associated with"
2460 " an OpenACC compute region");
2461 return false;
2464 /* FALLTHRU */
2465 case GIMPLE_CALL:
2466 if (is_gimple_call (stmt)
2467 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2468 == BUILT_IN_GOMP_CANCEL
2469 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2470 == BUILT_IN_GOMP_CANCELLATION_POINT))
2472 const char *bad = NULL;
2473 const char *kind = NULL;
2474 const char *construct
2475 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2476 == BUILT_IN_GOMP_CANCEL)
2477 ? "#pragma omp cancel"
2478 : "#pragma omp cancellation point";
2479 if (ctx == NULL)
2481 error_at (gimple_location (stmt), "orphaned %qs construct",
2482 construct);
2483 return false;
2485 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2486 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2487 : 0)
2489 case 1:
2490 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2491 bad = "#pragma omp parallel";
2492 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2493 == BUILT_IN_GOMP_CANCEL
2494 && !integer_zerop (gimple_call_arg (stmt, 1)))
2495 ctx->cancellable = true;
2496 kind = "parallel";
2497 break;
2498 case 2:
2499 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2500 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2501 bad = "#pragma omp for";
2502 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2503 == BUILT_IN_GOMP_CANCEL
2504 && !integer_zerop (gimple_call_arg (stmt, 1)))
2506 ctx->cancellable = true;
2507 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2508 OMP_CLAUSE_NOWAIT))
2509 warning_at (gimple_location (stmt), 0,
2510 "%<#pragma omp cancel for%> inside "
2511 "%<nowait%> for construct");
2512 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2513 OMP_CLAUSE_ORDERED))
2514 warning_at (gimple_location (stmt), 0,
2515 "%<#pragma omp cancel for%> inside "
2516 "%<ordered%> for construct");
2518 kind = "for";
2519 break;
2520 case 4:
2521 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2522 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2523 bad = "#pragma omp sections";
2524 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2525 == BUILT_IN_GOMP_CANCEL
2526 && !integer_zerop (gimple_call_arg (stmt, 1)))
2528 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2530 ctx->cancellable = true;
2531 if (omp_find_clause (gimple_omp_sections_clauses
2532 (ctx->stmt),
2533 OMP_CLAUSE_NOWAIT))
2534 warning_at (gimple_location (stmt), 0,
2535 "%<#pragma omp cancel sections%> inside "
2536 "%<nowait%> sections construct");
2538 else
2540 gcc_assert (ctx->outer
2541 && gimple_code (ctx->outer->stmt)
2542 == GIMPLE_OMP_SECTIONS);
2543 ctx->outer->cancellable = true;
2544 if (omp_find_clause (gimple_omp_sections_clauses
2545 (ctx->outer->stmt),
2546 OMP_CLAUSE_NOWAIT))
2547 warning_at (gimple_location (stmt), 0,
2548 "%<#pragma omp cancel sections%> inside "
2549 "%<nowait%> sections construct");
2552 kind = "sections";
2553 break;
2554 case 8:
2555 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2556 bad = "#pragma omp task";
2557 else
2559 for (omp_context *octx = ctx->outer;
2560 octx; octx = octx->outer)
2562 switch (gimple_code (octx->stmt))
2564 case GIMPLE_OMP_TASKGROUP:
2565 break;
2566 case GIMPLE_OMP_TARGET:
2567 if (gimple_omp_target_kind (octx->stmt)
2568 != GF_OMP_TARGET_KIND_REGION)
2569 continue;
2570 /* FALLTHRU */
2571 case GIMPLE_OMP_PARALLEL:
2572 case GIMPLE_OMP_TEAMS:
2573 error_at (gimple_location (stmt),
2574 "%<%s taskgroup%> construct not closely "
2575 "nested inside of %<taskgroup%> region",
2576 construct);
2577 return false;
2578 default:
2579 continue;
2581 break;
2583 ctx->cancellable = true;
2585 kind = "taskgroup";
2586 break;
2587 default:
2588 error_at (gimple_location (stmt), "invalid arguments");
2589 return false;
2591 if (bad)
2593 error_at (gimple_location (stmt),
2594 "%<%s %s%> construct not closely nested inside of %qs",
2595 construct, kind, bad);
2596 return false;
2599 /* FALLTHRU */
2600 case GIMPLE_OMP_SECTIONS:
2601 case GIMPLE_OMP_SINGLE:
2602 for (; ctx != NULL; ctx = ctx->outer)
2603 switch (gimple_code (ctx->stmt))
2605 case GIMPLE_OMP_FOR:
2606 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2607 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2608 break;
2609 /* FALLTHRU */
2610 case GIMPLE_OMP_SECTIONS:
2611 case GIMPLE_OMP_SINGLE:
2612 case GIMPLE_OMP_ORDERED:
2613 case GIMPLE_OMP_MASTER:
2614 case GIMPLE_OMP_TASK:
2615 case GIMPLE_OMP_CRITICAL:
2616 if (is_gimple_call (stmt))
2618 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2619 != BUILT_IN_GOMP_BARRIER)
2620 return true;
2621 error_at (gimple_location (stmt),
2622 "barrier region may not be closely nested inside "
2623 "of work-sharing, %<critical%>, %<ordered%>, "
2624 "%<master%>, explicit %<task%> or %<taskloop%> "
2625 "region");
2626 return false;
2628 error_at (gimple_location (stmt),
2629 "work-sharing region may not be closely nested inside "
2630 "of work-sharing, %<critical%>, %<ordered%>, "
2631 "%<master%>, explicit %<task%> or %<taskloop%> region");
2632 return false;
2633 case GIMPLE_OMP_PARALLEL:
2634 case GIMPLE_OMP_TEAMS:
2635 return true;
2636 case GIMPLE_OMP_TARGET:
2637 if (gimple_omp_target_kind (ctx->stmt)
2638 == GF_OMP_TARGET_KIND_REGION)
2639 return true;
2640 break;
2641 default:
2642 break;
2644 break;
2645 case GIMPLE_OMP_MASTER:
2646 for (; ctx != NULL; ctx = ctx->outer)
2647 switch (gimple_code (ctx->stmt))
2649 case GIMPLE_OMP_FOR:
2650 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2651 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2652 break;
2653 /* FALLTHRU */
2654 case GIMPLE_OMP_SECTIONS:
2655 case GIMPLE_OMP_SINGLE:
2656 case GIMPLE_OMP_TASK:
2657 error_at (gimple_location (stmt),
2658 "%<master%> region may not be closely nested inside "
2659 "of work-sharing, explicit %<task%> or %<taskloop%> "
2660 "region");
2661 return false;
2662 case GIMPLE_OMP_PARALLEL:
2663 case GIMPLE_OMP_TEAMS:
2664 return true;
2665 case GIMPLE_OMP_TARGET:
2666 if (gimple_omp_target_kind (ctx->stmt)
2667 == GF_OMP_TARGET_KIND_REGION)
2668 return true;
2669 break;
2670 default:
2671 break;
2673 break;
2674 case GIMPLE_OMP_TASK:
2675 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2676 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2677 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2678 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2680 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2681 error_at (OMP_CLAUSE_LOCATION (c),
2682 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2683 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2684 return false;
2686 break;
2687 case GIMPLE_OMP_ORDERED:
2688 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2689 c; c = OMP_CLAUSE_CHAIN (c))
2691 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2693 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2694 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2695 continue;
2697 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2698 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2699 || kind == OMP_CLAUSE_DEPEND_SINK)
2701 tree oclause;
2702 /* Look for a containing ordered(N) loop. */
2703 if (ctx == NULL
2704 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2705 || (oclause
2706 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2707 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2709 error_at (OMP_CLAUSE_LOCATION (c),
2710 "%<ordered%> construct with %<depend%> clause "
2711 "must be closely nested inside an %<ordered%> "
2712 "loop");
2713 return false;
2715 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2717 error_at (OMP_CLAUSE_LOCATION (c),
2718 "%<ordered%> construct with %<depend%> clause "
2719 "must be closely nested inside a loop with "
2720 "%<ordered%> clause with a parameter");
2721 return false;
2724 else
2726 error_at (OMP_CLAUSE_LOCATION (c),
2727 "invalid depend kind in omp %<ordered%> %<depend%>");
2728 return false;
2731 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2732 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2734 /* An ordered simd must be closely nested inside a simd region, and
2735 a simd region must not contain constructs other than ordered simd;
2736 therefore an ordered simd is either orphaned, or ctx->stmt must be
2737 a simd. The latter case has already been handled earlier. */
2739 if (ctx != NULL)
2741 error_at (gimple_location (stmt),
2742 "%<ordered%> %<simd%> must be closely nested inside "
2743 "%<simd%> region");
2744 return false;
2747 for (; ctx != NULL; ctx = ctx->outer)
2748 switch (gimple_code (ctx->stmt))
2750 case GIMPLE_OMP_CRITICAL:
2751 case GIMPLE_OMP_TASK:
2752 case GIMPLE_OMP_ORDERED:
2753 ordered_in_taskloop:
2754 error_at (gimple_location (stmt),
2755 "%<ordered%> region may not be closely nested inside "
2756 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2757 "%<taskloop%> region");
2758 return false;
2759 case GIMPLE_OMP_FOR:
2760 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2761 goto ordered_in_taskloop;
2762 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2763 OMP_CLAUSE_ORDERED) == NULL)
2765 error_at (gimple_location (stmt),
2766 "%<ordered%> region must be closely nested inside "
2767 "a loop region with an %<ordered%> clause");
2768 return false;
2770 return true;
2771 case GIMPLE_OMP_TARGET:
2772 if (gimple_omp_target_kind (ctx->stmt)
2773 != GF_OMP_TARGET_KIND_REGION)
2774 break;
2775 /* FALLTHRU */
2776 case GIMPLE_OMP_PARALLEL:
2777 case GIMPLE_OMP_TEAMS:
2778 error_at (gimple_location (stmt),
2779 "%<ordered%> region must be closely nested inside "
2780 "a loop region with an %<ordered%> clause");
2781 return false;
2782 default:
2783 break;
2785 break;
2786 case GIMPLE_OMP_CRITICAL:
2788 tree this_stmt_name
2789 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2790 for (; ctx != NULL; ctx = ctx->outer)
2791 if (gomp_critical *other_crit
2792 = dyn_cast <gomp_critical *> (ctx->stmt))
2793 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2795 error_at (gimple_location (stmt),
2796 "%<critical%> region may not be nested inside "
2797 "a %<critical%> region with the same name");
2798 return false;
2801 break;
2802 case GIMPLE_OMP_TEAMS:
2803 if (ctx == NULL
2804 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2805 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2807 error_at (gimple_location (stmt),
2808 "%<teams%> construct not closely nested inside of "
2809 "%<target%> construct");
2810 return false;
2812 break;
2813 case GIMPLE_OMP_TARGET:
2814 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2815 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2816 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2817 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2819 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2820 error_at (OMP_CLAUSE_LOCATION (c),
2821 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2822 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2823 return false;
2825 if (is_gimple_omp_offloaded (stmt)
2826 && oacc_get_fn_attrib (cfun->decl) != NULL)
2828 error_at (gimple_location (stmt),
2829 "OpenACC region inside of OpenACC routine, nested "
2830 "parallelism not supported yet");
2831 return false;
2833 for (; ctx != NULL; ctx = ctx->outer)
2835 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2837 if (is_gimple_omp (stmt)
2838 && is_gimple_omp_oacc (stmt)
2839 && is_gimple_omp (ctx->stmt))
2841 error_at (gimple_location (stmt),
2842 "OpenACC construct inside of non-OpenACC region");
2843 return false;
2845 continue;
2848 const char *stmt_name, *ctx_stmt_name;
2849 switch (gimple_omp_target_kind (stmt))
2851 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2852 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2853 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2854 case GF_OMP_TARGET_KIND_ENTER_DATA:
2855 stmt_name = "target enter data"; break;
2856 case GF_OMP_TARGET_KIND_EXIT_DATA:
2857 stmt_name = "target exit data"; break;
2858 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2859 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2860 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2861 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2862 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2863 stmt_name = "enter/exit data"; break;
2864 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2865 break;
2866 default: gcc_unreachable ();
2868 switch (gimple_omp_target_kind (ctx->stmt))
2870 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2871 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2872 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2873 ctx_stmt_name = "parallel"; break;
2874 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2875 ctx_stmt_name = "kernels"; break;
2876 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2877 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2878 ctx_stmt_name = "host_data"; break;
2879 default: gcc_unreachable ();
2882 /* OpenACC/OpenMP mismatch? */
2883 if (is_gimple_omp_oacc (stmt)
2884 != is_gimple_omp_oacc (ctx->stmt))
2886 error_at (gimple_location (stmt),
2887 "%s %qs construct inside of %s %qs region",
2888 (is_gimple_omp_oacc (stmt)
2889 ? "OpenACC" : "OpenMP"), stmt_name,
2890 (is_gimple_omp_oacc (ctx->stmt)
2891 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2892 return false;
2894 if (is_gimple_omp_offloaded (ctx->stmt))
2896 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2897 if (is_gimple_omp_oacc (ctx->stmt))
2899 error_at (gimple_location (stmt),
2900 "%qs construct inside of %qs region",
2901 stmt_name, ctx_stmt_name);
2902 return false;
2904 else
2906 warning_at (gimple_location (stmt), 0,
2907 "%qs construct inside of %qs region",
2908 stmt_name, ctx_stmt_name);
2912 break;
2913 default:
2914 break;
2916 return true;
2920 /* Helper function for scan_omp.
2922 Callback for walk_tree, or for the operand walk in walk_gimple_stmt,
2923 used to scan for OMP directives in TP. */
2925 static tree
2926 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
2928 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2929 omp_context *ctx = (omp_context *) wi->info;
2930 tree t = *tp;
2932 switch (TREE_CODE (t))
2934 case VAR_DECL:
2935 case PARM_DECL:
2936 case LABEL_DECL:
2937 case RESULT_DECL:
2938 if (ctx)
2940 tree repl = remap_decl (t, &ctx->cb);
2941 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
2942 *tp = repl;
2944 break;
2946 default:
2947 if (ctx && TYPE_P (t))
2948 *tp = remap_type (t, &ctx->cb);
2949 else if (!DECL_P (t))
2951 *walk_subtrees = 1;
2952 if (ctx)
2954 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
2955 if (tem != TREE_TYPE (t))
2957 if (TREE_CODE (t) == INTEGER_CST)
2958 *tp = wide_int_to_tree (tem, wi::to_wide (t));
2959 else
2960 TREE_TYPE (t) = tem;
2964 break;
2967 return NULL_TREE;
2970 /* Return true if FNDECL is a setjmp or a longjmp. */
2972 static bool
2973 setjmp_or_longjmp_p (const_tree fndecl)
2975 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2976 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
2977 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
2978 return true;
2980 tree declname = DECL_NAME (fndecl);
2981 if (!declname)
2982 return false;
2983 const char *name = IDENTIFIER_POINTER (declname);
2984 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
2988 /* Helper function for scan_omp.
2990 Callback for walk_gimple_stmt used to scan for OMP directives in
2991 the current statement in GSI. */
2993 static tree
2994 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2995 struct walk_stmt_info *wi)
2997 gimple *stmt = gsi_stmt (*gsi);
2998 omp_context *ctx = (omp_context *) wi->info;
3000 if (gimple_has_location (stmt))
3001 input_location = gimple_location (stmt);
3003 /* Check the nesting restrictions. */
3004 bool remove = false;
3005 if (is_gimple_omp (stmt))
3006 remove = !check_omp_nesting_restrictions (stmt, ctx);
3007 else if (is_gimple_call (stmt))
3009 tree fndecl = gimple_call_fndecl (stmt);
3010 if (fndecl)
3012 if (setjmp_or_longjmp_p (fndecl)
3013 && ctx
3014 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3015 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3017 remove = true;
3018 error_at (gimple_location (stmt),
3019 "setjmp/longjmp inside simd construct");
3021 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3022 switch (DECL_FUNCTION_CODE (fndecl))
3024 case BUILT_IN_GOMP_BARRIER:
3025 case BUILT_IN_GOMP_CANCEL:
3026 case BUILT_IN_GOMP_CANCELLATION_POINT:
3027 case BUILT_IN_GOMP_TASKYIELD:
3028 case BUILT_IN_GOMP_TASKWAIT:
3029 case BUILT_IN_GOMP_TASKGROUP_START:
3030 case BUILT_IN_GOMP_TASKGROUP_END:
3031 remove = !check_omp_nesting_restrictions (stmt, ctx);
3032 break;
3033 default:
3034 break;
3038 if (remove)
3040 stmt = gimple_build_nop ();
3041 gsi_replace (gsi, stmt, false);
3044 *handled_ops_p = true;
3046 switch (gimple_code (stmt))
3048 case GIMPLE_OMP_PARALLEL:
3049 taskreg_nesting_level++;
3050 scan_omp_parallel (gsi, ctx);
3051 taskreg_nesting_level--;
3052 break;
3054 case GIMPLE_OMP_TASK:
3055 taskreg_nesting_level++;
3056 scan_omp_task (gsi, ctx);
3057 taskreg_nesting_level--;
3058 break;
3060 case GIMPLE_OMP_FOR:
3061 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3062 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3063 && omp_maybe_offloaded_ctx (ctx)
3064 && omp_max_simt_vf ())
3065 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3066 else
3067 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3068 break;
3070 case GIMPLE_OMP_SECTIONS:
3071 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3072 break;
3074 case GIMPLE_OMP_SINGLE:
3075 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3076 break;
3078 case GIMPLE_OMP_SECTION:
3079 case GIMPLE_OMP_MASTER:
3080 case GIMPLE_OMP_TASKGROUP:
3081 case GIMPLE_OMP_ORDERED:
3082 case GIMPLE_OMP_CRITICAL:
3083 case GIMPLE_OMP_GRID_BODY:
3084 ctx = new_omp_context (stmt, ctx);
3085 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3086 break;
3088 case GIMPLE_OMP_TARGET:
3089 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3090 break;
3092 case GIMPLE_OMP_TEAMS:
3093 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3094 break;
3096 case GIMPLE_BIND:
3098 tree var;
3100 *handled_ops_p = false;
3101 if (ctx)
3102 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3103 var ;
3104 var = DECL_CHAIN (var))
3105 insert_decl_map (&ctx->cb, var, var);
3107 break;
3108 default:
3109 *handled_ops_p = false;
3110 break;
3113 return NULL_TREE;
3117 /* Scan all the statements starting at the current statement. CTX
3118 contains context information about the OMP directives and
3119 clauses found during the scan. */
3121 static void
3122 scan_omp (gimple_seq *body_p, omp_context *ctx)
3124 location_t saved_location;
3125 struct walk_stmt_info wi;
3127 memset (&wi, 0, sizeof (wi));
3128 wi.info = ctx;
3129 wi.want_locations = true;
3131 saved_location = input_location;
3132 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3133 input_location = saved_location;
3136 /* Re-gimplification and code generation routines. */
3138 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3139 of BIND if in a method. */
3141 static void
3142 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3144 if (DECL_ARGUMENTS (current_function_decl)
3145 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3146 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3147 == POINTER_TYPE))
3149 tree vars = gimple_bind_vars (bind);
3150 for (tree *pvar = &vars; *pvar; )
3151 if (omp_member_access_dummy_var (*pvar))
3152 *pvar = DECL_CHAIN (*pvar);
3153 else
3154 pvar = &DECL_CHAIN (*pvar);
3155 gimple_bind_set_vars (bind, vars);
3159 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3160 block and its subblocks. */
3162 static void
3163 remove_member_access_dummy_vars (tree block)
3165 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3166 if (omp_member_access_dummy_var (*pvar))
3167 *pvar = DECL_CHAIN (*pvar);
3168 else
3169 pvar = &DECL_CHAIN (*pvar);
3171 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3172 remove_member_access_dummy_vars (block);
3175 /* If a context was created for STMT when it was scanned, return it. */
3177 static omp_context *
3178 maybe_lookup_ctx (gimple *stmt)
3180 splay_tree_node n;
3181 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3182 return n ? (omp_context *) n->value : NULL;
3186 /* Find the mapping for DECL in CTX or the immediately enclosing
3187 context that has a mapping for DECL.
3189 If CTX is a nested parallel directive, we may have to use the decl
3190 mappings created in CTX's parent context. Suppose that we have the
3191 following parallel nesting (variable UIDs shown for clarity):
3193 iD.1562 = 0;
3194 #omp parallel shared(iD.1562) -> outer parallel
3195 iD.1562 = iD.1562 + 1;
3197 #omp parallel shared (iD.1562) -> inner parallel
3198 iD.1562 = iD.1562 - 1;
3200 Each parallel structure will create a distinct .omp_data_s structure
3201 for copying iD.1562 in/out of the directive:
3203 outer parallel .omp_data_s.1.i -> iD.1562
3204 inner parallel .omp_data_s.2.i -> iD.1562
3206 A shared variable mapping will produce a copy-out operation before
3207 the parallel directive and a copy-in operation after it. So, in
3208 this case we would have:
3210 iD.1562 = 0;
3211 .omp_data_o.1.i = iD.1562;
3212 #omp parallel shared(iD.1562) -> outer parallel
3213 .omp_data_i.1 = &.omp_data_o.1
3214 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3216 .omp_data_o.2.i = iD.1562; -> **
3217 #omp parallel shared(iD.1562) -> inner parallel
3218 .omp_data_i.2 = &.omp_data_o.2
3219 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3222 ** This is a problem. The symbol iD.1562 cannot be referenced
3223 inside the body of the outer parallel region. But since we are
3224 emitting this copy operation while expanding the inner parallel
3225 directive, we need to access the CTX structure of the outer
3226 parallel directive to get the correct mapping:
3228 .omp_data_o.2.i = .omp_data_i.1->i
3230 Since there may be other workshare or parallel directives enclosing
3231 the parallel directive, it may be necessary to walk up the context
3232 parent chain. This is not a problem in general because nested
3233 parallelism happens only rarely. */
3235 static tree
3236 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3238 tree t;
3239 omp_context *up;
3241 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3242 t = maybe_lookup_decl (decl, up);
3244 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3246 return t ? t : decl;
3250 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3251 in outer contexts. */
3253 static tree
3254 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3256 tree t = NULL;
3257 omp_context *up;
3259 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3260 t = maybe_lookup_decl (decl, up);
3262 return t ? t : decl;
3266 /* Construct the initialization value for reduction operation OP. */
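/* The result is the identity element of OP on TYPE, so that folding
   a partial result into it is a no-op.  E.g. (a sketch):

     +  -  |  ^  ||  !=   ->  0
     *  &&  ==            ->  1
     &                    ->  ~0
     max                  ->  minimum value of TYPE
     min                  ->  maximum value of TYPE  */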
3268 tree
3269 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3271 switch (op)
3273 case PLUS_EXPR:
3274 case MINUS_EXPR:
3275 case BIT_IOR_EXPR:
3276 case BIT_XOR_EXPR:
3277 case TRUTH_OR_EXPR:
3278 case TRUTH_ORIF_EXPR:
3279 case TRUTH_XOR_EXPR:
3280 case NE_EXPR:
3281 return build_zero_cst (type);
3283 case MULT_EXPR:
3284 case TRUTH_AND_EXPR:
3285 case TRUTH_ANDIF_EXPR:
3286 case EQ_EXPR:
3287 return fold_convert_loc (loc, type, integer_one_node);
3289 case BIT_AND_EXPR:
3290 return fold_convert_loc (loc, type, integer_minus_one_node);
3292 case MAX_EXPR:
3293 if (SCALAR_FLOAT_TYPE_P (type))
3295 REAL_VALUE_TYPE max, min;
3296 if (HONOR_INFINITIES (type))
3298 real_inf (&max);
3299 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3301 else
3302 real_maxval (&min, 1, TYPE_MODE (type));
3303 return build_real (type, min);
3305 else if (POINTER_TYPE_P (type))
3307 wide_int min
3308 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3309 return wide_int_to_tree (type, min);
3311 else
3313 gcc_assert (INTEGRAL_TYPE_P (type));
3314 return TYPE_MIN_VALUE (type);
3317 case MIN_EXPR:
3318 if (SCALAR_FLOAT_TYPE_P (type))
3320 REAL_VALUE_TYPE max;
3321 if (HONOR_INFINITIES (type))
3322 real_inf (&max);
3323 else
3324 real_maxval (&max, 0, TYPE_MODE (type));
3325 return build_real (type, max);
3327 else if (POINTER_TYPE_P (type))
3329 wide_int max
3330 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3331 return wide_int_to_tree (type, max);
3333 else
3335 gcc_assert (INTEGRAL_TYPE_P (type));
3336 return TYPE_MAX_VALUE (type);
3339 default:
3340 gcc_unreachable ();
3344 /* Construct the initialization value for reduction CLAUSE. */
3346 tree
3347 omp_reduction_init (tree clause, tree type)
3349 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3350 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3353 /* Return alignment to be assumed for var in CLAUSE, which should be
3354 OMP_CLAUSE_ALIGNED. */
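/* For example (a sketch): given

     #pragma omp simd aligned(p : 32)

   this returns 32; for a plain "aligned(p)" with no argument it
   falls through to the implementation-defined default computed
   below from the target's preferred SIMD modes.  */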
3356 static tree
3357 omp_clause_aligned_alignment (tree clause)
3359 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3360 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3362 /* Otherwise return the implementation-defined alignment. */
3363 unsigned int al = 1;
3364 opt_scalar_mode mode_iter;
3365 auto_vector_sizes sizes;
3366 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3367 poly_uint64 vs = 0;
3368 for (unsigned int i = 0; i < sizes.length (); ++i)
3369 vs = ordered_max (vs, sizes[i]);
3370 static enum mode_class classes[]
3371 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3372 for (int i = 0; i < 4; i += 2)
3373 /* The for loop above dictates that we only walk through scalar classes. */
3374 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3376 scalar_mode mode = mode_iter.require ();
3377 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3378 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3379 continue;
3380 while (maybe_ne (vs, 0U)
3381 && known_lt (GET_MODE_SIZE (vmode), vs)
3382 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3383 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3385 tree type = lang_hooks.types.type_for_mode (mode, 1);
3386 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3387 continue;
3388 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3389 GET_MODE_SIZE (mode));
3390 type = build_vector_type (type, nelts);
3391 if (TYPE_MODE (type) != vmode)
3392 continue;
3393 if (TYPE_ALIGN_UNIT (type) > al)
3394 al = TYPE_ALIGN_UNIT (type);
3396 return build_int_cst (integer_type_node, al);
3400 /* This structure is part of the interface between lower_rec_simd_input_clauses
3401 and lower_rec_input_clauses. */
3403 struct omplow_simd_context {
3404 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3405 tree idx;
3406 tree lane;
3407 vec<tree, va_heap> simt_eargs;
3408 gimple_seq simt_dlist;
3409 poly_uint64_pod max_vf;
3410 bool is_simt;
3413 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3414 privatization. */
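/* Sketch of the non-SIMT case, for a privatized scalar X: a
   max_vf-sized "omp simd array"

     int D.simd[max_vf];

   is created, and uses of X inside the loop are redirected to
   D.simd[lane] via DECL_VALUE_EXPR, which the vectorizer can later
   turn into a single vector register.  */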
3416 static bool
3417 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3418 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3420 if (known_eq (sctx->max_vf, 0U))
3422 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3423 if (maybe_gt (sctx->max_vf, 1U))
3425 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3426 OMP_CLAUSE_SAFELEN);
3427 if (c)
3429 poly_uint64 safe_len;
3430 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3431 || maybe_lt (safe_len, 1U))
3432 sctx->max_vf = 1;
3433 else
3434 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3437 if (maybe_gt (sctx->max_vf, 1U))
3439 sctx->idx = create_tmp_var (unsigned_type_node);
3440 sctx->lane = create_tmp_var (unsigned_type_node);
3443 if (known_eq (sctx->max_vf, 1U))
3444 return false;
3446 if (sctx->is_simt)
3448 if (is_gimple_reg (new_var))
3450 ivar = lvar = new_var;
3451 return true;
3453 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3454 ivar = lvar = create_tmp_var (type);
3455 TREE_ADDRESSABLE (ivar) = 1;
3456 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3457 NULL, DECL_ATTRIBUTES (ivar));
3458 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3459 tree clobber = build_constructor (type, NULL);
3460 TREE_THIS_VOLATILE (clobber) = 1;
3461 gimple *g = gimple_build_assign (ivar, clobber);
3462 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3464 else
3466 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3467 tree avar = create_tmp_var_raw (atype);
3468 if (TREE_ADDRESSABLE (new_var))
3469 TREE_ADDRESSABLE (avar) = 1;
3470 DECL_ATTRIBUTES (avar)
3471 = tree_cons (get_identifier ("omp simd array"), NULL,
3472 DECL_ATTRIBUTES (avar));
3473 gimple_add_tmp_var (avar);
3474 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3475 NULL_TREE, NULL_TREE);
3476 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3477 NULL_TREE, NULL_TREE);
3479 if (DECL_P (new_var))
3481 SET_DECL_VALUE_EXPR (new_var, lvar);
3482 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3484 return true;
3487 /* Helper function of lower_rec_input_clauses. For a reference used
3488 in a simd reduction, add an underlying variable that it will reference. */
3490 static void
3491 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3493 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3494 if (TREE_CONSTANT (z))
3496 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3497 get_name (new_vard));
3498 gimple_add_tmp_var (z);
3499 TREE_ADDRESSABLE (z) = 1;
3500 z = build_fold_addr_expr_loc (loc, z);
3501 gimplify_assign (new_vard, z, ilist);
3505 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3506 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3507 private variables. Initialization statements go in ILIST, while calls
3508 to destructors go in DLIST. */
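/* For example (a sketch): for

     #pragma omp parallel firstprivate(a)

   ILIST receives the child-side initialization, roughly

     a = .omp_data_i->a;

   (or a copy-constructor call in C++), and DLIST receives the
   matching destructor call, if any.  */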
3510 static void
3511 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3512 omp_context *ctx, struct omp_for_data *fd)
3514 tree c, dtor, copyin_seq, x, ptr;
3515 bool copyin_by_ref = false;
3516 bool lastprivate_firstprivate = false;
3517 bool reduction_omp_orig_ref = false;
3518 int pass;
3519 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3520 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3521 omplow_simd_context sctx = omplow_simd_context ();
3522 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3523 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3524 gimple_seq llist[3] = { };
3526 copyin_seq = NULL;
3527 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3529 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3530 with data-sharing clauses referencing variable-sized vars. That
3531 is unnecessarily hard to support and very unlikely to result in
3532 vectorized code anyway. */
3533 if (is_simd)
3534 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3535 switch (OMP_CLAUSE_CODE (c))
3537 case OMP_CLAUSE_LINEAR:
3538 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3539 sctx.max_vf = 1;
3540 /* FALLTHRU */
3541 case OMP_CLAUSE_PRIVATE:
3542 case OMP_CLAUSE_FIRSTPRIVATE:
3543 case OMP_CLAUSE_LASTPRIVATE:
3544 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3545 sctx.max_vf = 1;
3546 break;
3547 case OMP_CLAUSE_REDUCTION:
3548 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3549 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3550 sctx.max_vf = 1;
3551 break;
3552 default:
3553 continue;
3556 /* Add a placeholder for simduid. */
3557 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3558 sctx.simt_eargs.safe_push (NULL_TREE);
3560 /* Do all the fixed-sized types in the first pass, and the variable-sized
3561 types in the second pass. This makes sure that the scalar arguments to
3562 the variable-sized types are processed before we use them in the
3563 variable-sized operations. */
3564 for (pass = 0; pass < 2; ++pass)
3566 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3568 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3569 tree var, new_var;
3570 bool by_ref;
3571 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3573 switch (c_kind)
3575 case OMP_CLAUSE_PRIVATE:
3576 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3577 continue;
3578 break;
3579 case OMP_CLAUSE_SHARED:
3580 /* Ignore shared directives in teams construct. */
3581 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3582 continue;
3583 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3585 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3586 || is_global_var (OMP_CLAUSE_DECL (c)));
3587 continue;
3589 case OMP_CLAUSE_FIRSTPRIVATE:
3590 case OMP_CLAUSE_COPYIN:
3591 break;
3592 case OMP_CLAUSE_LINEAR:
3593 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3594 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3595 lastprivate_firstprivate = true;
3596 break;
3597 case OMP_CLAUSE_REDUCTION:
3598 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3599 reduction_omp_orig_ref = true;
3600 break;
3601 case OMP_CLAUSE__LOOPTEMP_:
3602 /* Handle _looptemp_ clauses only on parallel/task. */
3603 if (fd)
3604 continue;
3605 break;
3606 case OMP_CLAUSE_LASTPRIVATE:
3607 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3609 lastprivate_firstprivate = true;
3610 if (pass != 0 || is_taskloop_ctx (ctx))
3611 continue;
3613 /* Even without a corresponding firstprivate, if the
3614 decl is Fortran allocatable, it needs an outer var
3615 reference. */
3616 else if (pass == 0
3617 && lang_hooks.decls.omp_private_outer_ref
3618 (OMP_CLAUSE_DECL (c)))
3619 lastprivate_firstprivate = true;
3620 break;
3621 case OMP_CLAUSE_ALIGNED:
3622 if (pass == 0)
3623 continue;
3624 var = OMP_CLAUSE_DECL (c);
3625 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3626 && !is_global_var (var))
3628 new_var = maybe_lookup_decl (var, ctx);
3629 if (new_var == NULL_TREE)
3630 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3631 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3632 tree alarg = omp_clause_aligned_alignment (c);
3633 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3634 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3635 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3636 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3637 gimplify_and_add (x, ilist);
3639 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3640 && is_global_var (var))
3642 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3643 new_var = lookup_decl (var, ctx);
3644 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3645 t = build_fold_addr_expr_loc (clause_loc, t);
3646 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3647 tree alarg = omp_clause_aligned_alignment (c);
3648 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3649 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3650 t = fold_convert_loc (clause_loc, ptype, t);
3651 x = create_tmp_var (ptype);
3652 t = build2 (MODIFY_EXPR, ptype, x, t);
3653 gimplify_and_add (t, ilist);
3654 t = build_simple_mem_ref_loc (clause_loc, x);
3655 SET_DECL_VALUE_EXPR (new_var, t);
3656 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3658 continue;
3659 default:
3660 continue;
3663 new_var = var = OMP_CLAUSE_DECL (c);
3664 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3666 var = TREE_OPERAND (var, 0);
3667 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3668 var = TREE_OPERAND (var, 0);
3669 if (TREE_CODE (var) == INDIRECT_REF
3670 || TREE_CODE (var) == ADDR_EXPR)
3671 var = TREE_OPERAND (var, 0);
3672 if (is_variable_sized (var))
3674 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3675 var = DECL_VALUE_EXPR (var);
3676 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3677 var = TREE_OPERAND (var, 0);
3678 gcc_assert (DECL_P (var));
3680 new_var = var;
3682 if (c_kind != OMP_CLAUSE_COPYIN)
3683 new_var = lookup_decl (var, ctx);
3685 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3687 if (pass != 0)
3688 continue;
3690 /* C/C++ array section reductions. */
3691 else if (c_kind == OMP_CLAUSE_REDUCTION
3692 && var != OMP_CLAUSE_DECL (c))
3694 if (pass == 0)
3695 continue;
3697 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3698 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3699 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3701 tree b = TREE_OPERAND (orig_var, 1);
3702 b = maybe_lookup_decl (b, ctx);
3703 if (b == NULL)
3705 b = TREE_OPERAND (orig_var, 1);
3706 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3708 if (integer_zerop (bias))
3709 bias = b;
3710 else
3712 bias = fold_convert_loc (clause_loc,
3713 TREE_TYPE (b), bias);
3714 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3715 TREE_TYPE (b), b, bias);
3717 orig_var = TREE_OPERAND (orig_var, 0);
3719 if (TREE_CODE (orig_var) == INDIRECT_REF
3720 || TREE_CODE (orig_var) == ADDR_EXPR)
3721 orig_var = TREE_OPERAND (orig_var, 0);
3722 tree d = OMP_CLAUSE_DECL (c);
3723 tree type = TREE_TYPE (d);
3724 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3725 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3726 const char *name = get_name (orig_var);
3727 if (TREE_CONSTANT (v))
3729 x = create_tmp_var_raw (type, name);
3730 gimple_add_tmp_var (x);
3731 TREE_ADDRESSABLE (x) = 1;
3732 x = build_fold_addr_expr_loc (clause_loc, x);
3734 else
3736 tree atmp
3737 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3738 tree t = maybe_lookup_decl (v, ctx);
3739 if (t)
3740 v = t;
3741 else
3742 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3743 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3744 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3745 TREE_TYPE (v), v,
3746 build_int_cst (TREE_TYPE (v), 1));
3747 t = fold_build2_loc (clause_loc, MULT_EXPR,
3748 TREE_TYPE (v), t,
3749 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3750 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3751 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3754 tree ptype = build_pointer_type (TREE_TYPE (type));
3755 x = fold_convert_loc (clause_loc, ptype, x);
3756 tree y = create_tmp_var (ptype, name);
3757 gimplify_assign (y, x, ilist);
3758 x = y;
3759 tree yb = y;
3761 if (!integer_zerop (bias))
3763 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3764 bias);
3765 yb = fold_convert_loc (clause_loc, pointer_sized_int_node, x);
3767 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3768 pointer_sized_int_node, yb, bias);
3769 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3770 yb = create_tmp_var (ptype, name);
3771 gimplify_assign (yb, x, ilist);
3772 x = yb;
3775 d = TREE_OPERAND (d, 0);
3776 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3777 d = TREE_OPERAND (d, 0);
3778 if (TREE_CODE (d) == ADDR_EXPR)
3780 if (orig_var != var)
3782 gcc_assert (is_variable_sized (orig_var));
3783 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3785 gimplify_assign (new_var, x, ilist);
3786 tree new_orig_var = lookup_decl (orig_var, ctx);
3787 tree t = build_fold_indirect_ref (new_var);
3788 DECL_IGNORED_P (new_var) = 0;
3789 TREE_THIS_NOTRAP (t) = 1;
3790 SET_DECL_VALUE_EXPR (new_orig_var, t);
3791 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3793 else
3795 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3796 build_int_cst (ptype, 0));
3797 SET_DECL_VALUE_EXPR (new_var, x);
3798 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3801 else
3803 gcc_assert (orig_var == var);
3804 if (TREE_CODE (d) == INDIRECT_REF)
3806 x = create_tmp_var (ptype, name);
3807 TREE_ADDRESSABLE (x) = 1;
3808 gimplify_assign (x, yb, ilist);
3809 x = build_fold_addr_expr_loc (clause_loc, x);
3811 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3812 gimplify_assign (new_var, x, ilist);
3814 tree y1 = create_tmp_var (ptype, NULL);
3815 gimplify_assign (y1, y, ilist);
3816 tree i2 = NULL_TREE, y2 = NULL_TREE;
3817 tree body2 = NULL_TREE, end2 = NULL_TREE;
3818 tree y3 = NULL_TREE, y4 = NULL_TREE;
3819 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3821 y2 = create_tmp_var (ptype, NULL);
3822 gimplify_assign (y2, y, ilist);
3823 tree ref = build_outer_var_ref (var, ctx);
3824 /* For references, build_outer_var_ref already performs this. */
3825 if (TREE_CODE (d) == INDIRECT_REF)
3826 gcc_assert (omp_is_reference (var));
3827 else if (TREE_CODE (d) == ADDR_EXPR)
3828 ref = build_fold_addr_expr (ref);
3829 else if (omp_is_reference (var))
3830 ref = build_fold_addr_expr (ref);
3831 ref = fold_convert_loc (clause_loc, ptype, ref);
3832 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3833 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3835 y3 = create_tmp_var (ptype, NULL);
3836 gimplify_assign (y3, unshare_expr (ref), ilist);
3838 if (is_simd)
3840 y4 = create_tmp_var (ptype, NULL);
3841 gimplify_assign (y4, ref, dlist);
3844 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3845 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3846 tree body = create_artificial_label (UNKNOWN_LOCATION);
3847 tree end = create_artificial_label (UNKNOWN_LOCATION);
3848 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3849 if (y2)
3851 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3852 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3853 body2 = create_artificial_label (UNKNOWN_LOCATION);
3854 end2 = create_artificial_label (UNKNOWN_LOCATION);
3855 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3857 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3859 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3860 tree decl_placeholder
3861 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3862 SET_DECL_VALUE_EXPR (decl_placeholder,
3863 build_simple_mem_ref (y1));
3864 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3865 SET_DECL_VALUE_EXPR (placeholder,
3866 y3 ? build_simple_mem_ref (y3)
3867 : error_mark_node);
3868 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3869 x = lang_hooks.decls.omp_clause_default_ctor
3870 (c, build_simple_mem_ref (y1),
3871 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3872 if (x)
3873 gimplify_and_add (x, ilist);
3874 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3876 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3877 lower_omp (&tseq, ctx);
3878 gimple_seq_add_seq (ilist, tseq);
3880 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3881 if (is_simd)
3883 SET_DECL_VALUE_EXPR (decl_placeholder,
3884 build_simple_mem_ref (y2));
3885 SET_DECL_VALUE_EXPR (placeholder,
3886 build_simple_mem_ref (y4));
3887 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3888 lower_omp (&tseq, ctx);
3889 gimple_seq_add_seq (dlist, tseq);
3890 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3892 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3893 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3894 x = lang_hooks.decls.omp_clause_dtor
3895 (c, build_simple_mem_ref (y2));
3896 if (x)
3898 gimple_seq tseq = NULL;
3899 dtor = x;
3900 gimplify_stmt (&dtor, &tseq);
3901 gimple_seq_add_seq (dlist, tseq);
3904 else
3906 x = omp_reduction_init (c, TREE_TYPE (type));
3907 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3909 /* reduction(-:var) sums up the partial results, so it
3910 acts identically to reduction(+:var). */
3911 if (code == MINUS_EXPR)
3912 code = PLUS_EXPR;
3914 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3915 if (is_simd)
3917 x = build2 (code, TREE_TYPE (type),
3918 build_simple_mem_ref (y4),
3919 build_simple_mem_ref (y2));
3920 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3923 gimple *g
3924 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3925 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3926 gimple_seq_add_stmt (ilist, g);
3927 if (y3)
3929 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3930 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3931 gimple_seq_add_stmt (ilist, g);
3933 g = gimple_build_assign (i, PLUS_EXPR, i,
3934 build_int_cst (TREE_TYPE (i), 1));
3935 gimple_seq_add_stmt (ilist, g);
3936 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3937 gimple_seq_add_stmt (ilist, g);
3938 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3939 if (y2)
3941 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3942 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3943 gimple_seq_add_stmt (dlist, g);
3944 if (y4)
3946 g = gimple_build_assign
3947 (y4, POINTER_PLUS_EXPR, y4,
3948 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3949 gimple_seq_add_stmt (dlist, g);
3951 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3952 build_int_cst (TREE_TYPE (i2), 1));
3953 gimple_seq_add_stmt (dlist, g);
3954 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3955 gimple_seq_add_stmt (dlist, g);
3956 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3958 continue;
3960 else if (is_variable_sized (var))
3962 /* For variable sized types, we need to allocate the
3963 actual storage here. Call alloca and store the
3964 result in the pointer decl that we created elsewhere. */
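/* A sketch of the effect, with illustrative names: for
   'private (a)' where 'int a[n]', NEW_VAR is a pointer whose
   DECL_VALUE_EXPR is '*a.ptr', and the code below amounts to

     void *tmp = __builtin_alloca_with_align (sizeof_a, align);
     a.ptr = (int (*)[n]) tmp;  */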
3965 if (pass == 0)
3966 continue;
3968 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3970 gcall *stmt;
3971 tree tmp, atmp;
3973 ptr = DECL_VALUE_EXPR (new_var);
3974 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3975 ptr = TREE_OPERAND (ptr, 0);
3976 gcc_assert (DECL_P (ptr));
3977 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3979 /* void *tmp = __builtin_alloca_with_align (size, align); */
3980 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3981 stmt = gimple_build_call (atmp, 2, x,
3982 size_int (DECL_ALIGN (var)));
3983 tmp = create_tmp_var_raw (ptr_type_node);
3984 gimple_add_tmp_var (tmp);
3985 gimple_call_set_lhs (stmt, tmp);
3987 gimple_seq_add_stmt (ilist, stmt);
3989 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
3990 gimplify_assign (ptr, x, ilist);
3993 else if (omp_is_reference (var))
3995 /* For references that are being privatized for Fortran,
3996 allocate new backing storage for the new pointer
3997 variable. This allows us to avoid changing all the
3998 code that expects a pointer to something that expects
3999 a direct variable. */
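/* A sketch, with illustrative names: a Fortran dummy 'x' arrives
   as a reference 'T *x'; below we emit roughly

     T x_storage;     // or an alloca call for non-constant sizes
     x_priv = &x_storage;

   and then privatize '*x_priv' rather than rewriting every use.  */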
4000 if (pass == 0)
4001 continue;
4003 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4004 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4006 x = build_receiver_ref (var, false, ctx);
4007 x = build_fold_addr_expr_loc (clause_loc, x);
4009 else if (TREE_CONSTANT (x))
4011 /* For a reduction in a SIMD loop, defer adding the
4012 initialization of the reference, because if we decide
4013 to use a SIMD array for it, the initialization could cause
4014 an expansion ICE. */
4015 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4016 x = NULL_TREE;
4017 else
4019 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4020 get_name (var));
4021 gimple_add_tmp_var (x);
4022 TREE_ADDRESSABLE (x) = 1;
4023 x = build_fold_addr_expr_loc (clause_loc, x);
4026 else
4028 tree atmp
4029 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4030 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4031 tree al = size_int (TYPE_ALIGN (rtype));
4032 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4035 if (x)
4037 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4038 gimplify_assign (new_var, x, ilist);
4041 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4043 else if (c_kind == OMP_CLAUSE_REDUCTION
4044 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4046 if (pass == 0)
4047 continue;
4049 else if (pass != 0)
4050 continue;
4052 switch (OMP_CLAUSE_CODE (c))
4054 case OMP_CLAUSE_SHARED:
4055 /* Ignore shared directives in teams construct. */
4056 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4057 continue;
4058 /* Shared global vars are just accessed directly. */
4059 if (is_global_var (new_var))
4060 break;
4061 /* For taskloop firstprivate/lastprivate, which is represented
4062 as a firstprivate and shared clause on the task, new_var
4063 is the firstprivate var. */
4064 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4065 break;
4066 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4067 needs to be delayed until after fixup_child_record_type so
4068 that we get the correct type during the dereference. */
4069 by_ref = use_pointer_for_field (var, ctx);
4070 x = build_receiver_ref (var, by_ref, ctx);
4071 SET_DECL_VALUE_EXPR (new_var, x);
4072 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4074 /* ??? If VAR is not passed by reference, and the variable
4075 hasn't been initialized yet, then we'll get a warning for
4076 the store into the omp_data_s structure. Ideally, we'd be
4077 able to notice this and not store anything at all, but
4078 we're generating code too early. Suppress the warning. */
4079 if (!by_ref)
4080 TREE_NO_WARNING (var) = 1;
4081 break;
4083 case OMP_CLAUSE_LASTPRIVATE:
4084 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4085 break;
4086 /* FALLTHRU */
4088 case OMP_CLAUSE_PRIVATE:
4089 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4090 x = build_outer_var_ref (var, ctx);
4091 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4093 if (is_task_ctx (ctx))
4094 x = build_receiver_ref (var, false, ctx);
4095 else
4096 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4098 else
4099 x = NULL;
4100 do_private:
4101 tree nx;
4102 nx = lang_hooks.decls.omp_clause_default_ctor
4103 (c, unshare_expr (new_var), x);
4104 if (is_simd)
4106 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4107 if ((TREE_ADDRESSABLE (new_var) || nx || y
4108 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4109 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4110 ivar, lvar))
4112 if (nx)
4113 x = lang_hooks.decls.omp_clause_default_ctor
4114 (c, unshare_expr (ivar), x);
4115 if (nx && x)
4116 gimplify_and_add (x, &llist[0]);
4117 if (y)
4119 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4120 if (y)
4122 gimple_seq tseq = NULL;
4124 dtor = y;
4125 gimplify_stmt (&dtor, &tseq);
4126 gimple_seq_add_seq (&llist[1], tseq);
4129 break;
4132 if (nx)
4133 gimplify_and_add (nx, ilist);
4134 /* FALLTHRU */
4136 do_dtor:
4137 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4138 if (x)
4140 gimple_seq tseq = NULL;
4142 dtor = x;
4143 gimplify_stmt (&dtor, &tseq);
4144 gimple_seq_add_seq (dlist, tseq);
4146 break;
4148 case OMP_CLAUSE_LINEAR:
4149 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4150 goto do_firstprivate;
4151 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4152 x = NULL;
4153 else
4154 x = build_outer_var_ref (var, ctx);
4155 goto do_private;
4157 case OMP_CLAUSE_FIRSTPRIVATE:
4158 if (is_task_ctx (ctx))
4160 if (omp_is_reference (var) || is_variable_sized (var))
4161 goto do_dtor;
4162 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4163 ctx))
4164 || use_pointer_for_field (var, NULL))
4166 x = build_receiver_ref (var, false, ctx);
4167 SET_DECL_VALUE_EXPR (new_var, x);
4168 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4169 goto do_dtor;
4172 do_firstprivate:
4173 x = build_outer_var_ref (var, ctx);
4174 if (is_simd)
4176 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4177 && gimple_omp_for_combined_into_p (ctx->stmt))
4179 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4180 tree stept = TREE_TYPE (t);
4181 tree ct = omp_find_clause (clauses,
4182 OMP_CLAUSE__LOOPTEMP_);
4183 gcc_assert (ct);
4184 tree l = OMP_CLAUSE_DECL (ct);
4185 tree n1 = fd->loop.n1;
4186 tree step = fd->loop.step;
4187 tree itype = TREE_TYPE (l);
4188 if (POINTER_TYPE_P (itype))
4189 itype = signed_type_for (itype);
4190 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4191 if (TYPE_UNSIGNED (itype)
4192 && fd->loop.cond_code == GT_EXPR)
4193 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4194 fold_build1 (NEGATE_EXPR, itype, l),
4195 fold_build1 (NEGATE_EXPR,
4196 itype, step));
4197 else
4198 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4199 t = fold_build2 (MULT_EXPR, stept,
4200 fold_convert (stept, l), t);
4202 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4204 x = lang_hooks.decls.omp_clause_linear_ctor
4205 (c, new_var, x, t);
4206 gimplify_and_add (x, ilist);
4207 goto do_dtor;
4210 if (POINTER_TYPE_P (TREE_TYPE (x)))
4211 x = fold_build2 (POINTER_PLUS_EXPR,
4212 TREE_TYPE (x), x, t);
4213 else
4214 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4217 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4218 || TREE_ADDRESSABLE (new_var))
4219 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4220 ivar, lvar))
4222 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4224 tree iv = create_tmp_var (TREE_TYPE (new_var));
4225 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4226 gimplify_and_add (x, ilist);
4227 gimple_stmt_iterator gsi
4228 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4229 gassign *g
4230 = gimple_build_assign (unshare_expr (lvar), iv);
4231 gsi_insert_before_without_update (&gsi, g,
4232 GSI_SAME_STMT);
4233 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4234 enum tree_code code = PLUS_EXPR;
4235 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4236 code = POINTER_PLUS_EXPR;
4237 g = gimple_build_assign (iv, code, iv, t);
4238 gsi_insert_before_without_update (&gsi, g,
4239 GSI_SAME_STMT);
4240 break;
4242 x = lang_hooks.decls.omp_clause_copy_ctor
4243 (c, unshare_expr (ivar), x);
4244 gimplify_and_add (x, &llist[0]);
4245 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4246 if (x)
4248 gimple_seq tseq = NULL;
4250 dtor = x;
4251 gimplify_stmt (&dtor, &tseq);
4252 gimple_seq_add_seq (&llist[1], tseq);
4254 break;
4257 x = lang_hooks.decls.omp_clause_copy_ctor
4258 (c, unshare_expr (new_var), x);
4259 gimplify_and_add (x, ilist);
4260 goto do_dtor;
4262 case OMP_CLAUSE__LOOPTEMP_:
4263 gcc_assert (is_taskreg_ctx (ctx));
4264 x = build_outer_var_ref (var, ctx);
4265 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4266 gimplify_and_add (x, ilist);
4267 break;
4269 case OMP_CLAUSE_COPYIN:
4270 by_ref = use_pointer_for_field (var, NULL);
4271 x = build_receiver_ref (var, by_ref, ctx);
4272 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4273 append_to_statement_list (x, &copyin_seq);
4274 copyin_by_ref |= by_ref;
4275 break;
4277 case OMP_CLAUSE_REDUCTION:
4278 /* OpenACC reductions are initialized using the
4279 GOACC_REDUCTION internal function. */
4280 if (is_gimple_omp_oacc (ctx->stmt))
4281 break;
4282 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4284 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4285 gimple *tseq;
4286 x = build_outer_var_ref (var, ctx);
4288 if (omp_is_reference (var)
4289 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4290 TREE_TYPE (x)))
4291 x = build_fold_addr_expr_loc (clause_loc, x);
4292 SET_DECL_VALUE_EXPR (placeholder, x);
4293 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4294 tree new_vard = new_var;
4295 if (omp_is_reference (var))
4297 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4298 new_vard = TREE_OPERAND (new_var, 0);
4299 gcc_assert (DECL_P (new_vard));
4301 if (is_simd
4302 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4303 ivar, lvar))
4305 if (new_vard == new_var)
4307 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4308 SET_DECL_VALUE_EXPR (new_var, ivar);
4310 else
4312 SET_DECL_VALUE_EXPR (new_vard,
4313 build_fold_addr_expr (ivar));
4314 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4316 x = lang_hooks.decls.omp_clause_default_ctor
4317 (c, unshare_expr (ivar),
4318 build_outer_var_ref (var, ctx));
4319 if (x)
4320 gimplify_and_add (x, &llist[0]);
4321 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4323 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4324 lower_omp (&tseq, ctx);
4325 gimple_seq_add_seq (&llist[0], tseq);
4327 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4328 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4329 lower_omp (&tseq, ctx);
4330 gimple_seq_add_seq (&llist[1], tseq);
4331 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4332 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4333 if (new_vard == new_var)
4334 SET_DECL_VALUE_EXPR (new_var, lvar);
4335 else
4336 SET_DECL_VALUE_EXPR (new_vard,
4337 build_fold_addr_expr (lvar));
4338 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4339 if (x)
4341 tseq = NULL;
4342 dtor = x;
4343 gimplify_stmt (&dtor, &tseq);
4344 gimple_seq_add_seq (&llist[1], tseq);
4346 break;
4348 /* If this is a reference to a constant-size reduction var
4349 with a placeholder, we haven't emitted the initializer
4350 for it because that is undesirable if SIMD arrays are used.
4351 But if they aren't used, we need to emit the deferred
4352 initialization now. */
4353 else if (omp_is_reference (var) && is_simd)
4354 handle_simd_reference (clause_loc, new_vard, ilist);
4355 x = lang_hooks.decls.omp_clause_default_ctor
4356 (c, unshare_expr (new_var),
4357 build_outer_var_ref (var, ctx));
4358 if (x)
4359 gimplify_and_add (x, ilist);
4360 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4362 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4363 lower_omp (&tseq, ctx);
4364 gimple_seq_add_seq (ilist, tseq);
4366 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4367 if (is_simd)
4369 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4370 lower_omp (&tseq, ctx);
4371 gimple_seq_add_seq (dlist, tseq);
4372 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4374 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4375 goto do_dtor;
4377 else
4379 x = omp_reduction_init (c, TREE_TYPE (new_var));
4380 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4381 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4383 /* reduction(-:var) sums up the partial results, so it
4384 acts identically to reduction(+:var). */
4385 if (code == MINUS_EXPR)
4386 code = PLUS_EXPR;
4388 tree new_vard = new_var;
4389 if (is_simd && omp_is_reference (var))
4391 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4392 new_vard = TREE_OPERAND (new_var, 0);
4393 gcc_assert (DECL_P (new_vard));
4395 if (is_simd
4396 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4397 ivar, lvar))
4399 tree ref = build_outer_var_ref (var, ctx);
4401 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4403 if (sctx.is_simt)
4405 if (!simt_lane)
4406 simt_lane = create_tmp_var (unsigned_type_node);
4407 x = build_call_expr_internal_loc
4408 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4409 TREE_TYPE (ivar), 2, ivar, simt_lane);
4410 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4411 gimplify_assign (ivar, x, &llist[2]);
4413 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4414 ref = build_outer_var_ref (var, ctx);
4415 gimplify_assign (ref, x, &llist[1]);
4417 if (new_vard != new_var)
4419 SET_DECL_VALUE_EXPR (new_vard,
4420 build_fold_addr_expr (lvar));
4421 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4424 else
4426 if (omp_is_reference (var) && is_simd)
4427 handle_simd_reference (clause_loc, new_vard, ilist);
4428 gimplify_assign (new_var, x, ilist);
4429 if (is_simd)
4431 tree ref = build_outer_var_ref (var, ctx);
4433 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4434 ref = build_outer_var_ref (var, ctx);
4435 gimplify_assign (ref, x, dlist);
4439 break;
4441 default:
4442 gcc_unreachable ();
4447 if (known_eq (sctx.max_vf, 1U))
4448 sctx.is_simt = false;
4450 if (sctx.lane || sctx.is_simt)
4452 uid = create_tmp_var (ptr_type_node, "simduid");
4453 /* We don't want uninit warnings on simduid; it is always
4454 uninitialized, since we use it only for its DECL_UID, not its value. */
4455 TREE_NO_WARNING (uid) = 1;
4456 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4457 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4458 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4459 gimple_omp_for_set_clauses (ctx->stmt, c);
4461 /* Emit calls denoting privatized variables and initializing a pointer to
4462 the structure that holds private variables as fields after the ompdevlow pass. */
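/* Pseudo-GIMPLE for what is built below (names illustrative):

     simduid = .GOMP_SIMT_ENTER (simduid, &priv1, ..., &privN);
     .omp_simt = .GOMP_SIMT_ENTER_ALLOC (simduid);

   where the &privI arguments are the ones collected in
   sctx.simt_eargs.  */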
4463 if (sctx.is_simt)
4465 sctx.simt_eargs[0] = uid;
4466 gimple *g
4467 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4468 gimple_call_set_lhs (g, uid);
4469 gimple_seq_add_stmt (ilist, g);
4470 sctx.simt_eargs.release ();
4472 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4473 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4474 gimple_call_set_lhs (g, simtrec);
4475 gimple_seq_add_stmt (ilist, g);
4477 if (sctx.lane)
4479 gimple *g
4480 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4481 gimple_call_set_lhs (g, sctx.lane);
4482 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4483 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4484 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4485 build_int_cst (unsigned_type_node, 0));
4486 gimple_seq_add_stmt (ilist, g);
4487 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
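/* I.e., roughly:

     simt_vf = .GOMP_SIMT_VF ();
     simt_lane = 1;
     goto header;
   body:
     // llist[2]: per var, tmp = .GOMP_SIMT_XCHG_BFLY (var, simt_lane);
     //           var = var OP tmp;
     simt_lane <<= 1;
   header:
     if (simt_lane < simt_vf) goto body;
   end:;  */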
4488 if (llist[2])
4490 tree simt_vf = create_tmp_var (unsigned_type_node);
4491 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4492 gimple_call_set_lhs (g, simt_vf);
4493 gimple_seq_add_stmt (dlist, g);
4495 tree t = build_int_cst (unsigned_type_node, 1);
4496 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4497 gimple_seq_add_stmt (dlist, g);
4499 t = build_int_cst (unsigned_type_node, 0);
4500 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4501 gimple_seq_add_stmt (dlist, g);
4503 tree body = create_artificial_label (UNKNOWN_LOCATION);
4504 tree header = create_artificial_label (UNKNOWN_LOCATION);
4505 tree end = create_artificial_label (UNKNOWN_LOCATION);
4506 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4507 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4509 gimple_seq_add_seq (dlist, llist[2]);
4511 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4512 gimple_seq_add_stmt (dlist, g);
4514 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4515 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4516 gimple_seq_add_stmt (dlist, g);
4518 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4520 for (int i = 0; i < 2; i++)
4521 if (llist[i])
4523 tree vf = create_tmp_var (unsigned_type_node);
4524 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4525 gimple_call_set_lhs (g, vf);
4526 gimple_seq *seq = i == 0 ? ilist : dlist;
4527 gimple_seq_add_stmt (seq, g);
4528 tree t = build_int_cst (unsigned_type_node, 0);
4529 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4530 gimple_seq_add_stmt (seq, g);
4531 tree body = create_artificial_label (UNKNOWN_LOCATION);
4532 tree header = create_artificial_label (UNKNOWN_LOCATION);
4533 tree end = create_artificial_label (UNKNOWN_LOCATION);
4534 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4535 gimple_seq_add_stmt (seq, gimple_build_label (body));
4536 gimple_seq_add_seq (seq, llist[i]);
4537 t = build_int_cst (unsigned_type_node, 1);
4538 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4539 gimple_seq_add_stmt (seq, g);
4540 gimple_seq_add_stmt (seq, gimple_build_label (header));
4541 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4542 gimple_seq_add_stmt (seq, g);
4543 gimple_seq_add_stmt (seq, gimple_build_label (end));
4546 if (sctx.is_simt)
4548 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4549 gimple *g
4550 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4551 gimple_seq_add_stmt (dlist, g);
4554 /* The copyin sequence is not to be executed by the main thread, since
4555 that would result in self-copies. Perhaps not visible to scalars,
4556 but it certainly is to C++ operator=. */
4557 if (copyin_seq)
4559 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4560 0);
4561 x = build2 (NE_EXPR, boolean_type_node, x,
4562 build_int_cst (TREE_TYPE (x), 0));
4563 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4564 gimplify_and_add (x, ilist);
4567 /* If any copyin variable is passed by reference, we must ensure the
4568 master thread doesn't modify it before it is copied over in all
4569 threads. Similarly for variables in both firstprivate and
4570 lastprivate clauses we need to ensure the lastprivate copying
4571 happens after firstprivate copying in all threads. And similarly
4572 for UDRs if initializer expression refers to omp_orig. */
4573 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4575 /* Don't add any barrier for #pragma omp simd or
4576 #pragma omp distribute. */
4577 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4578 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4579 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4582 /* If max_vf is non-zero, then we can use only a vectorization factor
4583 up to the max_vf we chose. So stick it into the safelen clause. */
4584 if (maybe_ne (sctx.max_vf, 0U))
4586 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4587 OMP_CLAUSE_SAFELEN);
4588 poly_uint64 safe_len;
4589 if (c == NULL_TREE
4590 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4591 && maybe_gt (safe_len, sctx.max_vf)))
4593 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4594 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4595 sctx.max_vf);
4596 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4597 gimple_omp_for_set_clauses (ctx->stmt, c);
4603 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4604 both parallel and workshare constructs. PREDICATE may be NULL if it's
4605 always true. */
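/* E.g. (names illustrative) 'lastprivate (x)' on a loop becomes
   roughly

     if (<last-iteration predicate>)
       x_orig = x_priv;

   with the store produced via omp_clause_assign_op so that C++
   copy-assignment operators are honoured.  */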
4607 static void
4608 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4609 omp_context *ctx)
4611 tree x, c, label = NULL, orig_clauses = clauses;
4612 bool par_clauses = false;
4613 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4615 /* Early exit if there are no lastprivate or linear clauses. */
4616 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4617 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4618 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4619 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4620 break;
4621 if (clauses == NULL)
4623 /* If this was a workshare clause, see if it had been combined
4624 with its parallel. In that case, look for the clauses on the
4625 parallel statement itself. */
4626 if (is_parallel_ctx (ctx))
4627 return;
4629 ctx = ctx->outer;
4630 if (ctx == NULL || !is_parallel_ctx (ctx))
4631 return;
4633 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4634 OMP_CLAUSE_LASTPRIVATE);
4635 if (clauses == NULL)
4636 return;
4637 par_clauses = true;
4640 bool maybe_simt = false;
4641 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4642 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4644 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4645 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4646 if (simduid)
4647 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4650 if (predicate)
4652 gcond *stmt;
4653 tree label_true, arm1, arm2;
4654 enum tree_code pred_code = TREE_CODE (predicate);
4656 label = create_artificial_label (UNKNOWN_LOCATION);
4657 label_true = create_artificial_label (UNKNOWN_LOCATION);
4658 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4660 arm1 = TREE_OPERAND (predicate, 0);
4661 arm2 = TREE_OPERAND (predicate, 1);
4662 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4663 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4665 else
4667 arm1 = predicate;
4668 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4669 arm2 = boolean_false_node;
4670 pred_code = NE_EXPR;
4672 if (maybe_simt)
4674 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4675 c = fold_convert (integer_type_node, c);
4676 simtcond = create_tmp_var (integer_type_node);
4677 gimplify_assign (simtcond, c, stmt_list);
4678 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4679 1, simtcond);
4680 c = create_tmp_var (integer_type_node);
4681 gimple_call_set_lhs (g, c);
4682 gimple_seq_add_stmt (stmt_list, g);
4683 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4684 label_true, label);
4686 else
4687 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4688 gimple_seq_add_stmt (stmt_list, stmt);
4689 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4692 for (c = clauses; c ;)
4694 tree var, new_var;
4695 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4697 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4698 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4699 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4701 var = OMP_CLAUSE_DECL (c);
4702 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4703 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4704 && is_taskloop_ctx (ctx))
4706 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4707 new_var = lookup_decl (var, ctx->outer);
4709 else
4711 new_var = lookup_decl (var, ctx);
4712 /* Avoid uninitialized warnings for lastprivate and
4713 for linear iterators. */
4714 if (predicate
4715 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4716 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4717 TREE_NO_WARNING (new_var) = 1;
4720 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4722 tree val = DECL_VALUE_EXPR (new_var);
4723 if (TREE_CODE (val) == ARRAY_REF
4724 && VAR_P (TREE_OPERAND (val, 0))
4725 && lookup_attribute ("omp simd array",
4726 DECL_ATTRIBUTES (TREE_OPERAND (val,
4727 0))))
4729 if (lastlane == NULL)
4731 lastlane = create_tmp_var (unsigned_type_node);
4732 gcall *g
4733 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4734 2, simduid,
4735 TREE_OPERAND (val, 1));
4736 gimple_call_set_lhs (g, lastlane);
4737 gimple_seq_add_stmt (stmt_list, g);
4739 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4740 TREE_OPERAND (val, 0), lastlane,
4741 NULL_TREE, NULL_TREE);
4744 else if (maybe_simt)
4746 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4747 ? DECL_VALUE_EXPR (new_var)
4748 : new_var);
4749 if (simtlast == NULL)
4751 simtlast = create_tmp_var (unsigned_type_node);
4752 gcall *g = gimple_build_call_internal
4753 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4754 gimple_call_set_lhs (g, simtlast);
4755 gimple_seq_add_stmt (stmt_list, g);
4757 x = build_call_expr_internal_loc
4758 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4759 TREE_TYPE (val), 2, val, simtlast);
4760 new_var = unshare_expr (new_var);
4761 gimplify_assign (new_var, x, stmt_list);
4762 new_var = unshare_expr (new_var);
4765 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4766 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4768 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4769 gimple_seq_add_seq (stmt_list,
4770 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4771 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4773 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4774 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4776 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4777 gimple_seq_add_seq (stmt_list,
4778 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4779 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4782 x = NULL_TREE;
4783 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4784 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4786 gcc_checking_assert (is_taskloop_ctx (ctx));
4787 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4788 ctx->outer->outer);
4789 if (is_global_var (ovar))
4790 x = ovar;
4792 if (!x)
4793 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4794 if (omp_is_reference (var))
4795 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4796 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4797 gimplify_and_add (x, stmt_list);
4799 c = OMP_CLAUSE_CHAIN (c);
4800 if (c == NULL && !par_clauses)
4802 /* If this was a workshare clause, see if it had been combined
4803 with its parallel. In that case, continue looking for the
4804 clauses also on the parallel statement itself. */
4805 if (is_parallel_ctx (ctx))
4806 break;
4808 ctx = ctx->outer;
4809 if (ctx == NULL || !is_parallel_ctx (ctx))
4810 break;
4812 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4813 OMP_CLAUSE_LASTPRIVATE);
4814 par_clauses = true;
4818 if (label)
4819 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4822 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4823 (which might be a placeholder). INNER is true if this is an inner
4824 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4825 join markers. Generate the before-loop forking sequence in
4826 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4827 general form of these sequences is
4829 GOACC_REDUCTION_SETUP
4830 GOACC_FORK
4831 GOACC_REDUCTION_INIT
4833 GOACC_REDUCTION_FINI
4834 GOACC_JOIN
4835 GOACC_REDUCTION_TEARDOWN. */
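/* Concretely, each reduction variable expands to pseudo-GIMPLE like

     v1 = .GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, op, offset);
     GOACC_FORK
     v2 = .GOACC_REDUCTION (INIT, ref_to_res, v1, level, op, offset);
     ... loop body ...
     v3 = .GOACC_REDUCTION (FINI, ref_to_res, v2, level, op, offset);
     GOACC_JOIN
     outgoing = .GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, op, offset);  */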
4837 static void
4838 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4839 gcall *fork, gcall *join, gimple_seq *fork_seq,
4840 gimple_seq *join_seq, omp_context *ctx)
4842 gimple_seq before_fork = NULL;
4843 gimple_seq after_fork = NULL;
4844 gimple_seq before_join = NULL;
4845 gimple_seq after_join = NULL;
4846 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4847 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4848 unsigned offset = 0;
4850 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4851 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4853 tree orig = OMP_CLAUSE_DECL (c);
4854 tree var = maybe_lookup_decl (orig, ctx);
4855 tree ref_to_res = NULL_TREE;
4856 tree incoming, outgoing, v1, v2, v3;
4857 bool is_private = false;
4859 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4860 if (rcode == MINUS_EXPR)
4861 rcode = PLUS_EXPR;
4862 else if (rcode == TRUTH_ANDIF_EXPR)
4863 rcode = BIT_AND_EXPR;
4864 else if (rcode == TRUTH_ORIF_EXPR)
4865 rcode = BIT_IOR_EXPR;
4866 tree op = build_int_cst (unsigned_type_node, rcode);
4868 if (!var)
4869 var = orig;
4871 incoming = outgoing = var;
4873 if (!inner)
4875 /* See if an outer construct also reduces this variable. */
4876 omp_context *outer = ctx;
4878 while (omp_context *probe = outer->outer)
4880 enum gimple_code type = gimple_code (probe->stmt);
4881 tree cls;
4883 switch (type)
4885 case GIMPLE_OMP_FOR:
4886 cls = gimple_omp_for_clauses (probe->stmt);
4887 break;
4889 case GIMPLE_OMP_TARGET:
4890 if (gimple_omp_target_kind (probe->stmt)
4891 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4892 goto do_lookup;
4894 cls = gimple_omp_target_clauses (probe->stmt);
4895 break;
4897 default:
4898 goto do_lookup;
4901 outer = probe;
4902 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4903 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4904 && orig == OMP_CLAUSE_DECL (cls))
4906 incoming = outgoing = lookup_decl (orig, probe);
4907 goto has_outer_reduction;
4909 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4910 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4911 && orig == OMP_CLAUSE_DECL (cls))
4913 is_private = true;
4914 goto do_lookup;
4918 do_lookup:
4919 /* This is the outermost construct with this reduction;
4920 see if there's a mapping for it. */
4921 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4922 && maybe_lookup_field (orig, outer) && !is_private)
4924 ref_to_res = build_receiver_ref (orig, false, outer);
4925 if (omp_is_reference (orig))
4926 ref_to_res = build_simple_mem_ref (ref_to_res);
4928 tree type = TREE_TYPE (var);
4929 if (POINTER_TYPE_P (type))
4930 type = TREE_TYPE (type);
4932 outgoing = var;
4933 incoming = omp_reduction_init_op (loc, rcode, type);
4935 else
4937 /* Look in enclosing contexts for the reduction var;
4938 use the original if no mapping is found. */
4939 tree t = NULL_TREE;
4940 omp_context *c = ctx->outer;
4941 while (c && !t)
4943 t = maybe_lookup_decl (orig, c);
4944 c = c->outer;
4946 incoming = outgoing = (t ? t : orig);
4949 has_outer_reduction:;
4952 if (!ref_to_res)
4953 ref_to_res = integer_zero_node;
4955 if (omp_is_reference (orig))
4957 tree type = TREE_TYPE (var);
4958 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4960 if (!inner)
4962 tree x = create_tmp_var (TREE_TYPE (type), id);
4963 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4966 v1 = create_tmp_var (type, id);
4967 v2 = create_tmp_var (type, id);
4968 v3 = create_tmp_var (type, id);
4970 gimplify_assign (v1, var, fork_seq);
4971 gimplify_assign (v2, var, fork_seq);
4972 gimplify_assign (v3, var, fork_seq);
4974 var = build_simple_mem_ref (var);
4975 v1 = build_simple_mem_ref (v1);
4976 v2 = build_simple_mem_ref (v2);
4977 v3 = build_simple_mem_ref (v3);
4978 outgoing = build_simple_mem_ref (outgoing);
4980 if (!TREE_CONSTANT (incoming))
4981 incoming = build_simple_mem_ref (incoming);
4983 else
4984 v1 = v2 = v3 = var;
4986 /* Determine the position in the reduction buffer, which may be
4987 used by the target. The parser has ensured that this is not a
4988 variable-sized type. */
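/* E.g. an 8-byte double following a 4-byte float: offset 4 is
   rounded up to (4 + 7) & ~7 == 8 below.  */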
4989 fixed_size_mode mode
4990 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
4991 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
4992 offset = (offset + align - 1) & ~(align - 1);
4993 tree off = build_int_cst (sizetype, offset);
4994 offset += GET_MODE_SIZE (mode);
4996 if (!init_code)
4998 init_code = build_int_cst (integer_type_node,
4999 IFN_GOACC_REDUCTION_INIT);
5000 fini_code = build_int_cst (integer_type_node,
5001 IFN_GOACC_REDUCTION_FINI);
5002 setup_code = build_int_cst (integer_type_node,
5003 IFN_GOACC_REDUCTION_SETUP);
5004 teardown_code = build_int_cst (integer_type_node,
5005 IFN_GOACC_REDUCTION_TEARDOWN);
5008 tree setup_call
5009 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5010 TREE_TYPE (var), 6, setup_code,
5011 unshare_expr (ref_to_res),
5012 incoming, level, op, off);
5013 tree init_call
5014 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5015 TREE_TYPE (var), 6, init_code,
5016 unshare_expr (ref_to_res),
5017 v1, level, op, off);
5018 tree fini_call
5019 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5020 TREE_TYPE (var), 6, fini_code,
5021 unshare_expr (ref_to_res),
5022 v2, level, op, off);
5023 tree teardown_call
5024 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5025 TREE_TYPE (var), 6, teardown_code,
5026 ref_to_res, v3, level, op, off);
5028 gimplify_assign (v1, setup_call, &before_fork);
5029 gimplify_assign (v2, init_call, &after_fork);
5030 gimplify_assign (v3, fini_call, &before_join);
5031 gimplify_assign (outgoing, teardown_call, &after_join);
5034 /* Now stitch things together. */
5035 gimple_seq_add_seq (fork_seq, before_fork);
5036 if (fork)
5037 gimple_seq_add_stmt (fork_seq, fork);
5038 gimple_seq_add_seq (fork_seq, after_fork);
5040 gimple_seq_add_seq (join_seq, before_join);
5041 if (join)
5042 gimple_seq_add_stmt (join_seq, join);
5043 gimple_seq_add_seq (join_seq, after_join);
5046 /* Generate code to implement the REDUCTION clauses. */
5048 static void
5049 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5051 gimple_seq sub_seq = NULL;
5052 gimple *stmt;
5053 tree x, c;
5054 int count = 0;
5056 /* OpenACC loop reductions are handled elsewhere. */
5057 if (is_gimple_omp_oacc (ctx->stmt))
5058 return;
5060 /* SIMD reductions are handled in lower_rec_input_clauses. */
5061 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5062 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5063 return;
5065 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5066 update in that case, otherwise use a lock. */
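/* I.e. a lone 'reduction (+:sum)' merges with a single atomic
   update, roughly '#pragma omp atomic  sum_orig += sum_priv',
   whereas with several clauses the merges are wrapped in
   GOMP_atomic_start ()/GOMP_atomic_end () below.  */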
5067 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5068 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5070 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5071 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5073 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5074 count = -1;
5075 break;
5077 count++;
5080 if (count == 0)
5081 return;
5083 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5085 tree var, ref, new_var, orig_var;
5086 enum tree_code code;
5087 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5089 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5090 continue;
5092 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5093 orig_var = var = OMP_CLAUSE_DECL (c);
5094 if (TREE_CODE (var) == MEM_REF)
5096 var = TREE_OPERAND (var, 0);
5097 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5098 var = TREE_OPERAND (var, 0);
5099 if (TREE_CODE (var) == ADDR_EXPR)
5100 var = TREE_OPERAND (var, 0);
5101 else
5103 /* If this is a pointer- or reference-based array
5104 section, the var could be private in the outer
5105 context, e.g. on an orphaned loop construct. Pretend this
5106 is a private variable's outer reference. */
5107 ccode = OMP_CLAUSE_PRIVATE;
5108 if (TREE_CODE (var) == INDIRECT_REF)
5109 var = TREE_OPERAND (var, 0);
5111 orig_var = var;
5112 if (is_variable_sized (var))
5114 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5115 var = DECL_VALUE_EXPR (var);
5116 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5117 var = TREE_OPERAND (var, 0);
5118 gcc_assert (DECL_P (var));
5121 new_var = lookup_decl (var, ctx);
5122 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5123 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5124 ref = build_outer_var_ref (var, ctx, ccode);
5125 code = OMP_CLAUSE_REDUCTION_CODE (c);
5127 /* reduction(-:var) sums up the partial results, so it acts
5128 identically to reduction(+:var). */
5129 if (code == MINUS_EXPR)
5130 code = PLUS_EXPR;
5132 if (count == 1)
5134 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5136 addr = save_expr (addr);
5137 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5138 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5139 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5140 gimplify_and_add (x, stmt_seqp);
5141 return;
5143 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5145 tree d = OMP_CLAUSE_DECL (c);
5146 tree type = TREE_TYPE (d);
5147 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5148 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5149 tree ptype = build_pointer_type (TREE_TYPE (type));
5150 tree bias = TREE_OPERAND (d, 1);
5151 d = TREE_OPERAND (d, 0);
5152 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5154 tree b = TREE_OPERAND (d, 1);
5155 b = maybe_lookup_decl (b, ctx);
5156 if (b == NULL)
5158 b = TREE_OPERAND (d, 1);
5159 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5161 if (integer_zerop (bias))
5162 bias = b;
5163 else
5165 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5166 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5167 TREE_TYPE (b), b, bias);
5169 d = TREE_OPERAND (d, 0);
5171 /* For a reference, build_outer_var_ref already performs this, so
5172 only new_var needs a dereference. */
5173 if (TREE_CODE (d) == INDIRECT_REF)
5175 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5176 gcc_assert (omp_is_reference (var) && var == orig_var);
5178 else if (TREE_CODE (d) == ADDR_EXPR)
5180 if (orig_var == var)
5182 new_var = build_fold_addr_expr (new_var);
5183 ref = build_fold_addr_expr (ref);
5186 else
5188 gcc_assert (orig_var == var);
5189 if (omp_is_reference (var))
5190 ref = build_fold_addr_expr (ref);
5192 if (DECL_P (v))
5194 tree t = maybe_lookup_decl (v, ctx);
5195 if (t)
5196 v = t;
5197 else
5198 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5199 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5201 if (!integer_zerop (bias))
5203 bias = fold_convert_loc (clause_loc, sizetype, bias);
5204 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5205 TREE_TYPE (new_var), new_var,
5206 unshare_expr (bias));
5207 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5208 TREE_TYPE (ref), ref, bias);
5210 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5211 ref = fold_convert_loc (clause_loc, ptype, ref);
5212 tree m = create_tmp_var (ptype, NULL);
5213 gimplify_assign (m, new_var, stmt_seqp);
5214 new_var = m;
5215 m = create_tmp_var (ptype, NULL);
5216 gimplify_assign (m, ref, stmt_seqp);
5217 ref = m;
5218 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5219 tree body = create_artificial_label (UNKNOWN_LOCATION);
5220 tree end = create_artificial_label (UNKNOWN_LOCATION);
5221 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5222 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5223 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5224 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5226 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5227 tree decl_placeholder
5228 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5229 SET_DECL_VALUE_EXPR (placeholder, out);
5230 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5231 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5232 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5233 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5234 gimple_seq_add_seq (&sub_seq,
5235 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5236 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5237 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5238 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5240 else
5242 x = build2 (code, TREE_TYPE (out), out, priv);
5243 out = unshare_expr (out);
5244 gimplify_assign (out, x, &sub_seq);
5246 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5247 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5248 gimple_seq_add_stmt (&sub_seq, g);
5249 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5250 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5251 gimple_seq_add_stmt (&sub_seq, g);
5252 g = gimple_build_assign (i, PLUS_EXPR, i,
5253 build_int_cst (TREE_TYPE (i), 1));
5254 gimple_seq_add_stmt (&sub_seq, g);
5255 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5256 gimple_seq_add_stmt (&sub_seq, g);
5257 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5259 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5261 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5263 if (omp_is_reference (var)
5264 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5265 TREE_TYPE (ref)))
5266 ref = build_fold_addr_expr_loc (clause_loc, ref);
5267 SET_DECL_VALUE_EXPR (placeholder, ref);
5268 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5269 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5270 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5271 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5272 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5274 else
5276 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5277 ref = build_outer_var_ref (var, ctx);
5278 gimplify_assign (ref, x, &sub_seq);
5282 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5283 0);
5284 gimple_seq_add_stmt (stmt_seqp, stmt);
5286 gimple_seq_add_seq (stmt_seqp, sub_seq);
5288 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5289 0);
5290 gimple_seq_add_stmt (stmt_seqp, stmt);
5294 /* Generate code to implement the COPYPRIVATE clauses. */
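/* A sketch (struct and field names illustrative): for
   'copyprivate (x)', the thread that executed the single region
   broadcasts in SLIST, roughly '.omp_copy.x = x' ('= &x' when
   passed by reference), and every thread copies the value back
   in RLIST, 'x = .omp_copy.x'.  */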
5296 static void
5297 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5298 omp_context *ctx)
5300 tree c;
5302 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5304 tree var, new_var, ref, x;
5305 bool by_ref;
5306 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5308 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5309 continue;
5311 var = OMP_CLAUSE_DECL (c);
5312 by_ref = use_pointer_for_field (var, NULL);
5314 ref = build_sender_ref (var, ctx);
5315 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5316 if (by_ref)
5318 x = build_fold_addr_expr_loc (clause_loc, new_var);
5319 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5321 gimplify_assign (ref, x, slist);
5323 ref = build_receiver_ref (var, false, ctx);
5324 if (by_ref)
5326 ref = fold_convert_loc (clause_loc,
5327 build_pointer_type (TREE_TYPE (new_var)),
5328 ref);
5329 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5331 if (omp_is_reference (var))
5333 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5334 ref = build_simple_mem_ref_loc (clause_loc, ref);
5335 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5337 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5338 gimplify_and_add (x, rlist);
5343 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5344 and REDUCTION from the sender (aka parent) side. */
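/* E.g. (field names illustrative) 'firstprivate (a)' stores the
   incoming value into the marshalling struct in ILIST, roughly
   '.omp_data_o.a = a' (or '= &a' if passed by reference), while a
   copy-out such as a by-value 'lastprivate (b)' reads it back in
   OLIST as 'b = .omp_data_o.b'.  */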
5346 static void
5347 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5348 omp_context *ctx)
5350 tree c, t;
5351 int ignored_looptemp = 0;
5352 bool is_taskloop = false;
5354 /* For taskloop, ignore the first two _looptemp_ clauses; those are
5355 initialized by GOMP_taskloop. */
5356 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5358 ignored_looptemp = 2;
5359 is_taskloop = true;
5362 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5364 tree val, ref, x, var;
5365 bool by_ref, do_in = false, do_out = false;
5366 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5368 switch (OMP_CLAUSE_CODE (c))
5370 case OMP_CLAUSE_PRIVATE:
5371 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5372 break;
5373 continue;
5374 case OMP_CLAUSE_FIRSTPRIVATE:
5375 case OMP_CLAUSE_COPYIN:
5376 case OMP_CLAUSE_LASTPRIVATE:
5377 case OMP_CLAUSE_REDUCTION:
5378 break;
5379 case OMP_CLAUSE_SHARED:
5380 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5381 break;
5382 continue;
5383 case OMP_CLAUSE__LOOPTEMP_:
5384 if (ignored_looptemp)
5386 ignored_looptemp--;
5387 continue;
5389 break;
5390 default:
5391 continue;
5394 val = OMP_CLAUSE_DECL (c);
5395 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5396 && TREE_CODE (val) == MEM_REF)
5398 val = TREE_OPERAND (val, 0);
5399 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5400 val = TREE_OPERAND (val, 0);
5401 if (TREE_CODE (val) == INDIRECT_REF
5402 || TREE_CODE (val) == ADDR_EXPR)
5403 val = TREE_OPERAND (val, 0);
5404 if (is_variable_sized (val))
5405 continue;
5408 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5409 outer taskloop region. */
5410 omp_context *ctx_for_o = ctx;
5411 if (is_taskloop
5412 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5413 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5414 ctx_for_o = ctx->outer;
5416 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5418 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5419 && is_global_var (var))
5420 continue;
5422 t = omp_member_access_dummy_var (var);
5423 if (t)
5425 var = DECL_VALUE_EXPR (var);
5426 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5427 if (o != t)
5428 var = unshare_and_remap (var, t, o);
5429 else
5430 var = unshare_expr (var);
5433 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5435 /* Handle taskloop firstprivate/lastprivate, where the
5436 lastprivate on GIMPLE_OMP_TASK is represented as
5437 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5438 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5439 x = omp_build_component_ref (ctx->sender_decl, f);
5440 if (use_pointer_for_field (val, ctx))
5441 var = build_fold_addr_expr (var);
5442 gimplify_assign (x, var, ilist);
5443 DECL_ABSTRACT_ORIGIN (f) = NULL;
5444 continue;
5447 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5448 || val == OMP_CLAUSE_DECL (c))
5449 && is_variable_sized (val))
5450 continue;
5451 by_ref = use_pointer_for_field (val, NULL);
5453 switch (OMP_CLAUSE_CODE (c))
5455 case OMP_CLAUSE_FIRSTPRIVATE:
5456 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5457 && !by_ref
5458 && is_task_ctx (ctx))
5459 TREE_NO_WARNING (var) = 1;
5460 do_in = true;
5461 break;
5463 case OMP_CLAUSE_PRIVATE:
5464 case OMP_CLAUSE_COPYIN:
5465 case OMP_CLAUSE__LOOPTEMP_:
5466 do_in = true;
5467 break;
5469 case OMP_CLAUSE_LASTPRIVATE:
5470 if (by_ref || omp_is_reference (val))
5472 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5473 continue;
5474 do_in = true;
5476 else
5478 do_out = true;
5479 if (lang_hooks.decls.omp_private_outer_ref (val))
5480 do_in = true;
5482 break;
5484 case OMP_CLAUSE_REDUCTION:
5485 do_in = true;
5486 if (val == OMP_CLAUSE_DECL (c))
5487 do_out = !(by_ref || omp_is_reference (val));
5488 else
5489 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5490 break;
5492 default:
5493 gcc_unreachable ();
5496 if (do_in)
5498 ref = build_sender_ref (val, ctx);
5499 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5500 gimplify_assign (ref, x, ilist);
5501 if (is_task_ctx (ctx))
5502 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5505 if (do_out)
5507 ref = build_sender_ref (val, ctx);
5508 gimplify_assign (var, ref, olist);
5513 /* Generate code to implement SHARED from the sender (aka parent)
5514 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5515 list things that got automatically shared. */
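/* Sketch: a shared 'x' passed by pointer gets '.omp_data_o.x = &x'
   in ILIST; one passed by value gets '.omp_data_o.x = x' in ILIST
   and, unless it is read-only or a by-reference RESULT/PARM decl,
   'x = .omp_data_o.x' in OLIST after the region.  */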
5517 static void
5518 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5520 tree var, ovar, nvar, t, f, x, record_type;
5522 if (ctx->record_type == NULL)
5523 return;
5525 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5526 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5528 ovar = DECL_ABSTRACT_ORIGIN (f);
5529 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5530 continue;
5532 nvar = maybe_lookup_decl (ovar, ctx);
5533 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5534 continue;
5536 /* If CTX is a nested parallel directive, find the immediately
5537 enclosing parallel or workshare construct that contains a
5538 mapping for OVAR. */
5539 var = lookup_decl_in_outer_ctx (ovar, ctx);
5541 t = omp_member_access_dummy_var (var);
5542 if (t)
5544 var = DECL_VALUE_EXPR (var);
5545 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5546 if (o != t)
5547 var = unshare_and_remap (var, t, o);
5548 else
5549 var = unshare_expr (var);
5552 if (use_pointer_for_field (ovar, ctx))
5554 x = build_sender_ref (ovar, ctx);
5555 var = build_fold_addr_expr (var);
5556 gimplify_assign (x, var, ilist);
5558 else
5560 x = build_sender_ref (ovar, ctx);
5561 gimplify_assign (x, var, ilist);
5563 if (!TREE_READONLY (var)
5564 /* We don't need to receive a new reference to a result
5565 or parm decl. In fact we may not store to it as we will
5566 invalidate any pending RSO and generate wrong gimple
5567 during inlining. */
5568 && !((TREE_CODE (var) == RESULT_DECL
5569 || TREE_CODE (var) == PARM_DECL)
5570 && DECL_BY_REFERENCE (var)))
5572 x = build_sender_ref (ovar, ctx);
5573 gimplify_assign (var, x, olist);
5579 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5580 other information that must be processed by the target compiler.
5581 Return the maximum number of dimensions the associated loop might
5582 be partitioned over. */
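/* The marker itself is an internal-function call of the shape
   (pseudo-GIMPLE)

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   which a later device-lowering pass decodes.  */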
5584 static unsigned
5585 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5586 gimple_seq *seq, omp_context *ctx)
5588 unsigned levels = 0;
5589 unsigned tag = 0;
5590 tree gang_static = NULL_TREE;
5591 auto_vec<tree, 5> args;
5593 args.quick_push (build_int_cst
5594 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5595 args.quick_push (ddvar);
5596 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5598 switch (OMP_CLAUSE_CODE (c))
5600 case OMP_CLAUSE_GANG:
5601 tag |= OLF_DIM_GANG;
5602 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5603 /* static:* is represented by -1, and we can ignore it, as
5604 scheduling is always static. */
5605 if (gang_static && integer_minus_onep (gang_static))
5606 gang_static = NULL_TREE;
5607 levels++;
5608 break;
5610 case OMP_CLAUSE_WORKER:
5611 tag |= OLF_DIM_WORKER;
5612 levels++;
5613 break;
5615 case OMP_CLAUSE_VECTOR:
5616 tag |= OLF_DIM_VECTOR;
5617 levels++;
5618 break;
5620 case OMP_CLAUSE_SEQ:
5621 tag |= OLF_SEQ;
5622 break;
5624 case OMP_CLAUSE_AUTO:
5625 tag |= OLF_AUTO;
5626 break;
5628 case OMP_CLAUSE_INDEPENDENT:
5629 tag |= OLF_INDEPENDENT;
5630 break;
5632 case OMP_CLAUSE_TILE:
5633 tag |= OLF_TILE;
5634 break;
5636 default:
5637 continue;
5641 if (gang_static)
5643 if (DECL_P (gang_static))
5644 gang_static = build_outer_var_ref (gang_static, ctx);
5645 tag |= OLF_GANG_STATIC;
5648 /* In a parallel region, loops are implicitly INDEPENDENT. */
5649 omp_context *tgt = enclosing_target_ctx (ctx);
5650 if (!tgt || is_oacc_parallel (tgt))
5651 tag |= OLF_INDEPENDENT;
5653 if (tag & OLF_TILE)
5654 /* Tiling could use all 3 levels. */
5655 levels = 3;
5656 else
5658 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5659 Ensure at least one level, or 2 for possible auto
5660 partitioning. */
5661 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5662 << OLF_DIM_BASE) | OLF_SEQ));
5664 if (levels < 1u + maybe_auto)
5665 levels = 1u + maybe_auto;
5668 args.quick_push (build_int_cst (integer_type_node, levels));
5669 args.quick_push (build_int_cst (integer_type_node, tag));
5670 if (gang_static)
5671 args.quick_push (gang_static);
5673 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5674 gimple_set_location (call, loc);
5675 gimple_set_lhs (call, ddvar);
5676 gimple_seq_add_stmt (seq, call);
5678 return levels;
5681 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
5682 non-NULL, is the partitioning level of the enclosed region. */
5684 static void
5685 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5686 tree tofollow, gimple_seq *seq)
5688 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5689 : IFN_UNIQUE_OACC_TAIL_MARK);
5690 tree marker = build_int_cst (integer_type_node, marker_kind);
5691 int nargs = 2 + (tofollow != NULL_TREE);
5692 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5693 marker, ddvar, tofollow);
5694 gimple_set_location (call, loc);
5695 gimple_set_lhs (call, ddvar);
5696 gimple_seq_add_stmt (seq, call);
5699 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5700 the loop clauses, from which we extract reductions. Initialize
5701 HEAD and TAIL. */
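/* Roughly, each level contributes 'reduction-setup; GOACC_FORK;
   reduction-init' (plus markers) to HEAD and prepends
   'reduction-fini; GOACC_JOIN; reduction-teardown' to TAIL, so the
   fork/join pairs end up properly nested around the loop body.  */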
5703 static void
5704 lower_oacc_head_tail (location_t loc, tree clauses,
5705 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5707 bool inner = false;
5708 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5709 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5711 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5712 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5713 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5715 gcc_assert (count);
5716 for (unsigned done = 1; count; count--, done++)
5718 gimple_seq fork_seq = NULL;
5719 gimple_seq join_seq = NULL;
5721 tree place = build_int_cst (integer_type_node, -1);
5722 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5723 fork_kind, ddvar, place);
5724 gimple_set_location (fork, loc);
5725 gimple_set_lhs (fork, ddvar);
5727 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5728 join_kind, ddvar, place);
5729 gimple_set_location (join, loc);
5730 gimple_set_lhs (join, ddvar);
5732 /* Mark the beginning of this level sequence. */
5733 if (inner)
5734 lower_oacc_loop_marker (loc, ddvar, true,
5735 build_int_cst (integer_type_node, count),
5736 &fork_seq);
5737 lower_oacc_loop_marker (loc, ddvar, false,
5738 build_int_cst (integer_type_node, done),
5739 &join_seq);
5741 lower_oacc_reductions (loc, clauses, place, inner,
5742 fork, join, &fork_seq, &join_seq, ctx);
5744 /* Append this level to head. */
5745 gimple_seq_add_seq (head, fork_seq);
5746 /* Prepend it to tail. */
5747 gimple_seq_add_seq (&join_seq, *tail);
5748 *tail = join_seq;
5750 inner = true;
5753 /* Mark the end of the sequence. */
5754 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5755 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
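/* Illustrative shape of the result for a two-level loop (the exact
   statement ordering within each level is simplified here):

       HEAD:  UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);
              UNIQUE (OACC_FORK, .data_dep, -1);      // outer level
              UNIQUE (OACC_HEAD_MARK, .data_dep, 1);
              UNIQUE (OACC_FORK, .data_dep, -1);      // inner level
              UNIQUE (OACC_HEAD_MARK, .data_dep);

       TAIL:  UNIQUE (OACC_TAIL_MARK, .data_dep, 2);
              UNIQUE (OACC_JOIN, .data_dep, -1);      // inner level
              UNIQUE (OACC_TAIL_MARK, .data_dep, 1);
              UNIQUE (OACC_JOIN, .data_dep, -1);      // outer level
              UNIQUE (OACC_TAIL_MARK, .data_dep);

   i.e. the tail joins mirror the head forks in reverse order.  */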
5758 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5759 catch handler and return it. This prevents programs from violating the
5760 structured block semantics with throws. */
5762 static gimple_seq
5763 maybe_catch_exception (gimple_seq body)
5765 gimple *g;
5766 tree decl;
5768 if (!flag_exceptions)
5769 return body;
5771 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5772 decl = lang_hooks.eh_protect_cleanup_actions ();
5773 else
5774 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5776 g = gimple_build_eh_must_not_throw (decl);
5777 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5778 GIMPLE_TRY_CATCH);
5780 return gimple_seq_alloc_with_stmt (g);
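/* A minimal sketch of the wrapping, assuming -fexceptions:

       try
         {
           BODY;
         }
       catch
         {
           <eh_must_not_throw: language cleanup action or __builtin_trap>;
         }

   An exception escaping the structured block thus aborts instead of
   unwinding out of the OMP region.  */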
5784 /* Routines to lower OMP directives into OMP-GIMPLE. */
5786 /* If ctx is a worksharing context inside of a cancellable parallel
5787 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5788 and conditional branch to parallel's cancel_label to handle
5789 cancellation in the implicit barrier. */
5791 static void
5792 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5794 gimple *omp_return = gimple_seq_last_stmt (*body);
5795 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5796 if (gimple_omp_return_nowait_p (omp_return))
5797 return;
5798 if (ctx->outer
5799 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5800 && ctx->outer->cancellable)
5802 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5803 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5804 tree lhs = create_tmp_var (c_bool_type);
5805 gimple_omp_return_set_lhs (omp_return, lhs);
5806 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5807 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5808 fold_convert (c_bool_type,
5809 boolean_false_node),
5810 ctx->outer->cancel_label, fallthru_label);
5811 gimple_seq_add_stmt (body, g);
5812 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
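/* Sketch of the emitted check (label names for exposition only):

       <cancel.res> = GIMPLE_OMP_RETURN;      // barrier reports cancellation
       if (<cancel.res> != 0)
         goto <cancel_label>;                 // parallel's cancel handling
       <fallthru_label>:

   so threads observing cancellation at the implicit barrier branch
   straight to the enclosing parallel's cancellation code.  */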
5816 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5817 CTX is the enclosing OMP context for the current statement. */
5819 static void
5820 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5822 tree block, control;
5823 gimple_stmt_iterator tgsi;
5824 gomp_sections *stmt;
5825 gimple *t;
5826 gbind *new_stmt, *bind;
5827 gimple_seq ilist, dlist, olist, new_body;
5829 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5831 push_gimplify_context ();
5833 dlist = NULL;
5834 ilist = NULL;
5835 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5836 &ilist, &dlist, ctx, NULL);
5838 new_body = gimple_omp_body (stmt);
5839 gimple_omp_set_body (stmt, NULL);
5840 tgsi = gsi_start (new_body);
5841 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5843 omp_context *sctx;
5844 gimple *sec_start;
5846 sec_start = gsi_stmt (tgsi);
5847 sctx = maybe_lookup_ctx (sec_start);
5848 gcc_assert (sctx);
5850 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5851 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5852 GSI_CONTINUE_LINKING);
5853 gimple_omp_set_body (sec_start, NULL);
5855 if (gsi_one_before_end_p (tgsi))
5857 gimple_seq l = NULL;
5858 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5859 &l, ctx);
5860 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5861 gimple_omp_section_set_last (sec_start);
5864 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5865 GSI_CONTINUE_LINKING);
5868 block = make_node (BLOCK);
5869 bind = gimple_build_bind (NULL, new_body, block);
5871 olist = NULL;
5872 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5874 block = make_node (BLOCK);
5875 new_stmt = gimple_build_bind (NULL, NULL, block);
5876 gsi_replace (gsi_p, new_stmt, true);
5878 pop_gimplify_context (new_stmt);
5879 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5880 BLOCK_VARS (block) = gimple_bind_vars (bind);
5881 if (BLOCK_VARS (block))
5882 TREE_USED (block) = 1;
5884 new_body = NULL;
5885 gimple_seq_add_seq (&new_body, ilist);
5886 gimple_seq_add_stmt (&new_body, stmt);
5887 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5888 gimple_seq_add_stmt (&new_body, bind);
5890 control = create_tmp_var (unsigned_type_node, ".section");
5891 t = gimple_build_omp_continue (control, control);
5892 gimple_omp_sections_set_control (stmt, control);
5893 gimple_seq_add_stmt (&new_body, t);
5895 gimple_seq_add_seq (&new_body, olist);
5896 if (ctx->cancellable)
5897 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5898 gimple_seq_add_seq (&new_body, dlist);
5900 new_body = maybe_catch_exception (new_body);
5902 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5903 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5904 t = gimple_build_omp_return (nowait);
5905 gimple_seq_add_stmt (&new_body, t);
5906 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5908 gimple_bind_set_body (new_stmt, new_body);
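/* Putting the pieces together, the lowered construct has roughly this
   shape (illustrative only):

       <input clause initialization>            // ilist
       GIMPLE_OMP_SECTIONS <.section>
       GIMPLE_OMP_SECTIONS_SWITCH
       bind { <section bodies, lastprivate on the last> }
       GIMPLE_OMP_CONTINUE (.section, .section)
       <reductions, destructors>                // olist, dlist
       GIMPLE_OMP_RETURN [nowait]

   pass_expand_omp later turns this into a switch driven by
   GOMP_sections_start/GOMP_sections_next.  */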
5912 /* A subroutine of lower_omp_single. Expand the simple form of
5913 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5915 if (GOMP_single_start ())
5916 BODY;
5917 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5919 FIXME. It may be better to delay expanding the logic of this until
5920 pass_expand_omp. The expanded logic may make the job more difficult
5921 for a synchronization analysis pass. */
5923 static void
5924 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5926 location_t loc = gimple_location (single_stmt);
5927 tree tlabel = create_artificial_label (loc);
5928 tree flabel = create_artificial_label (loc);
5929 gimple *call, *cond;
5930 tree lhs, decl;
5932 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5933 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5934 call = gimple_build_call (decl, 0);
5935 gimple_call_set_lhs (call, lhs);
5936 gimple_seq_add_stmt (pre_p, call);
5938 cond = gimple_build_cond (EQ_EXPR, lhs,
5939 fold_convert_loc (loc, TREE_TYPE (lhs),
5940 boolean_true_node),
5941 tlabel, flabel);
5942 gimple_seq_add_stmt (pre_p, cond);
5943 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5944 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5945 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5949 /* A subroutine of lower_omp_single. Expand the simple form of
5950 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5952 #pragma omp single copyprivate (a, b, c)
5954 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5957 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5959 BODY;
5960 copyout.a = a;
5961 copyout.b = b;
5962 copyout.c = c;
5963 GOMP_single_copy_end (&copyout);
5965 else
5967 a = copyout_p->a;
5968 b = copyout_p->b;
5969 c = copyout_p->c;
5971 GOMP_barrier ();
5974 FIXME. It may be better to delay expanding the logic of this until
5975 pass_expand_omp. The expanded logic may make the job more difficult
5976 for a synchronization analysis pass. */
5978 static void
5979 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5980 omp_context *ctx)
5982 tree ptr_type, t, l0, l1, l2, bfn_decl;
5983 gimple_seq copyin_seq;
5984 location_t loc = gimple_location (single_stmt);
5986 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
5988 ptr_type = build_pointer_type (ctx->record_type);
5989 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
5991 l0 = create_artificial_label (loc);
5992 l1 = create_artificial_label (loc);
5993 l2 = create_artificial_label (loc);
5995 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
5996 t = build_call_expr_loc (loc, bfn_decl, 0);
5997 t = fold_convert_loc (loc, ptr_type, t);
5998 gimplify_assign (ctx->receiver_decl, t, pre_p);
6000 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6001 build_int_cst (ptr_type, 0));
6002 t = build3 (COND_EXPR, void_type_node, t,
6003 build_and_jump (&l0), build_and_jump (&l1));
6004 gimplify_and_add (t, pre_p);
6006 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6008 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6010 copyin_seq = NULL;
6011 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6012 &copyin_seq, ctx);
6014 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6015 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6016 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6017 gimplify_and_add (t, pre_p);
6019 t = build_and_jump (&l2);
6020 gimplify_and_add (t, pre_p);
6022 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6024 gimple_seq_add_seq (pre_p, copyin_seq);
6026 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6030 /* Expand code for an OpenMP single directive. */
6032 static void
6033 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6035 tree block;
6036 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6037 gbind *bind;
6038 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6040 push_gimplify_context ();
6042 block = make_node (BLOCK);
6043 bind = gimple_build_bind (NULL, NULL, block);
6044 gsi_replace (gsi_p, bind, true);
6045 bind_body = NULL;
6046 dlist = NULL;
6047 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6048 &bind_body, &dlist, ctx, NULL);
6049 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6051 gimple_seq_add_stmt (&bind_body, single_stmt);
6053 if (ctx->record_type)
6054 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6055 else
6056 lower_omp_single_simple (single_stmt, &bind_body);
6058 gimple_omp_set_body (single_stmt, NULL);
6060 gimple_seq_add_seq (&bind_body, dlist);
6062 bind_body = maybe_catch_exception (bind_body);
6064 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6065 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6066 gimple *g = gimple_build_omp_return (nowait);
6067 gimple_seq_add_stmt (&bind_body_tail, g);
6068 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6069 if (ctx->record_type)
6071 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6072 tree clobber = build_constructor (ctx->record_type, NULL);
6073 TREE_THIS_VOLATILE (clobber) = 1;
6074 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6075 clobber), GSI_SAME_STMT);
6077 gimple_seq_add_seq (&bind_body, bind_body_tail);
6078 gimple_bind_set_body (bind, bind_body);
6080 pop_gimplify_context (bind);
6082 gimple_bind_append_vars (bind, ctx->block_vars);
6083 BLOCK_VARS (block) = ctx->block_vars;
6084 if (BLOCK_VARS (block))
6085 TREE_USED (block) = 1;
6089 /* Expand code for an OpenMP master directive. */
6091 static void
6092 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6094 tree block, lab = NULL, x, bfn_decl;
6095 gimple *stmt = gsi_stmt (*gsi_p);
6096 gbind *bind;
6097 location_t loc = gimple_location (stmt);
6098 gimple_seq tseq;
6100 push_gimplify_context ();
6102 block = make_node (BLOCK);
6103 bind = gimple_build_bind (NULL, NULL, block);
6104 gsi_replace (gsi_p, bind, true);
6105 gimple_bind_add_stmt (bind, stmt);
6107 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6108 x = build_call_expr_loc (loc, bfn_decl, 0);
6109 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6110 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6111 tseq = NULL;
6112 gimplify_and_add (x, &tseq);
6113 gimple_bind_add_seq (bind, tseq);
6115 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6116 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6117 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6118 gimple_omp_set_body (stmt, NULL);
6120 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6122 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6124 pop_gimplify_context (bind);
6126 gimple_bind_append_vars (bind, ctx->block_vars);
6127 BLOCK_VARS (block) = ctx->block_vars;
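/* An illustrative sketch of the result: master lowers to a plain
   thread-number test with no barrier, since the construct implies no
   synchronization:

       if (omp_get_thread_num () == 0)
         BODY;
       GIMPLE_OMP_RETURN (nowait);  */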
6131 /* Expand code for an OpenMP taskgroup directive. */
6133 static void
6134 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6136 gimple *stmt = gsi_stmt (*gsi_p);
6137 gcall *x;
6138 gbind *bind;
6139 tree block = make_node (BLOCK);
6141 bind = gimple_build_bind (NULL, NULL, block);
6142 gsi_replace (gsi_p, bind, true);
6143 gimple_bind_add_stmt (bind, stmt);
6145 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6146 0);
6147 gimple_bind_add_stmt (bind, x);
6149 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6150 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6151 gimple_omp_set_body (stmt, NULL);
6153 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6155 gimple_bind_append_vars (bind, ctx->block_vars);
6156 BLOCK_VARS (block) = ctx->block_vars;
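/* Sketch, for exposition: only the start call is emitted here,

       GOMP_taskgroup_start ();
       BODY;
       GIMPLE_OMP_RETURN (nowait);

   and the matching GOMP_taskgroup_end is produced later, when the
   region is expanded.  */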
6160 /* Fold the depend clauses of the GIMPLE_OMP_ORDERED in ORD_STMT, if possible. */
6162 static void
6163 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6164 omp_context *ctx)
6166 struct omp_for_data fd;
6167 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6168 return;
6170 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6171 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6172 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6173 if (!fd.ordered)
6174 return;
6176 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6177 tree c = gimple_omp_ordered_clauses (ord_stmt);
6178 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6179 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6181 /* Merge depend clauses from multiple adjacent
6182 #pragma omp ordered depend(sink:...) constructs
6183 into one #pragma omp ordered depend(sink:...), so that
6184 we can optimize them together. */
6185 gimple_stmt_iterator gsi = *gsi_p;
6186 gsi_next (&gsi);
6187 while (!gsi_end_p (gsi))
6189 gimple *stmt = gsi_stmt (gsi);
6190 if (is_gimple_debug (stmt)
6191 || gimple_code (stmt) == GIMPLE_NOP)
6193 gsi_next (&gsi);
6194 continue;
6196 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6197 break;
6198 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6199 c = gimple_omp_ordered_clauses (ord_stmt2);
6200 if (c == NULL_TREE
6201 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6202 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6203 break;
6204 while (*list_p)
6205 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6206 *list_p = c;
6207 gsi_remove (&gsi, true);
6211 /* Canonicalize sink dependence clauses into one folded clause if
6212 possible.
6214 The basic algorithm is to create a sink vector whose first
6215 element is the GCD of all the first elements, and whose remaining
6216 elements are the minimum of the subsequent columns.
6218 We ignore dependence vectors whose first element is zero because
6219 such dependencies are known to be executed by the same thread.
6221 We take into account the direction of the loop, so a minimum
6222 becomes a maximum if the loop is iterating forwards. We also
6223 ignore sink clauses where the loop direction is unknown, or where
6224 the offsets are clearly invalid because they are not a multiple
6225 of the loop increment.
6227 For example:
6229 #pragma omp for ordered(2)
6230 for (i=0; i < N; ++i)
6231 for (j=0; j < M; ++j)
6233 #pragma omp ordered \
6234 depend(sink:i-8,j-2) \
6235 depend(sink:i,j-1) \ // Completely ignored because i+0.
6236 depend(sink:i-4,j-3) \
6237 depend(sink:i-6,j-4)
6238 #pragma omp ordered depend(source)
6241 Folded clause is:
6243 depend(sink:-gcd(8,4,6),-min(2,3,4))
6244 -or-
6245 depend(sink:-2,-2)
6248 /* FIXME: Computing GCDs where the first element is zero is
6249 non-trivial in the presence of collapsed loops. Do this later. */
6250 if (fd.collapse > 1)
6251 return;
6253 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6255 /* wide_int is not a POD so it must be default-constructed. */
6256 for (unsigned i = 0; i != 2 * len - 1; ++i)
6257 new (static_cast<void*>(folded_deps + i)) wide_int ();
6259 tree folded_dep = NULL_TREE;
6260 /* TRUE if the first dimension's offset is negative. */
6261 bool neg_offset_p = false;
6263 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6264 unsigned int i;
6265 while ((c = *list_p) != NULL)
6267 bool remove = false;
6269 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6270 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6271 goto next_ordered_clause;
6273 tree vec;
6274 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6275 vec && TREE_CODE (vec) == TREE_LIST;
6276 vec = TREE_CHAIN (vec), ++i)
6278 gcc_assert (i < len);
6280 /* omp_extract_for_data has canonicalized the condition. */
6281 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6282 || fd.loops[i].cond_code == GT_EXPR);
6283 bool forward = fd.loops[i].cond_code == LT_EXPR;
6284 bool maybe_lexically_later = true;
6286 /* While the committee makes up its mind, bail if we have any
6287 non-constant steps. */
6288 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6289 goto lower_omp_ordered_ret;
6291 tree itype = TREE_TYPE (TREE_VALUE (vec));
6292 if (POINTER_TYPE_P (itype))
6293 itype = sizetype;
6294 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6295 TYPE_PRECISION (itype),
6296 TYPE_SIGN (itype));
6298 /* Ignore invalid offsets that are not multiples of the step. */
6299 if (!wi::multiple_of_p (wi::abs (offset),
6300 wi::abs (wi::to_wide (fd.loops[i].step)),
6301 UNSIGNED))
6303 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6304 "ignoring sink clause with offset that is not "
6305 "a multiple of the loop step");
6306 remove = true;
6307 goto next_ordered_clause;
6310 /* Calculate the first dimension. The first dimension of
6311 the folded dependency vector is the GCD of the first
6312 elements, while ignoring any first elements whose offset
6313 is 0. */
6314 if (i == 0)
6316 /* Ignore dependence vectors whose first dimension is 0. */
6317 if (offset == 0)
6319 remove = true;
6320 goto next_ordered_clause;
6322 else
6324 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6326 error_at (OMP_CLAUSE_LOCATION (c),
6327 "first offset must be in opposite direction "
6328 "of loop iterations");
6329 goto lower_omp_ordered_ret;
6331 if (forward)
6332 offset = -offset;
6333 neg_offset_p = forward;
6334 /* Initialize the first time around. */
6335 if (folded_dep == NULL_TREE)
6337 folded_dep = c;
6338 folded_deps[0] = offset;
6340 else
6341 folded_deps[0] = wi::gcd (folded_deps[0],
6342 offset, UNSIGNED);
6345 /* Calculate minimum for the remaining dimensions. */
6346 else
6348 folded_deps[len + i - 1] = offset;
6349 if (folded_dep == c)
6350 folded_deps[i] = offset;
6351 else if (maybe_lexically_later
6352 && !wi::eq_p (folded_deps[i], offset))
6354 if (forward ^ wi::gts_p (folded_deps[i], offset))
6356 unsigned int j;
6357 folded_dep = c;
6358 for (j = 1; j <= i; j++)
6359 folded_deps[j] = folded_deps[len + j - 1];
6361 else
6362 maybe_lexically_later = false;
6366 gcc_assert (i == len);
6368 remove = true;
6370 next_ordered_clause:
6371 if (remove)
6372 *list_p = OMP_CLAUSE_CHAIN (c);
6373 else
6374 list_p = &OMP_CLAUSE_CHAIN (c);
6377 if (folded_dep)
6379 if (neg_offset_p)
6380 folded_deps[0] = -folded_deps[0];
6382 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6383 if (POINTER_TYPE_P (itype))
6384 itype = sizetype;
6386 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6387 = wide_int_to_tree (itype, folded_deps[0]);
6388 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6389 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6392 lower_omp_ordered_ret:
6394 /* Ordered without clauses is #pragma omp ordered threads, while we want
6395 a nop instead if we remove all clauses. */
6396 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6397 gsi_replace (gsi_p, gimple_build_nop (), true);
6401 /* Expand code for an OpenMP ordered directive. */
6403 static void
6404 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6406 tree block;
6407 gimple *stmt = gsi_stmt (*gsi_p), *g;
6408 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6409 gcall *x;
6410 gbind *bind;
6411 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6412 OMP_CLAUSE_SIMD);
6413 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6414 loop. */
6415 bool maybe_simt
6416 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6417 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6418 OMP_CLAUSE_THREADS);
6420 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6421 OMP_CLAUSE_DEPEND))
6423 /* FIXME: This needs to be moved to the expansion to verify various
6424 conditions only testable on cfg with dominators computed, and also
6425 all the depend clauses to be merged still might need to be available
6426 for the runtime checks. */
6427 if (0)
6428 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6429 return;
6432 push_gimplify_context ();
6434 block = make_node (BLOCK);
6435 bind = gimple_build_bind (NULL, NULL, block);
6436 gsi_replace (gsi_p, bind, true);
6437 gimple_bind_add_stmt (bind, stmt);
6439 if (simd)
6441 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6442 build_int_cst (NULL_TREE, threads));
6443 cfun->has_simduid_loops = true;
6445 else
6446 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6447 0);
6448 gimple_bind_add_stmt (bind, x);
6450 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6451 if (maybe_simt)
6453 counter = create_tmp_var (integer_type_node);
6454 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6455 gimple_call_set_lhs (g, counter);
6456 gimple_bind_add_stmt (bind, g);
6458 body = create_artificial_label (UNKNOWN_LOCATION);
6459 test = create_artificial_label (UNKNOWN_LOCATION);
6460 gimple_bind_add_stmt (bind, gimple_build_label (body));
6462 tree simt_pred = create_tmp_var (integer_type_node);
6463 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6464 gimple_call_set_lhs (g, simt_pred);
6465 gimple_bind_add_stmt (bind, g);
6467 tree t = create_artificial_label (UNKNOWN_LOCATION);
6468 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6469 gimple_bind_add_stmt (bind, g);
6471 gimple_bind_add_stmt (bind, gimple_build_label (t));
6473 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6474 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6475 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6476 gimple_omp_set_body (stmt, NULL);
6478 if (maybe_simt)
6480 gimple_bind_add_stmt (bind, gimple_build_label (test));
6481 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6482 gimple_bind_add_stmt (bind, g);
6484 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6485 tree nonneg = create_tmp_var (integer_type_node);
6486 gimple_seq tseq = NULL;
6487 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6488 gimple_bind_add_seq (bind, tseq);
6490 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6491 gimple_call_set_lhs (g, nonneg);
6492 gimple_bind_add_stmt (bind, g);
6494 tree end = create_artificial_label (UNKNOWN_LOCATION);
6495 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6496 gimple_bind_add_stmt (bind, g);
6498 gimple_bind_add_stmt (bind, gimple_build_label (end));
6500 if (simd)
6501 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6502 build_int_cst (NULL_TREE, threads));
6503 else
6504 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6505 0);
6506 gimple_bind_add_stmt (bind, x);
6508 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6510 pop_gimplify_context (bind);
6512 gimple_bind_append_vars (bind, ctx->block_vars);
6513 BLOCK_VARS (block) = gimple_bind_vars (bind);
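/* For the SIMT case the generated skeleton is roughly (labels as in
   the code above):

       counter = GOMP_SIMT_LANE ();
       body:
         pred = GOMP_SIMT_ORDERED_PRED (counter);
         if (pred == 0)
           { ORDERED BODY; }              // one lane at a time
       test:
         counter = counter - 1;
         nonneg = GOMP_SIMT_VOTE_ANY (counter >= 0);
         if (nonneg != 0) goto body;
       end:

   so the lanes of a SIMT group run the ordered body one after
   another, in lane order.  */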
6517 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6518 substitution of a couple of function calls. The NAMED case, however,
6519 requires that languages coordinate a symbol name. It is therefore
6520 best put here in common code. */
6522 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6524 static void
6525 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6527 tree block;
6528 tree name, lock, unlock;
6529 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6530 gbind *bind;
6531 location_t loc = gimple_location (stmt);
6532 gimple_seq tbody;
6534 name = gimple_omp_critical_name (stmt);
6535 if (name)
6537 tree decl;
6539 if (!critical_name_mutexes)
6540 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6542 tree *n = critical_name_mutexes->get (name);
6543 if (n == NULL)
6545 char *new_str;
6547 decl = create_tmp_var_raw (ptr_type_node);
6549 new_str = ACONCAT ((".gomp_critical_user_",
6550 IDENTIFIER_POINTER (name), NULL));
6551 DECL_NAME (decl) = get_identifier (new_str);
6552 TREE_PUBLIC (decl) = 1;
6553 TREE_STATIC (decl) = 1;
6554 DECL_COMMON (decl) = 1;
6555 DECL_ARTIFICIAL (decl) = 1;
6556 DECL_IGNORED_P (decl) = 1;
6558 varpool_node::finalize_decl (decl);
6560 critical_name_mutexes->put (name, decl);
6562 else
6563 decl = *n;
6565 /* If '#pragma omp critical' is inside offloaded region or
6566 inside function marked as offloadable, the symbol must be
6567 marked as offloadable too. */
6568 omp_context *octx;
6569 if (cgraph_node::get (current_function_decl)->offloadable)
6570 varpool_node::get_create (decl)->offloadable = 1;
6571 else
6572 for (octx = ctx->outer; octx; octx = octx->outer)
6573 if (is_gimple_omp_offloaded (octx->stmt))
6575 varpool_node::get_create (decl)->offloadable = 1;
6576 break;
6579 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6580 lock = build_call_expr_loc (loc, lock, 1,
6581 build_fold_addr_expr_loc (loc, decl));
6583 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6584 unlock = build_call_expr_loc (loc, unlock, 1,
6585 build_fold_addr_expr_loc (loc, decl));
6587 else
6589 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6590 lock = build_call_expr_loc (loc, lock, 0);
6592 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6593 unlock = build_call_expr_loc (loc, unlock, 0);
6596 push_gimplify_context ();
6598 block = make_node (BLOCK);
6599 bind = gimple_build_bind (NULL, NULL, block);
6600 gsi_replace (gsi_p, bind, true);
6601 gimple_bind_add_stmt (bind, stmt);
6603 tbody = gimple_bind_body (bind);
6604 gimplify_and_add (lock, &tbody);
6605 gimple_bind_set_body (bind, tbody);
6607 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6608 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6609 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6610 gimple_omp_set_body (stmt, NULL);
6612 tbody = gimple_bind_body (bind);
6613 gimplify_and_add (unlock, &tbody);
6614 gimple_bind_set_body (bind, tbody);
6616 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6618 pop_gimplify_context (bind);
6619 gimple_bind_append_vars (bind, ctx->block_vars);
6620 BLOCK_VARS (block) = gimple_bind_vars (bind);
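/* Sketch of the two resulting forms (for exposition only):

       // #pragma omp critical
       GOMP_critical_start ();  BODY;  GOMP_critical_end ();

       // #pragma omp critical (foo)
       GOMP_critical_name_start (&.gomp_critical_user_foo);
       BODY;
       GOMP_critical_name_end (&.gomp_critical_user_foo);

   The named mutex is a public common symbol, so every translation
   unit (in any language) naming the same critical section shares a
   single lock.  */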
6623 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6624 for a lastprivate clause. Given a loop control predicate of (V
6625 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6626 is appended to *DLIST, iterator initialization is appended to
6627 *BODY_P. */
6629 static void
6630 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6631 gimple_seq *dlist, struct omp_context *ctx)
6633 tree clauses, cond, vinit;
6634 enum tree_code cond_code;
6635 gimple_seq stmts;
6637 cond_code = fd->loop.cond_code;
6638 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6640 /* When possible, use a strict equality expression. This can let VRP-type
6641 optimizations deduce the value and remove a copy. */
6642 if (tree_fits_shwi_p (fd->loop.step))
6644 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6645 if (step == 1 || step == -1)
6646 cond_code = EQ_EXPR;
6649 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6650 || gimple_omp_for_grid_phony (fd->for_stmt))
6651 cond = omp_grid_lastprivate_predicate (fd);
6652 else
6654 tree n2 = fd->loop.n2;
6655 if (fd->collapse > 1
6656 && TREE_CODE (n2) != INTEGER_CST
6657 && gimple_omp_for_combined_into_p (fd->for_stmt))
6659 struct omp_context *taskreg_ctx = NULL;
6660 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6662 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6663 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6664 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6666 if (gimple_omp_for_combined_into_p (gfor))
6668 gcc_assert (ctx->outer->outer
6669 && is_parallel_ctx (ctx->outer->outer));
6670 taskreg_ctx = ctx->outer->outer;
6672 else
6674 struct omp_for_data outer_fd;
6675 omp_extract_for_data (gfor, &outer_fd, NULL);
6676 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6679 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6680 taskreg_ctx = ctx->outer->outer;
6682 else if (is_taskreg_ctx (ctx->outer))
6683 taskreg_ctx = ctx->outer;
6684 if (taskreg_ctx)
6686 int i;
6687 tree taskreg_clauses
6688 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6689 tree innerc = omp_find_clause (taskreg_clauses,
6690 OMP_CLAUSE__LOOPTEMP_);
6691 gcc_assert (innerc);
6692 for (i = 0; i < fd->collapse; i++)
6694 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6695 OMP_CLAUSE__LOOPTEMP_);
6696 gcc_assert (innerc);
6698 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6699 OMP_CLAUSE__LOOPTEMP_);
6700 if (innerc)
6701 n2 = fold_convert (TREE_TYPE (n2),
6702 lookup_decl (OMP_CLAUSE_DECL (innerc),
6703 taskreg_ctx));
6706 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6709 clauses = gimple_omp_for_clauses (fd->for_stmt);
6710 stmts = NULL;
6711 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6712 if (!gimple_seq_empty_p (stmts))
6714 gimple_seq_add_seq (&stmts, *dlist);
6715 *dlist = stmts;
6717 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6718 vinit = fd->loop.n1;
6719 if (cond_code == EQ_EXPR
6720 && tree_fits_shwi_p (fd->loop.n2)
6721 && ! integer_zerop (fd->loop.n2))
6722 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6723 else
6724 vinit = unshare_expr (vinit);
6726 /* Initialize the iterator variable, so that threads that don't execute
6727 any iterations don't execute the lastprivate clauses by accident. */
6728 gimplify_assign (fd->loop.v, vinit, body_p);
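/* An illustrative sketch, names invented: for

       #pragma omp for lastprivate (x)
       for (i = 0; i < n; i++) ...

   the step is 1, so the gate uses strict equality and *DLIST becomes

       if (i == n) { x = <x's private copy>; ... }

   while *BODY_P starts by initializing i, so threads that receive no
   iterations cannot satisfy the predicate by accident.  */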
6733 /* Lower code for an OMP loop directive. */
6735 static void
6736 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6738 tree *rhs_p, block;
6739 struct omp_for_data fd, *fdp = NULL;
6740 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6741 gbind *new_stmt;
6742 gimple_seq omp_for_body, body, dlist;
6743 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6744 size_t i;
6746 push_gimplify_context ();
6748 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6750 block = make_node (BLOCK);
6751 new_stmt = gimple_build_bind (NULL, NULL, block);
6752 /* Replace at gsi right away, so that 'stmt' is no longer a member
6753 of a sequence, as we're going to add it to a different
6754 one below. */
6755 gsi_replace (gsi_p, new_stmt, true);
6757 /* Move declaration of temporaries in the loop body before we make
6758 it go away. */
6759 omp_for_body = gimple_omp_body (stmt);
6760 if (!gimple_seq_empty_p (omp_for_body)
6761 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6763 gbind *inner_bind
6764 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6765 tree vars = gimple_bind_vars (inner_bind);
6766 gimple_bind_append_vars (new_stmt, vars);
6767 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6768 keep them on the inner_bind and its block. */
6769 gimple_bind_set_vars (inner_bind, NULL_TREE);
6770 if (gimple_bind_block (inner_bind))
6771 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6774 if (gimple_omp_for_combined_into_p (stmt))
6776 omp_extract_for_data (stmt, &fd, NULL);
6777 fdp = &fd;
6779 /* We need two temporaries with fd.loop.v type (istart/iend)
6780 and then (fd.collapse - 1) temporaries with the same
6781 type for count2 ... countN-1 vars if not constant. */
6782 size_t count = 2;
6783 tree type = fd.iter_type;
6784 if (fd.collapse > 1
6785 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6786 count += fd.collapse - 1;
6787 bool taskreg_for
6788 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6789 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6790 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6791 tree simtc = NULL;
6792 tree clauses = *pc;
6793 if (taskreg_for)
6794 outerc
6795 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6796 OMP_CLAUSE__LOOPTEMP_);
6797 if (ctx->simt_stmt)
6798 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6799 OMP_CLAUSE__LOOPTEMP_);
6800 for (i = 0; i < count; i++)
6802 tree temp;
6803 if (taskreg_for)
6805 gcc_assert (outerc);
6806 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6807 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6808 OMP_CLAUSE__LOOPTEMP_);
6810 else
6812 /* If there are 2 adjacent SIMD stmts, one with _simt_
6813 clause, another without, make sure they have the same
6814 decls in _looptemp_ clauses, because the outer stmt
6815 they are combined into will look up just one inner_stmt. */
6816 if (ctx->simt_stmt)
6817 temp = OMP_CLAUSE_DECL (simtc);
6818 else
6819 temp = create_tmp_var (type);
6820 insert_decl_map (&ctx->outer->cb, temp, temp);
6822 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6823 OMP_CLAUSE_DECL (*pc) = temp;
6824 pc = &OMP_CLAUSE_CHAIN (*pc);
6825 if (ctx->simt_stmt)
6826 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6827 OMP_CLAUSE__LOOPTEMP_);
6829 *pc = clauses;
6832 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6833 dlist = NULL;
6834 body = NULL;
6835 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6836 fdp);
6837 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6839 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6841 /* Lower the header expressions. At this point, we can assume that
6842 the header is of the form:
6844 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6846 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6847 using the .omp_data_s mapping, if needed. */
6848 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6850 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6851 if (!is_gimple_min_invariant (*rhs_p))
6852 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6853 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6854 recompute_tree_invariant_for_addr_expr (*rhs_p);
6856 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6857 if (!is_gimple_min_invariant (*rhs_p))
6858 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6859 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6860 recompute_tree_invariant_for_addr_expr (*rhs_p);
6862 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6863 if (!is_gimple_min_invariant (*rhs_p))
6864 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6867 /* Once lowered, extract the bounds and clauses. */
6868 omp_extract_for_data (stmt, &fd, NULL);
6870 if (is_gimple_omp_oacc (ctx->stmt)
6871 && !ctx_in_oacc_kernels_region (ctx))
6872 lower_oacc_head_tail (gimple_location (stmt),
6873 gimple_omp_for_clauses (stmt),
6874 &oacc_head, &oacc_tail, ctx);
6876 /* Add OpenACC partitioning and reduction markers just before the loop. */
6877 if (oacc_head)
6878 gimple_seq_add_seq (&body, oacc_head);
6880 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6882 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6883 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6884 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6885 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6887 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6888 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6889 OMP_CLAUSE_LINEAR_STEP (c)
6890 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6891 ctx);
6894 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6895 && gimple_omp_for_grid_phony (stmt));
6896 if (!phony_loop)
6897 gimple_seq_add_stmt (&body, stmt);
6898 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6900 if (!phony_loop)
6901 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6902 fd.loop.v));
6904 /* After the loop, add exit clauses. */
6905 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6907 if (ctx->cancellable)
6908 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6910 gimple_seq_add_seq (&body, dlist);
6912 body = maybe_catch_exception (body);
6914 if (!phony_loop)
6916 /* Region exit marker goes at the end of the loop body. */
6917 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6918 maybe_add_implicit_barrier_cancel (ctx, &body);
6921 /* Add OpenACC joining and reduction markers just after the loop. */
6922 if (oacc_tail)
6923 gimple_seq_add_seq (&body, oacc_tail);
6925 pop_gimplify_context (new_stmt);
6927 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6928 maybe_remove_omp_member_access_dummy_vars (new_stmt);
6929 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6930 if (BLOCK_VARS (block))
6931 TREE_USED (block) = 1;
6933 gimple_bind_set_body (new_stmt, body);
6934 gimple_omp_set_body (stmt, NULL);
6935 gimple_omp_for_set_pre_body (stmt, NULL);
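/* Overall, the lowered loop has roughly this layout (illustrative):

       <input clauses, pre-body>
       [OpenACC head markers]
       GIMPLE_OMP_FOR <V = N1; V cond N2; V += STEP>
       LOOP BODY
       GIMPLE_OMP_CONTINUE (V, V)
       <reductions>
       <lastprivate predicate, destructors>     // dlist
       GIMPLE_OMP_RETURN [nowait]
       [OpenACC tail markers]

   pass_expand_omp later replaces the OMP_FOR/OMP_CONTINUE pair with
   explicit iteration-dispatch code.  */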
6938 /* Callback for walk_stmts. Check that the statement sequence contains
6939 exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS, ignoring debug stmts. */
6941 static tree
6942 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6943 bool *handled_ops_p,
6944 struct walk_stmt_info *wi)
6946 int *info = (int *) wi->info;
6947 gimple *stmt = gsi_stmt (*gsi_p);
6949 *handled_ops_p = true;
6950 switch (gimple_code (stmt))
6952 WALK_SUBSTMTS;
6954 case GIMPLE_DEBUG:
6955 break;
6956 case GIMPLE_OMP_FOR:
6957 case GIMPLE_OMP_SECTIONS:
6958 *info = *info == 0 ? 1 : -1;
6959 break;
6960 default:
6961 *info = -1;
6962 break;
6964 return NULL;
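/* E.g., for

       #pragma omp parallel
       #pragma omp for
       for (...) ...

   the walk leaves *INFO == 1 and the caller marks the parallel as
   combined; a second workshare or any other statement flips *INFO
   to -1.  */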
6967 struct omp_taskcopy_context
6969 /* This field must be at the beginning, as we do "inheritance": Some
6970 callback functions for tree-inline.c (e.g., omp_copy_decl)
6971 receive a copy_body_data pointer that is up-casted to an
6972 omp_context pointer. */
6973 copy_body_data cb;
6974 omp_context *ctx;
6977 static tree
6978 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6980 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6982 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6983 return create_tmp_var (TREE_TYPE (var));
6985 return var;
6988 static tree
6989 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6991 tree name, new_fields = NULL, type, f;
6993 type = lang_hooks.types.make_type (RECORD_TYPE);
6994 name = DECL_NAME (TYPE_NAME (orig_type));
6995 name = build_decl (gimple_location (tcctx->ctx->stmt),
6996 TYPE_DECL, name, type);
6997 TYPE_NAME (type) = name;
6999 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7001 tree new_f = copy_node (f);
7002 DECL_CONTEXT (new_f) = type;
7003 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7004 TREE_CHAIN (new_f) = new_fields;
7005 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7006 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7007 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7008 &tcctx->cb, NULL);
7009 new_fields = new_f;
7010 tcctx->cb.decl_map->put (f, new_f);
7012 TYPE_FIELDS (type) = nreverse (new_fields);
7013 layout_type (type);
7014 return type;
7017 /* Create the task copyfn, which copies shared variable pointers and copy-constructs firstprivate variables into the task's data record. */
7019 static void
7020 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7022 struct function *child_cfun;
7023 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7024 tree record_type, srecord_type, bind, list;
7025 bool record_needs_remap = false, srecord_needs_remap = false;
7026 splay_tree_node n;
7027 struct omp_taskcopy_context tcctx;
7028 location_t loc = gimple_location (task_stmt);
7030 child_fn = gimple_omp_task_copy_fn (task_stmt);
7031 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7032 gcc_assert (child_cfun->cfg == NULL);
7033 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7035 /* Reset DECL_CONTEXT on function arguments. */
7036 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7037 DECL_CONTEXT (t) = child_fn;
7039 /* Populate the function. */
7040 push_gimplify_context ();
7041 push_cfun (child_cfun);
7043 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7044 TREE_SIDE_EFFECTS (bind) = 1;
7045 list = NULL;
7046 DECL_SAVED_TREE (child_fn) = bind;
7047 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7049 /* Remap src and dst argument types if needed. */
7050 record_type = ctx->record_type;
7051 srecord_type = ctx->srecord_type;
7052 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7053 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7055 record_needs_remap = true;
7056 break;
7058 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7059 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7061 srecord_needs_remap = true;
7062 break;
7065 if (record_needs_remap || srecord_needs_remap)
7067 memset (&tcctx, '\0', sizeof (tcctx));
7068 tcctx.cb.src_fn = ctx->cb.src_fn;
7069 tcctx.cb.dst_fn = child_fn;
7070 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7071 gcc_checking_assert (tcctx.cb.src_node);
7072 tcctx.cb.dst_node = tcctx.cb.src_node;
7073 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7074 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7075 tcctx.cb.eh_lp_nr = 0;
7076 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7077 tcctx.cb.decl_map = new hash_map<tree, tree>;
7078 tcctx.ctx = ctx;
7080 if (record_needs_remap)
7081 record_type = task_copyfn_remap_type (&tcctx, record_type);
7082 if (srecord_needs_remap)
7083 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7085 else
7086 tcctx.cb.decl_map = NULL;
7088 arg = DECL_ARGUMENTS (child_fn);
7089 TREE_TYPE (arg) = build_pointer_type (record_type);
7090 sarg = DECL_CHAIN (arg);
7091 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7093 /* First pass: initialize temporaries used in record_type and srecord_type
7094 sizes and field offsets. */
7095 if (tcctx.cb.decl_map)
7096 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7099 tree *p;
7101 decl = OMP_CLAUSE_DECL (c);
7102 p = tcctx.cb.decl_map->get (decl);
7103 if (p == NULL)
7104 continue;
7105 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7106 sf = (tree) n->value;
7107 sf = *tcctx.cb.decl_map->get (sf);
7108 src = build_simple_mem_ref_loc (loc, sarg);
7109 src = omp_build_component_ref (src, sf);
7110 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7111 append_to_statement_list (t, &list);
7114 /* Second pass: copy shared var pointers and copy-construct non-VLA
7115 firstprivate vars. */
7116 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7117 switch (OMP_CLAUSE_CODE (c))
7119 splay_tree_key key;
7120 case OMP_CLAUSE_SHARED:
7121 decl = OMP_CLAUSE_DECL (c);
7122 key = (splay_tree_key) decl;
7123 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7124 key = (splay_tree_key) &DECL_UID (decl);
7125 n = splay_tree_lookup (ctx->field_map, key);
7126 if (n == NULL)
7127 break;
7128 f = (tree) n->value;
7129 if (tcctx.cb.decl_map)
7130 f = *tcctx.cb.decl_map->get (f);
7131 n = splay_tree_lookup (ctx->sfield_map, key);
7132 sf = (tree) n->value;
7133 if (tcctx.cb.decl_map)
7134 sf = *tcctx.cb.decl_map->get (sf);
7135 src = build_simple_mem_ref_loc (loc, sarg);
7136 src = omp_build_component_ref (src, sf);
7137 dst = build_simple_mem_ref_loc (loc, arg);
7138 dst = omp_build_component_ref (dst, f);
7139 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7140 append_to_statement_list (t, &list);
7141 break;
7142 case OMP_CLAUSE_FIRSTPRIVATE:
7143 decl = OMP_CLAUSE_DECL (c);
7144 if (is_variable_sized (decl))
7145 break;
7146 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7147 if (n == NULL)
7148 break;
7149 f = (tree) n->value;
7150 if (tcctx.cb.decl_map)
7151 f = *tcctx.cb.decl_map->get (f);
7152 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7153 if (n != NULL)
7155 sf = (tree) n->value;
7156 if (tcctx.cb.decl_map)
7157 sf = *tcctx.cb.decl_map->get (sf);
7158 src = build_simple_mem_ref_loc (loc, sarg);
7159 src = omp_build_component_ref (src, sf);
7160 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7161 src = build_simple_mem_ref_loc (loc, src);
7163 else
7164 src = decl;
7165 dst = build_simple_mem_ref_loc (loc, arg);
7166 dst = omp_build_component_ref (dst, f);
7167 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7168 append_to_statement_list (t, &list);
7169 break;
7170 case OMP_CLAUSE_PRIVATE:
7171 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7172 break;
7173 decl = OMP_CLAUSE_DECL (c);
7174 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7175 f = (tree) n->value;
7176 if (tcctx.cb.decl_map)
7177 f = *tcctx.cb.decl_map->get (f);
7178 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7179 if (n != NULL)
7181 sf = (tree) n->value;
7182 if (tcctx.cb.decl_map)
7183 sf = *tcctx.cb.decl_map->get (sf);
7184 src = build_simple_mem_ref_loc (loc, sarg);
7185 src = omp_build_component_ref (src, sf);
7186 if (use_pointer_for_field (decl, NULL))
7187 src = build_simple_mem_ref_loc (loc, src);
7189 else
7190 src = decl;
7191 dst = build_simple_mem_ref_loc (loc, arg);
7192 dst = omp_build_component_ref (dst, f);
7193 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7194 append_to_statement_list (t, &list);
7195 break;
7196 default:
7197 break;
7200 /* Last pass: handle VLA firstprivates. */
7201 if (tcctx.cb.decl_map)
7202 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7203 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7205 tree ind, ptr, df;
7207 decl = OMP_CLAUSE_DECL (c);
7208 if (!is_variable_sized (decl))
7209 continue;
7210 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7211 if (n == NULL)
7212 continue;
7213 f = (tree) n->value;
7214 f = *tcctx.cb.decl_map->get (f);
7215 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7216 ind = DECL_VALUE_EXPR (decl);
7217 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7218 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7219 n = splay_tree_lookup (ctx->sfield_map,
7220 (splay_tree_key) TREE_OPERAND (ind, 0));
7221 sf = (tree) n->value;
7222 sf = *tcctx.cb.decl_map->get (sf);
7223 src = build_simple_mem_ref_loc (loc, sarg);
7224 src = omp_build_component_ref (src, sf);
7225 src = build_simple_mem_ref_loc (loc, src);
7226 dst = build_simple_mem_ref_loc (loc, arg);
7227 dst = omp_build_component_ref (dst, f);
7228 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7229 append_to_statement_list (t, &list);
7230 n = splay_tree_lookup (ctx->field_map,
7231 (splay_tree_key) TREE_OPERAND (ind, 0));
7232 df = (tree) n->value;
7233 df = *tcctx.cb.decl_map->get (df);
7234 ptr = build_simple_mem_ref_loc (loc, arg);
7235 ptr = omp_build_component_ref (ptr, df);
7236 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7237 build_fold_addr_expr_loc (loc, dst));
7238 append_to_statement_list (t, &list);
7241 t = build1 (RETURN_EXPR, void_type_node, NULL);
7242 append_to_statement_list (t, &list);
7244 if (tcctx.cb.decl_map)
7245 delete tcctx.cb.decl_map;
7246 pop_gimplify_context (NULL);
7247 BIND_EXPR_BODY (bind) = list;
7248 pop_cfun ();
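/* A hedged illustration (struct and helper names invented): for

       #pragma omp task shared (s) firstprivate (fp)

   where FP's type has a copy constructor, the generated copyfn is
   roughly

       void copyfn (struct .omp_data_t *dst, struct .omp_data_s *src)
       {
         dst->s = src->s;                    // forward shared pointer
         copy_ctor (&dst->fp, &src->fp);     // construct firstprivate
       }

   with the extra passes above handling VLA sizes and value-exprs.  */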
7251 static void
7252 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7254 tree c, clauses;
7255 gimple *g;
7256 size_t n_in = 0, n_out = 0, idx = 2, i;
7258 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7259 gcc_assert (clauses);
7260 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7261 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7262 switch (OMP_CLAUSE_DEPEND_KIND (c))
7264 case OMP_CLAUSE_DEPEND_IN:
7265 n_in++;
7266 break;
7267 case OMP_CLAUSE_DEPEND_OUT:
7268 case OMP_CLAUSE_DEPEND_INOUT:
7269 n_out++;
7270 break;
7271 case OMP_CLAUSE_DEPEND_SOURCE:
7272 case OMP_CLAUSE_DEPEND_SINK:
7273 /* FALLTHRU */
7274 default:
7275 gcc_unreachable ();
7277 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7278 tree array = create_tmp_var (type);
7279 TREE_ADDRESSABLE (array) = 1;
7280 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7281 NULL_TREE);
7282 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7283 gimple_seq_add_stmt (iseq, g);
7284 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7285 NULL_TREE);
7286 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7287 gimple_seq_add_stmt (iseq, g);
7288 for (i = 0; i < 2; i++)
7290 if ((i ? n_in : n_out) == 0)
7291 continue;
7292 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7293 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7294 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7296 tree t = OMP_CLAUSE_DECL (c);
7297 t = fold_convert (ptr_type_node, t);
7298 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7299 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7300 NULL_TREE, NULL_TREE);
7301 g = gimple_build_assign (r, t);
7302 gimple_seq_add_stmt (iseq, g);
7305 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7306 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7307 OMP_CLAUSE_CHAIN (c) = *pclauses;
7308 *pclauses = c;
7309 tree clobber = build_constructor (type, NULL);
7310 TREE_THIS_VOLATILE (clobber) = 1;
7311 g = gimple_build_assign (array, clobber);
7312 gimple_seq_add_stmt (oseq, g);
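/* Illustrative layout of the generated array (names invented): for

       depend(out: y) depend(in: x)

   we have N_IN = 1 and N_OUT = 1, giving

       void *.dep[4];
       .dep[0] = (void *) 2;   // total number of depend addresses
       .dep[1] = (void *) 1;   // how many of them are out/inout
       .dep[2] = &y;           // out/inout entries come first
       .dep[3] = &x;           // then the in entries

   &.dep is then chained onto the clause list for the runtime to
   consume, and the trailing clobber ends the array's lifetime.  */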
7315 /* Lower the OpenMP parallel or task directive in the current statement
7316 in GSI_P. CTX holds context information for the directive. */
7318 static void
7319 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7321 tree clauses;
7322 tree child_fn, t;
7323 gimple *stmt = gsi_stmt (*gsi_p);
7324 gbind *par_bind, *bind, *dep_bind = NULL;
7325 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7326 location_t loc = gimple_location (stmt);
7328 clauses = gimple_omp_taskreg_clauses (stmt);
7329 par_bind
7330 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7331 par_body = gimple_bind_body (par_bind);
7332 child_fn = ctx->cb.dst_fn;
7333 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7334 && !gimple_omp_parallel_combined_p (stmt))
7336 struct walk_stmt_info wi;
7337 int ws_num = 0;
7339 memset (&wi, 0, sizeof (wi));
7340 wi.info = &ws_num;
7341 wi.val_only = true;
7342 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7343 if (ws_num == 1)
7344 gimple_omp_parallel_set_combined_p (stmt, true);
7346 gimple_seq dep_ilist = NULL;
7347 gimple_seq dep_olist = NULL;
7348 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7349 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7351 push_gimplify_context ();
7352 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7353 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7354 &dep_ilist, &dep_olist);
7357 if (ctx->srecord_type)
7358 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7360 push_gimplify_context ();
7362 par_olist = NULL;
7363 par_ilist = NULL;
7364 par_rlist = NULL;
7365 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7366 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7367 if (phony_construct && ctx->record_type)
7369 gcc_checking_assert (!ctx->receiver_decl);
7370 ctx->receiver_decl = create_tmp_var
7371 (build_reference_type (ctx->record_type), ".omp_rec");
7373 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7374 lower_omp (&par_body, ctx);
7375 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7376 lower_reduction_clauses (clauses, &par_rlist, ctx);
7378 /* Declare all the variables created by mapping and the variables
7379 declared in the scope of the parallel body. */
7380 record_vars_into (ctx->block_vars, child_fn);
7381 maybe_remove_omp_member_access_dummy_vars (par_bind);
7382 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7384 if (ctx->record_type)
7386 ctx->sender_decl
7387 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7388 : ctx->record_type, ".omp_data_o");
7389 DECL_NAMELESS (ctx->sender_decl) = 1;
7390 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7391 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7394 olist = NULL;
7395 ilist = NULL;
7396 lower_send_clauses (clauses, &ilist, &olist, ctx);
7397 lower_send_shared_vars (&ilist, &olist, ctx);
7399 if (ctx->record_type)
7401 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7402 TREE_THIS_VOLATILE (clobber) = 1;
7403 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7404 clobber));
7407 /* Once all the expansions are done, sequence all the different
7408 fragments inside gimple_omp_body. */
7410 new_body = NULL;
7412 if (ctx->record_type)
7414 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7415 /* fixup_child_record_type might have changed receiver_decl's type. */
7416 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7417 gimple_seq_add_stmt (&new_body,
7418 gimple_build_assign (ctx->receiver_decl, t));
7421 gimple_seq_add_seq (&new_body, par_ilist);
7422 gimple_seq_add_seq (&new_body, par_body);
7423 gimple_seq_add_seq (&new_body, par_rlist);
7424 if (ctx->cancellable)
7425 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7426 gimple_seq_add_seq (&new_body, par_olist);
7427 new_body = maybe_catch_exception (new_body);
7428 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7429 gimple_seq_add_stmt (&new_body,
7430 gimple_build_omp_continue (integer_zero_node,
7431 integer_zero_node));
7432 if (!phony_construct)
7434 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7435 gimple_omp_set_body (stmt, new_body);
7438 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7439 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7440 gimple_bind_add_seq (bind, ilist);
7441 if (!phony_construct)
7442 gimple_bind_add_stmt (bind, stmt);
7443 else
7444 gimple_bind_add_seq (bind, new_body);
7445 gimple_bind_add_seq (bind, olist);
7447 pop_gimplify_context (NULL);
7449 if (dep_bind)
7451 gimple_bind_add_seq (dep_bind, dep_ilist);
7452 gimple_bind_add_stmt (dep_bind, bind);
7453 gimple_bind_add_seq (dep_bind, dep_olist);
7454 pop_gimplify_context (dep_bind);
7458 /* Lower the GIMPLE_OMP_TARGET in the current statement
7459 in GSI_P. CTX holds context information for the directive. */
7461 static void
7462 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7464 tree clauses;
7465 tree child_fn, t, c;
7466 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7467 gbind *tgt_bind, *bind, *dep_bind = NULL;
7468 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7469 location_t loc = gimple_location (stmt);
7470 bool offloaded, data_region;
7471 unsigned int map_cnt = 0;
7473 offloaded = is_gimple_omp_offloaded (stmt);
7474 switch (gimple_omp_target_kind (stmt))
7476 case GF_OMP_TARGET_KIND_REGION:
7477 case GF_OMP_TARGET_KIND_UPDATE:
7478 case GF_OMP_TARGET_KIND_ENTER_DATA:
7479 case GF_OMP_TARGET_KIND_EXIT_DATA:
7480 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7481 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7482 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7483 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7484 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7485 data_region = false;
7486 break;
7487 case GF_OMP_TARGET_KIND_DATA:
7488 case GF_OMP_TARGET_KIND_OACC_DATA:
7489 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7490 data_region = true;
7491 break;
7492 default:
7493 gcc_unreachable ();
7496 clauses = gimple_omp_target_clauses (stmt);
7498 gimple_seq dep_ilist = NULL;
7499 gimple_seq dep_olist = NULL;
7500 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7502 push_gimplify_context ();
7503 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7504 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7505 &dep_ilist, &dep_olist);
7508 tgt_bind = NULL;
7509 tgt_body = NULL;
7510 if (offloaded)
7512 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7513 tgt_body = gimple_bind_body (tgt_bind);
7515 else if (data_region)
7516 tgt_body = gimple_omp_body (stmt);
7517 child_fn = ctx->cb.dst_fn;
7519 push_gimplify_context ();
7520 fplist = NULL;
7522 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7523 switch (OMP_CLAUSE_CODE (c))
7525 tree var, x;
7527 default:
7528 break;
7529 case OMP_CLAUSE_MAP:
7530 #if CHECKING_P
7531 /* First check what we're prepared to handle in the following. */
7532 switch (OMP_CLAUSE_MAP_KIND (c))
7534 case GOMP_MAP_ALLOC:
7535 case GOMP_MAP_TO:
7536 case GOMP_MAP_FROM:
7537 case GOMP_MAP_TOFROM:
7538 case GOMP_MAP_POINTER:
7539 case GOMP_MAP_TO_PSET:
7540 case GOMP_MAP_DELETE:
7541 case GOMP_MAP_RELEASE:
7542 case GOMP_MAP_ALWAYS_TO:
7543 case GOMP_MAP_ALWAYS_FROM:
7544 case GOMP_MAP_ALWAYS_TOFROM:
7545 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7546 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7547 case GOMP_MAP_STRUCT:
7548 case GOMP_MAP_ALWAYS_POINTER:
7549 break;
7550 case GOMP_MAP_FORCE_ALLOC:
7551 case GOMP_MAP_FORCE_TO:
7552 case GOMP_MAP_FORCE_FROM:
7553 case GOMP_MAP_FORCE_TOFROM:
7554 case GOMP_MAP_FORCE_PRESENT:
7555 case GOMP_MAP_FORCE_DEVICEPTR:
7556 case GOMP_MAP_DEVICE_RESIDENT:
7557 case GOMP_MAP_LINK:
7558 gcc_assert (is_gimple_omp_oacc (stmt));
7559 break;
7560 default:
7561 gcc_unreachable ();
7563 #endif
7564 /* FALLTHRU */
7565 case OMP_CLAUSE_TO:
7566 case OMP_CLAUSE_FROM:
7567 oacc_firstprivate:
7568 var = OMP_CLAUSE_DECL (c);
7569 if (!DECL_P (var))
7571 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7572 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7573 && (OMP_CLAUSE_MAP_KIND (c)
7574 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7575 map_cnt++;
7576 continue;
7579 if (DECL_SIZE (var)
7580 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7582 tree var2 = DECL_VALUE_EXPR (var);
7583 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7584 var2 = TREE_OPERAND (var2, 0);
7585 gcc_assert (DECL_P (var2));
7586 var = var2;
7589 if (offloaded
7590 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7591 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7592 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7594 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7596 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7597 && varpool_node::get_create (var)->offloadable)
7598 continue;
7600 tree type = build_pointer_type (TREE_TYPE (var));
7601 tree new_var = lookup_decl (var, ctx);
7602 x = create_tmp_var_raw (type, get_name (new_var));
7603 gimple_add_tmp_var (x);
7604 x = build_simple_mem_ref (x);
7605 SET_DECL_VALUE_EXPR (new_var, x);
7606 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7608 continue;
7611 if (!maybe_lookup_field (var, ctx))
7612 continue;
7614 /* Don't remap oacc parallel reduction variables, because the
7615 intermediate result must be local to each gang. */
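/* E.g., with

     #pragma acc parallel reduction (+:sum) copy (sum)

   each gang accumulates into its own private SUM, so SUM must keep
   referring to gang-local storage here instead of being remapped to
   the receiver object; only the combined result is copied out.  */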
7616 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7617 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7619 x = build_receiver_ref (var, true, ctx);
7620 tree new_var = lookup_decl (var, ctx);
7622 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7623 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7624 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7625 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7626 x = build_simple_mem_ref (x);
7627 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7629 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7630 if (omp_is_reference (new_var)
7631 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
7633 /* Create a local object to hold the instance
7634 value. */
7635 tree type = TREE_TYPE (TREE_TYPE (new_var));
7636 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7637 tree inst = create_tmp_var (type, id);
7638 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7639 x = build_fold_addr_expr (inst);
7641 gimplify_assign (new_var, x, &fplist);
7643 else if (DECL_P (new_var))
7645 SET_DECL_VALUE_EXPR (new_var, x);
7646 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7648 else
7649 gcc_unreachable ();
7651 map_cnt++;
7652 break;
7654 case OMP_CLAUSE_FIRSTPRIVATE:
7655 if (is_oacc_parallel (ctx))
7656 goto oacc_firstprivate;
7657 map_cnt++;
7658 var = OMP_CLAUSE_DECL (c);
7659 if (!omp_is_reference (var)
7660 && !is_gimple_reg_type (TREE_TYPE (var)))
7662 tree new_var = lookup_decl (var, ctx);
7663 if (is_variable_sized (var))
7665 tree pvar = DECL_VALUE_EXPR (var);
7666 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7667 pvar = TREE_OPERAND (pvar, 0);
7668 gcc_assert (DECL_P (pvar));
7669 tree new_pvar = lookup_decl (pvar, ctx);
7670 x = build_fold_indirect_ref (new_pvar);
7671 TREE_THIS_NOTRAP (x) = 1;
7673 else
7674 x = build_receiver_ref (var, true, ctx);
7675 SET_DECL_VALUE_EXPR (new_var, x);
7676 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7678 break;
7680 case OMP_CLAUSE_PRIVATE:
7681 if (is_gimple_omp_oacc (ctx->stmt))
7682 break;
7683 var = OMP_CLAUSE_DECL (c);
7684 if (is_variable_sized (var))
7686 tree new_var = lookup_decl (var, ctx);
7687 tree pvar = DECL_VALUE_EXPR (var);
7688 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7689 pvar = TREE_OPERAND (pvar, 0);
7690 gcc_assert (DECL_P (pvar));
7691 tree new_pvar = lookup_decl (pvar, ctx);
7692 x = build_fold_indirect_ref (new_pvar);
7693 TREE_THIS_NOTRAP (x) = 1;
7694 SET_DECL_VALUE_EXPR (new_var, x);
7695 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7697 break;
7699 case OMP_CLAUSE_USE_DEVICE_PTR:
7700 case OMP_CLAUSE_IS_DEVICE_PTR:
7701 var = OMP_CLAUSE_DECL (c);
7702 map_cnt++;
7703 if (is_variable_sized (var))
7705 tree new_var = lookup_decl (var, ctx);
7706 tree pvar = DECL_VALUE_EXPR (var);
7707 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7708 pvar = TREE_OPERAND (pvar, 0);
7709 gcc_assert (DECL_P (pvar));
7710 tree new_pvar = lookup_decl (pvar, ctx);
7711 x = build_fold_indirect_ref (new_pvar);
7712 TREE_THIS_NOTRAP (x) = 1;
7713 SET_DECL_VALUE_EXPR (new_var, x);
7714 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7716 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7718 tree new_var = lookup_decl (var, ctx);
7719 tree type = build_pointer_type (TREE_TYPE (var));
7720 x = create_tmp_var_raw (type, get_name (new_var));
7721 gimple_add_tmp_var (x);
7722 x = build_simple_mem_ref (x);
7723 SET_DECL_VALUE_EXPR (new_var, x);
7724 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7726 else
7728 tree new_var = lookup_decl (var, ctx);
7729 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7730 gimple_add_tmp_var (x);
7731 SET_DECL_VALUE_EXPR (new_var, x);
7732 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7734 break;
7737 if (offloaded)
7739 target_nesting_level++;
7740 lower_omp (&tgt_body, ctx);
7741 target_nesting_level--;
7743 else if (data_region)
7744 lower_omp (&tgt_body, ctx);
7746 if (offloaded)
7748 /* Declare all the variables created by mapping and the variables
7749 declared in the scope of the target body. */
7750 record_vars_into (ctx->block_vars, child_fn);
7751 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
7752 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7755 olist = NULL;
7756 ilist = NULL;
7757 if (ctx->record_type)
7759 ctx->sender_decl
7760 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7761 DECL_NAMELESS (ctx->sender_decl) = 1;
7762 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7763 t = make_tree_vec (3);
7764 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7765 TREE_VEC_ELT (t, 1)
7766 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7767 ".omp_data_sizes");
7768 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7769 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7770 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7771 tree tkind_type = short_unsigned_type_node;
7772 int talign_shift = 8;
7773 TREE_VEC_ELT (t, 2)
7774 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7775 ".omp_data_kinds");
7776 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7777 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7778 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7779 gimple_omp_target_set_data_arg (stmt, t);
7781 vec<constructor_elt, va_gc> *vsize;
7782 vec<constructor_elt, va_gc> *vkind;
7783 vec_alloc (vsize, map_cnt);
7784 vec_alloc (vkind, map_cnt);
7785 unsigned int map_idx = 0;
7787 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7788 switch (OMP_CLAUSE_CODE (c))
7790 tree ovar, nc, s, purpose, var, x, type;
7791 unsigned int talign;
7793 default:
7794 break;
7796 case OMP_CLAUSE_MAP:
7797 case OMP_CLAUSE_TO:
7798 case OMP_CLAUSE_FROM:
7799 oacc_firstprivate_map:
7800 nc = c;
7801 ovar = OMP_CLAUSE_DECL (c);
7802 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7803 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7804 || (OMP_CLAUSE_MAP_KIND (c)
7805 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7806 break;
7807 if (!DECL_P (ovar))
7809 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7810 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7812 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7813 == get_base_address (ovar));
7814 nc = OMP_CLAUSE_CHAIN (c);
7815 ovar = OMP_CLAUSE_DECL (nc);
7817 else
7819 tree x = build_sender_ref (ovar, ctx);
7820 tree v
7821 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7822 gimplify_assign (x, v, &ilist);
7823 nc = NULL_TREE;
7826 else
7828 if (DECL_SIZE (ovar)
7829 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7831 tree ovar2 = DECL_VALUE_EXPR (ovar);
7832 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7833 ovar2 = TREE_OPERAND (ovar2, 0);
7834 gcc_assert (DECL_P (ovar2));
7835 ovar = ovar2;
7837 if (!maybe_lookup_field (ovar, ctx))
7838 continue;
7841 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7842 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7843 talign = DECL_ALIGN_UNIT (ovar);
7844 if (nc)
7846 var = lookup_decl_in_outer_ctx (ovar, ctx);
7847 x = build_sender_ref (ovar, ctx);
7849 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7850 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7851 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7852 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7854 gcc_assert (offloaded);
7855 tree avar
7856 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7857 mark_addressable (avar);
7858 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7859 talign = DECL_ALIGN_UNIT (avar);
7860 avar = build_fold_addr_expr (avar);
7861 gimplify_assign (x, avar, &ilist);
7863 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7865 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7866 if (!omp_is_reference (var))
7868 if (is_gimple_reg (var)
7869 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7870 TREE_NO_WARNING (var) = 1;
7871 var = build_fold_addr_expr (var);
7873 else
7874 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7875 gimplify_assign (x, var, &ilist);
7877 else if (is_gimple_reg (var))
7879 gcc_assert (offloaded);
7880 tree avar = create_tmp_var (TREE_TYPE (var));
7881 mark_addressable (avar);
7882 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7883 if (GOMP_MAP_COPY_TO_P (map_kind)
7884 || map_kind == GOMP_MAP_POINTER
7885 || map_kind == GOMP_MAP_TO_PSET
7886 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7888 /* If we need to initialize a temporary
7889 with VAR because it is not addressable, and
7890 the variable hasn't been initialized yet, then
7891 we'll get a warning for the store to avar.
7892 Don't warn in that case; the mapping might
7893 be implicit. */
7894 TREE_NO_WARNING (var) = 1;
7895 gimplify_assign (avar, var, &ilist);
7897 avar = build_fold_addr_expr (avar);
7898 gimplify_assign (x, avar, &ilist);
7899 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7900 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7901 && !TYPE_READONLY (TREE_TYPE (var)))
7903 x = unshare_expr (x);
7904 x = build_simple_mem_ref (x);
7905 gimplify_assign (var, x, &olist);
7908 else
7910 var = build_fold_addr_expr (var);
7911 gimplify_assign (x, var, &ilist);
7914 s = NULL_TREE;
7915 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7917 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7918 s = TREE_TYPE (ovar);
7919 if (TREE_CODE (s) == REFERENCE_TYPE)
7920 s = TREE_TYPE (s);
7921 s = TYPE_SIZE_UNIT (s);
7923 else
7924 s = OMP_CLAUSE_SIZE (c);
7925 if (s == NULL_TREE)
7926 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7927 s = fold_convert (size_type_node, s);
7928 purpose = size_int (map_idx++);
7929 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7930 if (TREE_CODE (s) != INTEGER_CST)
7931 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7933 unsigned HOST_WIDE_INT tkind, tkind_zero;
7934 switch (OMP_CLAUSE_CODE (c))
7936 case OMP_CLAUSE_MAP:
7937 tkind = OMP_CLAUSE_MAP_KIND (c);
7938 tkind_zero = tkind;
7939 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7940 switch (tkind)
7942 case GOMP_MAP_ALLOC:
7943 case GOMP_MAP_TO:
7944 case GOMP_MAP_FROM:
7945 case GOMP_MAP_TOFROM:
7946 case GOMP_MAP_ALWAYS_TO:
7947 case GOMP_MAP_ALWAYS_FROM:
7948 case GOMP_MAP_ALWAYS_TOFROM:
7949 case GOMP_MAP_RELEASE:
7950 case GOMP_MAP_FORCE_TO:
7951 case GOMP_MAP_FORCE_FROM:
7952 case GOMP_MAP_FORCE_TOFROM:
7953 case GOMP_MAP_FORCE_PRESENT:
7954 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7955 break;
7956 case GOMP_MAP_DELETE:
7957 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
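/* FALLTHRU */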
7958 default:
7959 break;
7961 if (tkind_zero != tkind)
7963 if (integer_zerop (s))
7964 tkind = tkind_zero;
7965 else if (integer_nonzerop (s))
7966 tkind_zero = tkind;
7968 break;
7969 case OMP_CLAUSE_FIRSTPRIVATE:
7970 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7971 tkind = GOMP_MAP_TO;
7972 tkind_zero = tkind;
7973 break;
7974 case OMP_CLAUSE_TO:
7975 tkind = GOMP_MAP_TO;
7976 tkind_zero = tkind;
7977 break;
7978 case OMP_CLAUSE_FROM:
7979 tkind = GOMP_MAP_FROM;
7980 tkind_zero = tkind;
7981 break;
7982 default:
7983 gcc_unreachable ();
7985 gcc_checking_assert (tkind
7986 < (HOST_WIDE_INT_C (1U) << talign_shift));
7987 gcc_checking_assert (tkind_zero
7988 < (HOST_WIDE_INT_C (1U) << talign_shift));
7989 talign = ceil_log2 (talign);
7990 tkind |= talign << talign_shift;
7991 tkind_zero |= talign << talign_shift;
7992 gcc_checking_assert (tkind
7993 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
7994 gcc_checking_assert (tkind_zero
7995 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
7996 if (tkind == tkind_zero)
7997 x = build_int_cstu (tkind_type, tkind);
7998 else
8000 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8001 x = build3 (COND_EXPR, tkind_type,
8002 fold_build2 (EQ_EXPR, boolean_type_node,
8003 unshare_expr (s), size_zero_node),
8004 build_int_cstu (tkind_type, tkind_zero),
8005 build_int_cstu (tkind_type, tkind));
8007 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8008 if (nc && nc != c)
8009 c = nc;
8010 break;
8012 case OMP_CLAUSE_FIRSTPRIVATE:
8013 if (is_oacc_parallel (ctx))
8014 goto oacc_firstprivate_map;
8015 ovar = OMP_CLAUSE_DECL (c);
8016 if (omp_is_reference (ovar))
8017 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8018 else
8019 talign = DECL_ALIGN_UNIT (ovar);
8020 var = lookup_decl_in_outer_ctx (ovar, ctx);
8021 x = build_sender_ref (ovar, ctx);
8022 tkind = GOMP_MAP_FIRSTPRIVATE;
8023 type = TREE_TYPE (ovar);
8024 if (omp_is_reference (ovar))
8025 type = TREE_TYPE (type);
8026 if ((INTEGRAL_TYPE_P (type)
8027 && TYPE_PRECISION (type) <= POINTER_SIZE)
8028 || TREE_CODE (type) == POINTER_TYPE)
8030 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8031 tree t = var;
8032 if (omp_is_reference (var))
8033 t = build_simple_mem_ref (var);
8034 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8035 TREE_NO_WARNING (var) = 1;
8036 if (TREE_CODE (type) != POINTER_TYPE)
8037 t = fold_convert (pointer_sized_int_node, t);
8038 t = fold_convert (TREE_TYPE (x), t);
8039 gimplify_assign (x, t, &ilist);
8041 else if (omp_is_reference (var))
8042 gimplify_assign (x, var, &ilist);
8043 else if (is_gimple_reg (var))
8045 tree avar = create_tmp_var (TREE_TYPE (var));
8046 mark_addressable (avar);
8047 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8048 TREE_NO_WARNING (var) = 1;
8049 gimplify_assign (avar, var, &ilist);
8050 avar = build_fold_addr_expr (avar);
8051 gimplify_assign (x, avar, &ilist);
8053 else
8055 var = build_fold_addr_expr (var);
8056 gimplify_assign (x, var, &ilist);
8058 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8059 s = size_int (0);
8060 else if (omp_is_reference (ovar))
8061 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8062 else
8063 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8064 s = fold_convert (size_type_node, s);
8065 purpose = size_int (map_idx++);
8066 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8067 if (TREE_CODE (s) != INTEGER_CST)
8068 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8070 gcc_checking_assert (tkind
8071 < (HOST_WIDE_INT_C (1U) << talign_shift));
8072 talign = ceil_log2 (talign);
8073 tkind |= talign << talign_shift;
8074 gcc_checking_assert (tkind
8075 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8076 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8077 build_int_cstu (tkind_type, tkind));
8078 break;
8080 case OMP_CLAUSE_USE_DEVICE_PTR:
8081 case OMP_CLAUSE_IS_DEVICE_PTR:
8082 ovar = OMP_CLAUSE_DECL (c);
8083 var = lookup_decl_in_outer_ctx (ovar, ctx);
8084 x = build_sender_ref (ovar, ctx);
8085 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8086 tkind = GOMP_MAP_USE_DEVICE_PTR;
8087 else
8088 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8089 type = TREE_TYPE (ovar);
8090 if (TREE_CODE (type) == ARRAY_TYPE)
8091 var = build_fold_addr_expr (var);
8092 else
8094 if (omp_is_reference (ovar))
8096 type = TREE_TYPE (type);
8097 if (TREE_CODE (type) != ARRAY_TYPE)
8098 var = build_simple_mem_ref (var);
8099 var = fold_convert (TREE_TYPE (x), var);
8102 gimplify_assign (x, var, &ilist);
8103 s = size_int (0);
8104 purpose = size_int (map_idx++);
8105 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8106 gcc_checking_assert (tkind
8107 < (HOST_WIDE_INT_C (1U) << talign_shift));
8108 gcc_checking_assert (tkind
8109 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8110 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8111 build_int_cstu (tkind_type, tkind));
8112 break;
8115 gcc_assert (map_idx == map_cnt);
8117 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8118 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8119 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8120 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8121 for (int i = 1; i <= 2; i++)
8122 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8124 gimple_seq initlist = NULL;
8125 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8126 TREE_VEC_ELT (t, i)),
8127 &initlist, true, NULL_TREE);
8128 gimple_seq_add_seq (&ilist, initlist);
8130 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8131 NULL);
8132 TREE_THIS_VOLATILE (clobber) = 1;
8133 gimple_seq_add_stmt (&olist,
8134 gimple_build_assign (TREE_VEC_ELT (t, i),
8135 clobber));
8138 tree clobber = build_constructor (ctx->record_type, NULL);
8139 TREE_THIS_VOLATILE (clobber) = 1;
8140 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8141 clobber));
8144 /* Once all the expansions are done, sequence all the different
8145 fragments inside gimple_omp_body. */
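/* For an offloaded region the assembled body is roughly:

     .omp_data_i = &.omp_data_arr;  <- receiver setup
     <fplist>                       <- OpenACC firstprivate init
     <clause receive/adjust code>   <- built by the loops below
     <fork_seq> <tgt_body> <join_seq>
     OMP_RETURN

   where the fork/join sequences only materialize for OpenACC parallel
   reductions.  Names are illustrative only.  */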
8147 new_body = NULL;
8149 if (offloaded
8150 && ctx->record_type)
8152 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8153 /* fixup_child_record_type might have changed receiver_decl's type. */
8154 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8155 gimple_seq_add_stmt (&new_body,
8156 gimple_build_assign (ctx->receiver_decl, t));
8158 gimple_seq_add_seq (&new_body, fplist);
8160 if (offloaded || data_region)
8162 tree prev = NULL_TREE;
8163 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8164 switch (OMP_CLAUSE_CODE (c))
8166 tree var, x;
8167 default:
8168 break;
8169 case OMP_CLAUSE_FIRSTPRIVATE:
8170 if (is_gimple_omp_oacc (ctx->stmt))
8171 break;
8172 var = OMP_CLAUSE_DECL (c);
8173 if (omp_is_reference (var)
8174 || is_gimple_reg_type (TREE_TYPE (var)))
8176 tree new_var = lookup_decl (var, ctx);
8177 tree type;
8178 type = TREE_TYPE (var);
8179 if (omp_is_reference (var))
8180 type = TREE_TYPE (type);
8181 if ((INTEGRAL_TYPE_P (type)
8182 && TYPE_PRECISION (type) <= POINTER_SIZE)
8183 || TREE_CODE (type) == POINTER_TYPE)
8185 x = build_receiver_ref (var, false, ctx);
8186 if (TREE_CODE (type) != POINTER_TYPE)
8187 x = fold_convert (pointer_sized_int_node, x);
8188 x = fold_convert (type, x);
8189 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8190 fb_rvalue);
8191 if (omp_is_reference (var))
8193 tree v = create_tmp_var_raw (type, get_name (var));
8194 gimple_add_tmp_var (v);
8195 TREE_ADDRESSABLE (v) = 1;
8196 gimple_seq_add_stmt (&new_body,
8197 gimple_build_assign (v, x));
8198 x = build_fold_addr_expr (v);
8200 gimple_seq_add_stmt (&new_body,
8201 gimple_build_assign (new_var, x));
8203 else
8205 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8206 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8207 fb_rvalue);
8208 gimple_seq_add_stmt (&new_body,
8209 gimple_build_assign (new_var, x));
8212 else if (is_variable_sized (var))
8214 tree pvar = DECL_VALUE_EXPR (var);
8215 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8216 pvar = TREE_OPERAND (pvar, 0);
8217 gcc_assert (DECL_P (pvar));
8218 tree new_var = lookup_decl (pvar, ctx);
8219 x = build_receiver_ref (var, false, ctx);
8220 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8221 gimple_seq_add_stmt (&new_body,
8222 gimple_build_assign (new_var, x));
8224 break;
8225 case OMP_CLAUSE_PRIVATE:
8226 if (is_gimple_omp_oacc (ctx->stmt))
8227 break;
8228 var = OMP_CLAUSE_DECL (c);
8229 if (omp_is_reference (var))
8231 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8232 tree new_var = lookup_decl (var, ctx);
8233 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8234 if (TREE_CONSTANT (x))
8236 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8237 get_name (var));
8238 gimple_add_tmp_var (x);
8239 TREE_ADDRESSABLE (x) = 1;
8240 x = build_fold_addr_expr_loc (clause_loc, x);
8242 else
8243 break;
8245 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8246 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8247 gimple_seq_add_stmt (&new_body,
8248 gimple_build_assign (new_var, x));
8250 break;
8251 case OMP_CLAUSE_USE_DEVICE_PTR:
8252 case OMP_CLAUSE_IS_DEVICE_PTR:
8253 var = OMP_CLAUSE_DECL (c);
8254 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8255 x = build_sender_ref (var, ctx);
8256 else
8257 x = build_receiver_ref (var, false, ctx);
8258 if (is_variable_sized (var))
8260 tree pvar = DECL_VALUE_EXPR (var);
8261 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8262 pvar = TREE_OPERAND (pvar, 0);
8263 gcc_assert (DECL_P (pvar));
8264 tree new_var = lookup_decl (pvar, ctx);
8265 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8266 gimple_seq_add_stmt (&new_body,
8267 gimple_build_assign (new_var, x));
8269 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8271 tree new_var = lookup_decl (var, ctx);
8272 new_var = DECL_VALUE_EXPR (new_var);
8273 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8274 new_var = TREE_OPERAND (new_var, 0);
8275 gcc_assert (DECL_P (new_var));
8276 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8277 gimple_seq_add_stmt (&new_body,
8278 gimple_build_assign (new_var, x));
8280 else
8282 tree type = TREE_TYPE (var);
8283 tree new_var = lookup_decl (var, ctx);
8284 if (omp_is_reference (var))
8286 type = TREE_TYPE (type);
8287 if (TREE_CODE (type) != ARRAY_TYPE)
8289 tree v = create_tmp_var_raw (type, get_name (var));
8290 gimple_add_tmp_var (v);
8291 TREE_ADDRESSABLE (v) = 1;
8292 x = fold_convert (type, x);
8293 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8294 fb_rvalue);
8295 gimple_seq_add_stmt (&new_body,
8296 gimple_build_assign (v, x));
8297 x = build_fold_addr_expr (v);
8300 new_var = DECL_VALUE_EXPR (new_var);
8301 x = fold_convert (TREE_TYPE (new_var), x);
8302 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8303 gimple_seq_add_stmt (&new_body,
8304 gimple_build_assign (new_var, x));
8306 break;
8308 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8309 so that any firstprivate vars referenced by OMP_CLAUSE_SIZE have
8310 already been handled. Likewise OMP_CLAUSE_PRIVATE for VLAs
8311 or references to VLAs. */
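/* E.g., for

     int *p;
     #pragma omp target map(tofrom: p[2:8])

   the array-section map is followed by a GOMP_MAP_FIRSTPRIVATE_POINTER
   clause for P whose OMP_CLAUSE_SIZE holds the bias of the section
   start; below, the received section address has that bias subtracted
   again so P can be used unchanged inside the region.  (An
   illustrative sketch; these clause chains can take other shapes.)  */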
8312 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8313 switch (OMP_CLAUSE_CODE (c))
8315 tree var;
8316 default:
8317 break;
8318 case OMP_CLAUSE_MAP:
8319 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8320 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8322 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8323 poly_int64 offset = 0;
8324 gcc_assert (prev);
8325 var = OMP_CLAUSE_DECL (c);
8326 if (DECL_P (var)
8327 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8328 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8329 ctx))
8330 && varpool_node::get_create (var)->offloadable)
8331 break;
8332 if (TREE_CODE (var) == INDIRECT_REF
8333 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8334 var = TREE_OPERAND (var, 0);
8335 if (TREE_CODE (var) == COMPONENT_REF)
8337 var = get_addr_base_and_unit_offset (var, &offset);
8338 gcc_assert (var != NULL_TREE && DECL_P (var));
8340 else if (DECL_SIZE (var)
8341 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8343 tree var2 = DECL_VALUE_EXPR (var);
8344 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8345 var2 = TREE_OPERAND (var2, 0);
8346 gcc_assert (DECL_P (var2));
8347 var = var2;
8349 tree new_var = lookup_decl (var, ctx), x;
8350 tree type = TREE_TYPE (new_var);
8351 bool is_ref;
8352 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8353 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8354 == COMPONENT_REF))
8356 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8357 is_ref = true;
8358 new_var = build2 (MEM_REF, type,
8359 build_fold_addr_expr (new_var),
8360 build_int_cst (build_pointer_type (type),
8361 offset));
8363 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8365 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8366 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8367 new_var = build2 (MEM_REF, type,
8368 build_fold_addr_expr (new_var),
8369 build_int_cst (build_pointer_type (type),
8370 offset));
8372 else
8373 is_ref = omp_is_reference (var);
8374 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8375 is_ref = false;
8376 bool ref_to_array = false;
8377 if (is_ref)
8379 type = TREE_TYPE (type);
8380 if (TREE_CODE (type) == ARRAY_TYPE)
8382 type = build_pointer_type (type);
8383 ref_to_array = true;
8386 else if (TREE_CODE (type) == ARRAY_TYPE)
8388 tree decl2 = DECL_VALUE_EXPR (new_var);
8389 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8390 decl2 = TREE_OPERAND (decl2, 0);
8391 gcc_assert (DECL_P (decl2));
8392 new_var = decl2;
8393 type = TREE_TYPE (new_var);
8395 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8396 x = fold_convert_loc (clause_loc, type, x);
8397 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8399 tree bias = OMP_CLAUSE_SIZE (c);
8400 if (DECL_P (bias))
8401 bias = lookup_decl (bias, ctx);
8402 bias = fold_convert_loc (clause_loc, sizetype, bias);
8403 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8404 bias);
8405 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8406 TREE_TYPE (x), x, bias);
8408 if (ref_to_array)
8409 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8410 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8411 if (is_ref && !ref_to_array)
8413 tree t = create_tmp_var_raw (type, get_name (var));
8414 gimple_add_tmp_var (t);
8415 TREE_ADDRESSABLE (t) = 1;
8416 gimple_seq_add_stmt (&new_body,
8417 gimple_build_assign (t, x));
8418 x = build_fold_addr_expr_loc (clause_loc, t);
8420 gimple_seq_add_stmt (&new_body,
8421 gimple_build_assign (new_var, x));
8422 prev = NULL_TREE;
8424 else if (OMP_CLAUSE_CHAIN (c)
8425 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8426 == OMP_CLAUSE_MAP
8427 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8428 == GOMP_MAP_FIRSTPRIVATE_POINTER
8429 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8430 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8431 prev = c;
8432 break;
8433 case OMP_CLAUSE_PRIVATE:
8434 var = OMP_CLAUSE_DECL (c);
8435 if (is_variable_sized (var))
8437 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8438 tree new_var = lookup_decl (var, ctx);
8439 tree pvar = DECL_VALUE_EXPR (var);
8440 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8441 pvar = TREE_OPERAND (pvar, 0);
8442 gcc_assert (DECL_P (pvar));
8443 tree new_pvar = lookup_decl (pvar, ctx);
8444 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8445 tree al = size_int (DECL_ALIGN (var));
8446 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8447 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8448 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8449 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8450 gimple_seq_add_stmt (&new_body,
8451 gimple_build_assign (new_pvar, x));
8453 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8455 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8456 tree new_var = lookup_decl (var, ctx);
8457 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8458 if (TREE_CONSTANT (x))
8459 break;
8460 else
8462 tree atmp
8463 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8464 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8465 tree al = size_int (TYPE_ALIGN (rtype));
8466 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8469 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8470 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8471 gimple_seq_add_stmt (&new_body,
8472 gimple_build_assign (new_var, x));
8474 break;
8477 gimple_seq fork_seq = NULL;
8478 gimple_seq join_seq = NULL;
8480 if (is_oacc_parallel (ctx))
8482 /* If there are reductions on the offloaded region itself, treat
8483 them as a dummy GANG loop. */
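/* E.g., reductions on

     #pragma acc parallel reduction (+:sum)

   have their setup and teardown emitted into FORK_SEQ/JOIN_SEQ around
   the region body, just as for an explicit gang-level loop.  */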
8484 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8486 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8487 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8490 gimple_seq_add_seq (&new_body, fork_seq);
8491 gimple_seq_add_seq (&new_body, tgt_body);
8492 gimple_seq_add_seq (&new_body, join_seq);
8494 if (offloaded)
8495 new_body = maybe_catch_exception (new_body);
8497 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8498 gimple_omp_set_body (stmt, new_body);
8501 bind = gimple_build_bind (NULL, NULL,
8502 tgt_bind ? gimple_bind_block (tgt_bind)
8503 : NULL_TREE);
8504 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8505 gimple_bind_add_seq (bind, ilist);
8506 gimple_bind_add_stmt (bind, stmt);
8507 gimple_bind_add_seq (bind, olist);
8509 pop_gimplify_context (NULL);
8511 if (dep_bind)
8513 gimple_bind_add_seq (dep_bind, dep_ilist);
8514 gimple_bind_add_stmt (dep_bind, bind);
8515 gimple_bind_add_seq (dep_bind, dep_olist);
8516 pop_gimplify_context (dep_bind);
8520 /* Expand code for an OpenMP teams directive. */
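/* A sketch of the result, assuming both clauses are present:

     #pragma omp teams num_teams (8) thread_limit (16)
     body;

   becomes roughly

     __builtin_GOMP_teams (8, 16);
     body;
     OMP_RETURN

   with a missing clause contributing 0, i.e. runtime-chosen.  */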
8522 static void
8523 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8525 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8526 push_gimplify_context ();
8528 tree block = make_node (BLOCK);
8529 gbind *bind = gimple_build_bind (NULL, NULL, block);
8530 gsi_replace (gsi_p, bind, true);
8531 gimple_seq bind_body = NULL;
8532 gimple_seq dlist = NULL;
8533 gimple_seq olist = NULL;
8535 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8536 OMP_CLAUSE_NUM_TEAMS);
8537 if (num_teams == NULL_TREE)
8538 num_teams = build_int_cst (unsigned_type_node, 0);
8539 else
8541 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8542 num_teams = fold_convert (unsigned_type_node, num_teams);
8543 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8545 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8546 OMP_CLAUSE_THREAD_LIMIT);
8547 if (thread_limit == NULL_TREE)
8548 thread_limit = build_int_cst (unsigned_type_node, 0);
8549 else
8551 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8552 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8553 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8554 fb_rvalue);
8557 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8558 &bind_body, &dlist, ctx, NULL);
8559 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8560 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8561 if (!gimple_omp_teams_grid_phony (teams_stmt))
8563 gimple_seq_add_stmt (&bind_body, teams_stmt);
8564 location_t loc = gimple_location (teams_stmt);
8565 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8566 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8567 gimple_set_location (call, loc);
8568 gimple_seq_add_stmt (&bind_body, call);
8571 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8572 gimple_omp_set_body (teams_stmt, NULL);
8573 gimple_seq_add_seq (&bind_body, olist);
8574 gimple_seq_add_seq (&bind_body, dlist);
8575 if (!gimple_omp_teams_grid_phony (teams_stmt))
8576 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8577 gimple_bind_set_body (bind, bind_body);
8579 pop_gimplify_context (bind);
8581 gimple_bind_append_vars (bind, ctx->block_vars);
8582 BLOCK_VARS (block) = ctx->block_vars;
8583 if (BLOCK_VARS (block))
8584 TREE_USED (block) = 1;
8587 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8589 static void
8590 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8592 gimple *stmt = gsi_stmt (*gsi_p);
8593 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8594 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8595 gimple_build_omp_return (false));
8599 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8600 regimplified. If DATA is non-NULL, lower_omp_1 is being invoked
8601 outside of any OMP context, but with task_shared_vars set. */
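/* E.g., once a shared variable V has been given a DECL_VALUE_EXPR
   such as .omp_data_i->v, a statement like "if (v > 0)" must be
   regimplified so the field reference is expanded into valid GIMPLE.
   (Illustrative names.)  */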
8603 static tree
8604 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8605 void *data)
8607 tree t = *tp;
8609 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8610 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8611 return t;
8613 if (task_shared_vars
8614 && DECL_P (t)
8615 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8616 return t;
8618 /* If a global variable has been privatized, TREE_CONSTANT on
8619 ADDR_EXPR might be wrong. */
8620 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8621 recompute_tree_invariant_for_addr_expr (t);
8623 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8624 return NULL_TREE;
8627 /* Data to be communicated between lower_omp_regimplify_operands and
8628 lower_omp_regimplify_operands_p. */
8630 struct lower_omp_regimplify_operands_data
8632 omp_context *ctx;
8633 vec<tree> *decls;
8636 /* Helper function for lower_omp_regimplify_operands. Find
8637 omp_member_access_dummy_var vars and adjust temporarily their
8638 DECL_VALUE_EXPRs if needed. */
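/* E.g., a dummy VAR_DECL standing in for a non-static data member
   access may need its DECL_VALUE_EXPR temporarily remapped to the
   privatized copy found in LDATA->CTX; the original value expression
   is pushed onto LDATA->DECLS and restored by the caller.  */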
8640 static tree
8641 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8642 void *data)
8644 tree t = omp_member_access_dummy_var (*tp);
8645 if (t)
8647 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8648 lower_omp_regimplify_operands_data *ldata
8649 = (lower_omp_regimplify_operands_data *) wi->info;
8650 tree o = maybe_lookup_decl (t, ldata->ctx);
8651 if (o != t)
8653 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8654 ldata->decls->safe_push (*tp);
8655 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8656 SET_DECL_VALUE_EXPR (*tp, v);
8659 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8660 return NULL_TREE;
8663 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8664 of omp_member_access_dummy_var vars during regimplification. */
8666 static void
8667 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8668 gimple_stmt_iterator *gsi_p)
8670 auto_vec<tree, 10> decls;
8671 if (ctx)
8673 struct walk_stmt_info wi;
8674 memset (&wi, '\0', sizeof (wi));
8675 struct lower_omp_regimplify_operands_data data;
8676 data.ctx = ctx;
8677 data.decls = &decls;
8678 wi.info = &data;
8679 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8681 gimple_regimplify_operands (stmt, gsi_p);
8682 while (!decls.is_empty ())
8684 tree t = decls.pop ();
8685 tree v = decls.pop ();
8686 SET_DECL_VALUE_EXPR (t, v);
8690 static void
8691 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8693 gimple *stmt = gsi_stmt (*gsi_p);
8694 struct walk_stmt_info wi;
8695 gcall *call_stmt;
8697 if (gimple_has_location (stmt))
8698 input_location = gimple_location (stmt);
8700 if (task_shared_vars)
8701 memset (&wi, '\0', sizeof (wi));
8703 /* If we have issued syntax errors, avoid doing any heavy lifting.
8704 Just replace the OMP directives with a NOP to avoid
8705 confusing RTL expansion. */
8706 if (seen_error () && is_gimple_omp (stmt))
8708 gsi_replace (gsi_p, gimple_build_nop (), true);
8709 return;
8712 switch (gimple_code (stmt))
8714 case GIMPLE_COND:
8716 gcond *cond_stmt = as_a <gcond *> (stmt);
8717 if ((ctx || task_shared_vars)
8718 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8719 lower_omp_regimplify_p,
8720 ctx ? NULL : &wi, NULL)
8721 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8722 lower_omp_regimplify_p,
8723 ctx ? NULL : &wi, NULL)))
8724 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8726 break;
8727 case GIMPLE_CATCH:
8728 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8729 break;
8730 case GIMPLE_EH_FILTER:
8731 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8732 break;
8733 case GIMPLE_TRY:
8734 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8735 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8736 break;
8737 case GIMPLE_TRANSACTION:
8738 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8739 ctx);
8740 break;
8741 case GIMPLE_BIND:
8742 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8743 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
8744 break;
8745 case GIMPLE_OMP_PARALLEL:
8746 case GIMPLE_OMP_TASK:
8747 ctx = maybe_lookup_ctx (stmt);
8748 gcc_assert (ctx);
8749 if (ctx->cancellable)
8750 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8751 lower_omp_taskreg (gsi_p, ctx);
8752 break;
8753 case GIMPLE_OMP_FOR:
8754 ctx = maybe_lookup_ctx (stmt);
8755 gcc_assert (ctx);
8756 if (ctx->cancellable)
8757 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8758 lower_omp_for (gsi_p, ctx);
8759 break;
8760 case GIMPLE_OMP_SECTIONS:
8761 ctx = maybe_lookup_ctx (stmt);
8762 gcc_assert (ctx);
8763 if (ctx->cancellable)
8764 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8765 lower_omp_sections (gsi_p, ctx);
8766 break;
8767 case GIMPLE_OMP_SINGLE:
8768 ctx = maybe_lookup_ctx (stmt);
8769 gcc_assert (ctx);
8770 lower_omp_single (gsi_p, ctx);
8771 break;
8772 case GIMPLE_OMP_MASTER:
8773 ctx = maybe_lookup_ctx (stmt);
8774 gcc_assert (ctx);
8775 lower_omp_master (gsi_p, ctx);
8776 break;
8777 case GIMPLE_OMP_TASKGROUP:
8778 ctx = maybe_lookup_ctx (stmt);
8779 gcc_assert (ctx);
8780 lower_omp_taskgroup (gsi_p, ctx);
8781 break;
8782 case GIMPLE_OMP_ORDERED:
8783 ctx = maybe_lookup_ctx (stmt);
8784 gcc_assert (ctx);
8785 lower_omp_ordered (gsi_p, ctx);
8786 break;
8787 case GIMPLE_OMP_CRITICAL:
8788 ctx = maybe_lookup_ctx (stmt);
8789 gcc_assert (ctx);
8790 lower_omp_critical (gsi_p, ctx);
8791 break;
8792 case GIMPLE_OMP_ATOMIC_LOAD:
8793 if ((ctx || task_shared_vars)
8794 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8795 as_a <gomp_atomic_load *> (stmt)),
8796 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8797 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8798 break;
8799 case GIMPLE_OMP_TARGET:
8800 ctx = maybe_lookup_ctx (stmt);
8801 gcc_assert (ctx);
8802 lower_omp_target (gsi_p, ctx);
8803 break;
8804 case GIMPLE_OMP_TEAMS:
8805 ctx = maybe_lookup_ctx (stmt);
8806 gcc_assert (ctx);
8807 lower_omp_teams (gsi_p, ctx);
8808 break;
8809 case GIMPLE_OMP_GRID_BODY:
8810 ctx = maybe_lookup_ctx (stmt);
8811 gcc_assert (ctx);
8812 lower_omp_grid_body (gsi_p, ctx);
8813 break;
8814 case GIMPLE_CALL:
8815 tree fndecl;
8816 call_stmt = as_a <gcall *> (stmt);
8817 fndecl = gimple_call_fndecl (call_stmt);
8818 if (fndecl
8819 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8820 switch (DECL_FUNCTION_CODE (fndecl))
8822 case BUILT_IN_GOMP_BARRIER:
8823 if (ctx == NULL)
8824 break;
8825 /* FALLTHRU */
8826 case BUILT_IN_GOMP_CANCEL:
8827 case BUILT_IN_GOMP_CANCELLATION_POINT:
8828 omp_context *cctx;
8829 cctx = ctx;
8830 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8831 cctx = cctx->outer;
8832 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8833 if (!cctx->cancellable)
8835 if (DECL_FUNCTION_CODE (fndecl)
8836 == BUILT_IN_GOMP_CANCELLATION_POINT)
8838 stmt = gimple_build_nop ();
8839 gsi_replace (gsi_p, stmt, false);
8841 break;
8843 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8845 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8846 gimple_call_set_fndecl (call_stmt, fndecl);
8847 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8849 tree lhs;
8850 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8851 gimple_call_set_lhs (call_stmt, lhs);
8852 tree fallthru_label;
8853 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8854 gimple *g;
8855 g = gimple_build_label (fallthru_label);
8856 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8857 g = gimple_build_cond (NE_EXPR, lhs,
8858 fold_convert (TREE_TYPE (lhs),
8859 boolean_false_node),
8860 cctx->cancel_label, fallthru_label);
8861 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8862 break;
8863 default:
8864 break;
8866 /* FALLTHRU */
8867 default:
8868 if ((ctx || task_shared_vars)
8869 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8870 ctx ? NULL : &wi))
8872 /* Just remove clobbers; this should happen only if we have
8873 "privatized" local addressable variables in SIMD regions.
8874 The clobber isn't needed in that case, and gimplifying the
8875 address of the ARRAY_REF into a pointer and creating a
8876 MEM_REF-based clobber would create worse code than we get
8877 with the clobber dropped. */
8878 if (gimple_clobber_p (stmt))
8880 gsi_replace (gsi_p, gimple_build_nop (), true);
8881 break;
8883 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8885 break;
8889 static void
8890 lower_omp (gimple_seq *body, omp_context *ctx)
8892 location_t saved_location = input_location;
8893 gimple_stmt_iterator gsi;
8894 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8895 lower_omp_1 (&gsi, ctx);
8896 /* During gimplification, we haven't folded statements inside offloading
8897 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8898 if (target_nesting_level || taskreg_nesting_level)
8899 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8900 fold_stmt (&gsi);
8901 input_location = saved_location;
8904 /* Main entry point. */
8906 static unsigned int
8907 execute_lower_omp (void)
8909 gimple_seq body;
8910 int i;
8911 omp_context *ctx;
8913 /* This pass always runs, to provide PROP_gimple_lomp.
8914 But often, there is nothing to do. */
8915 if (flag_openacc == 0 && flag_openmp == 0
8916 && flag_openmp_simd == 0)
8917 return 0;
8919 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8920 delete_omp_context);
8922 body = gimple_body (current_function_decl);
8924 if (hsa_gen_requested_p ())
8925 omp_grid_gridify_all_targets (&body);
8927 scan_omp (&body, NULL);
8928 gcc_assert (taskreg_nesting_level == 0);
8929 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8930 finish_taskreg_scan (ctx);
8931 taskreg_contexts.release ();
8933 if (all_contexts->root)
8935 if (task_shared_vars)
8936 push_gimplify_context ();
8937 lower_omp (&body, NULL);
8938 if (task_shared_vars)
8939 pop_gimplify_context (NULL);
8942 if (all_contexts)
8944 splay_tree_delete (all_contexts);
8945 all_contexts = NULL;
8947 BITMAP_FREE (task_shared_vars);
8949 /* If the current function is a method, remove the artificial dummy
8950 VAR_DECLs created for non-static data member privatization; they
8951 aren't needed for debuginfo or anything else, have already been
8952 replaced everywhere in the IL, and cause problems with LTO. */
8953 if (DECL_ARGUMENTS (current_function_decl)
8954 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
8955 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
8956 == POINTER_TYPE))
8957 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
8958 return 0;
8961 namespace {
8963 const pass_data pass_data_lower_omp =
8965 GIMPLE_PASS, /* type */
8966 "omplower", /* name */
8967 OPTGROUP_OMP, /* optinfo_flags */
8968 TV_NONE, /* tv_id */
8969 PROP_gimple_any, /* properties_required */
8970 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8971 0, /* properties_destroyed */
8972 0, /* todo_flags_start */
8973 0, /* todo_flags_finish */
8976 class pass_lower_omp : public gimple_opt_pass
8978 public:
8979 pass_lower_omp (gcc::context *ctxt)
8980 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8983 /* opt_pass methods: */
8984 virtual unsigned int execute (function *) { return execute_lower_omp (); }
8986 }; // class pass_lower_omp
8988 } // anon namespace
8990 gimple_opt_pass *
8991 make_pass_lower_omp (gcc::context *ctxt)
8993 return new pass_lower_omp (ctxt);
8996 /* The following is a utility to diagnose structured block violations.
8997 It is not part of the "omplower" pass, as that's invoked too late. It
8998 should be invoked by the respective front ends after gimplification. */
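/* E.g. (OpenMP):

     #pragma omp parallel
     {
       goto out;   <- invalid exit from the structured block
     }
     out:;

   pass 1 records that OUT is outside of any construct; pass 2 then
   sees that the GOTO's context is the parallel statement and reports
   "invalid branch to/from OpenMP structured block".  */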
9000 static splay_tree all_labels;
9002 /* Check for mismatched contexts and generate an error if needed. Return
9003 true if an error is detected. */
9005 static bool
9006 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9007 gimple *branch_ctx, gimple *label_ctx)
9009 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9010 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9012 if (label_ctx == branch_ctx)
9013 return false;
9015 const char* kind = NULL;
9017 if (flag_openacc)
9019 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9020 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9022 gcc_checking_assert (kind == NULL);
9023 kind = "OpenACC";
9026 if (kind == NULL)
9028 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9029 kind = "OpenMP";
9032 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9033 so we could traverse it and issue a correct "exit" or "enter" error
9034 message upon a structured block violation.
9036 We built the context by tree_cons'ing up a list, but there is
9037 no easy counterpart in gimple tuples. It seems like far too much work
9038 for issuing exit/enter error messages. If someone really misses the
9039 distinct error message... patches welcome. */
9041 #if 0
9042 /* Try to avoid confusing the user by producing an error message
9043 with correct "exit" or "enter" verbiage. We prefer "exit"
9044 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9045 if (branch_ctx == NULL)
9046 exit_p = false;
9047 else
9049 while (label_ctx)
9051 if (TREE_VALUE (label_ctx) == branch_ctx)
9053 exit_p = false;
9054 break;
9056 label_ctx = TREE_CHAIN (label_ctx);
9060 if (exit_p)
9061 error ("invalid exit from %s structured block", kind);
9062 else
9063 error ("invalid entry to %s structured block", kind);
9064 #endif
9066 /* If it's obvious we have an invalid entry, be specific about the error. */
9067 if (branch_ctx == NULL)
9068 error ("invalid entry to %s structured block", kind);
9069 else
9071 /* Otherwise, be vague and lazy, but efficient. */
9072 error ("invalid branch to/from %s structured block", kind);
9075 gsi_replace (gsi_p, gimple_build_nop (), false);
9076 return true;
9079 /* Pass 1: Create a minimal tree of structured blocks, and record
9080 where each label is found. */
9082 static tree
9083 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9084 struct walk_stmt_info *wi)
9086 gimple *context = (gimple *) wi->info;
9087 gimple *inner_context;
9088 gimple *stmt = gsi_stmt (*gsi_p);
9090 *handled_ops_p = true;
9092 switch (gimple_code (stmt))
9094 WALK_SUBSTMTS;
9096 case GIMPLE_OMP_PARALLEL:
9097 case GIMPLE_OMP_TASK:
9098 case GIMPLE_OMP_SECTIONS:
9099 case GIMPLE_OMP_SINGLE:
9100 case GIMPLE_OMP_SECTION:
9101 case GIMPLE_OMP_MASTER:
9102 case GIMPLE_OMP_ORDERED:
9103 case GIMPLE_OMP_CRITICAL:
9104 case GIMPLE_OMP_TARGET:
9105 case GIMPLE_OMP_TEAMS:
9106 case GIMPLE_OMP_TASKGROUP:
9107 /* The minimal context here is just the current OMP construct. */
9108 inner_context = stmt;
9109 wi->info = inner_context;
9110 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9111 wi->info = context;
9112 break;
9114 case GIMPLE_OMP_FOR:
9115 inner_context = stmt;
9116 wi->info = inner_context;
9117 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9118 walk them. */
9119 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9120 diagnose_sb_1, NULL, wi);
9121 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9122 wi->info = context;
9123 break;
9125 case GIMPLE_LABEL:
9126 splay_tree_insert (all_labels,
9127 (splay_tree_key) gimple_label_label (
9128 as_a <glabel *> (stmt)),
9129 (splay_tree_value) context);
9130 break;
9132 default:
9133 break;
9136 return NULL_TREE;
9139 /* Pass 2: Check each branch and see if its context differs from that of
9140 the destination label's context. */
9142 static tree
9143 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9144 struct walk_stmt_info *wi)
9146 gimple *context = (gimple *) wi->info;
9147 splay_tree_node n;
9148 gimple *stmt = gsi_stmt (*gsi_p);
9150 *handled_ops_p = true;
9152 switch (gimple_code (stmt))
9154 WALK_SUBSTMTS;
9156 case GIMPLE_OMP_PARALLEL:
9157 case GIMPLE_OMP_TASK:
9158 case GIMPLE_OMP_SECTIONS:
9159 case GIMPLE_OMP_SINGLE:
9160 case GIMPLE_OMP_SECTION:
9161 case GIMPLE_OMP_MASTER:
9162 case GIMPLE_OMP_ORDERED:
9163 case GIMPLE_OMP_CRITICAL:
9164 case GIMPLE_OMP_TARGET:
9165 case GIMPLE_OMP_TEAMS:
9166 case GIMPLE_OMP_TASKGROUP:
9167 wi->info = stmt;
9168 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9169 wi->info = context;
9170 break;
9172 case GIMPLE_OMP_FOR:
9173 wi->info = stmt;
9174 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9175 walk them. */
9176 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9177 diagnose_sb_2, NULL, wi);
9178 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9179 wi->info = context;
9180 break;
9182 case GIMPLE_COND:
9184 gcond *cond_stmt = as_a <gcond *> (stmt);
9185 tree lab = gimple_cond_true_label (cond_stmt);
9186 if (lab)
9188 n = splay_tree_lookup (all_labels,
9189 (splay_tree_key) lab);
9190 diagnose_sb_0 (gsi_p, context,
9191 n ? (gimple *) n->value : NULL);
9193 lab = gimple_cond_false_label (cond_stmt);
9194 if (lab)
9196 n = splay_tree_lookup (all_labels,
9197 (splay_tree_key) lab);
9198 diagnose_sb_0 (gsi_p, context,
9199 n ? (gimple *) n->value : NULL);
9202 break;
9204 case GIMPLE_GOTO:
9206 tree lab = gimple_goto_dest (stmt);
9207 if (TREE_CODE (lab) != LABEL_DECL)
9208 break;
9210 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9211 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9213 break;
9215 case GIMPLE_SWITCH:
9217 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9218 unsigned int i;
9219 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9221 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9222 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9223 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9224 break;
9227 break;
9229 case GIMPLE_RETURN:
9230 diagnose_sb_0 (gsi_p, context, NULL);
9231 break;
9233 default:
9234 break;
9237 return NULL_TREE;
9240 static unsigned int
9241 diagnose_omp_structured_block_errors (void)
9243 struct walk_stmt_info wi;
9244 gimple_seq body = gimple_body (current_function_decl);
9246 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9248 memset (&wi, 0, sizeof (wi));
9249 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9251 memset (&wi, 0, sizeof (wi));
9252 wi.want_locations = true;
9253 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9255 gimple_set_body (current_function_decl, body);
9257 splay_tree_delete (all_labels);
9258 all_labels = NULL;
9260 return 0;
9263 namespace {
9265 const pass_data pass_data_diagnose_omp_blocks =
9267 GIMPLE_PASS, /* type */
9268 "*diagnose_omp_blocks", /* name */
9269 OPTGROUP_OMP, /* optinfo_flags */
9270 TV_NONE, /* tv_id */
9271 PROP_gimple_any, /* properties_required */
9272 0, /* properties_provided */
9273 0, /* properties_destroyed */
9274 0, /* todo_flags_start */
9275 0, /* todo_flags_finish */
9278 class pass_diagnose_omp_blocks : public gimple_opt_pass
9280 public:
9281 pass_diagnose_omp_blocks (gcc::context *ctxt)
9282 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9285 /* opt_pass methods: */
9286 virtual bool gate (function *)
9288 return flag_openacc || flag_openmp || flag_openmp_simd;
9290 virtual unsigned int execute (function *)
9292 return diagnose_omp_structured_block_errors ();
9295 }; // class pass_diagnose_omp_blocks
9297 } // anon namespace
9299 gimple_opt_pass *
9300 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9302 return new pass_diagnose_omp_blocks (ctxt);
9306 #include "gt-omp-low.h"