gcc/omp-low.c
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
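/* As a rough sketch (not the exact generated code), a construct such as

       #pragma omp parallel shared(a)
       a++;

   in a function foo is outlined into a child function foo._omp_fn.0 and
   replaced by a call into libgomp:

       struct .omp_data_s { int *a; } .omp_data_o;
       .omp_data_o.a = &a;
       GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

   where the child function rebuilds every reference to A through the
   received .omp_data_i pointer.  */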
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
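/* A walker callback uses WALK_SUBSTMTS inside its switch so that
   walk_gimple_seq descends into the bodies of binds, try/catch blocks
   and transactions.  A minimal sketch of such a callback (the name
   example_callback is hypothetical):

       static tree
       example_callback (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
                         struct walk_stmt_info *wi)
       {
         *handled_ops_p = true;
         switch (gimple_code (gsi_stmt (*gsi_p)))
           {
           WALK_SUBSTMTS;
           default:
             break;
           }
         return NULL_TREE;
       }

   See omp_find_combined_for below for a real use.  */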
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows for the variable to not
   have been entered; otherwise we assert that the variable must have
   been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in a different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}
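/* For example: a plain "int i" shared on a parallel and never
   addressed can use copy-in/copy-out, so the function returns false;
   once its address is taken, or when it is shared on a task (whose
   execution may outlive the GOMP_task call), a pointer field is
   required and the function returns true.  */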
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
        x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
        = splay_tree_lookup (ctx->outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
            x = var;
          else
            x = lookup_decl (var, ctx->outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, ctx->outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (ctx->outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, ctx->outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
        {
          outer = outer->outer;
          gcc_assert (outer
                      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
        }
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
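/* Summary of the MASK bits accepted above: 1 installs the field in
   record_type/field_map, 2 in srecord_type/sfield_map, 4 wraps an
   ARRAY_TYPE in a double pointer, and 8 keys the splay tree by
   &DECL_UID (var) rather than by VAR itself.  E.g. the common
   install_var_field (decl, by_ref, 3, ctx) installs DECL in both
   records.  */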
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context's data structures.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
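/* After this, the receiver for an offloaded region ends up typed
   roughly as a restrict reference to a const-qualified record, letting
   alias analysis assume the received pointers are neither modified nor
   aliased within the child function.  */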
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          /* Ignore shared directives in teams construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;
        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                  && !is_variable_sized (t))
                {
                  by_ref = use_pointer_for_field (t, ctx);
                  install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 3, ctx);
          else
            install_var_field (decl, false, 3, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with the "omp declare
             target link" attribute, or maps with the ALWAYS modifier,
             do need to be copied.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE__GRIDDIM_:
          if (ctx->outer)
            {
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;
        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__GRIDDIM_:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }
  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
                              task_copy ? "_omp_cpyfn" : "_omp_fn");
}
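/* E.g. for a function foo, the first outlined region becomes
   foo._omp_fn.0 and a task copy function becomes foo._omp_cpyfn.N.  */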
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
                                     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
                       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
        a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
        if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
          *p = TREE_CHAIN (*p);
        else
          {
            tree chain = TREE_CHAIN (*p);
            *p = copy_node (*p);
            p = &TREE_CHAIN (*p);
            *p = chain;
          }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
        g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                            DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
                                 ? "omp target entrypoint"
                                 : "omp declare target");
      DECL_ATTRIBUTES (decl)
        = tree_cons (get_identifier (target_attr),
                     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
                  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
                  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier (".omp_data_o"),
                      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
                       bool *handled_ops_p,
                       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
          && gimple_omp_for_kind (stmt)
             == *(const enum gf_mask *) (wi->info))
        {
          wi->info = stmt;
          return integer_zero_node;
        }
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
                              omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
         and then (fd.collapse - 1) temporaries with the same
         type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
          && TREE_CODE (fd.loop.n2) != INTEGER_CST)
        {
          count += fd.collapse - 1;
          /* If there are lastprivate clauses on the inner
             GIMPLE_OMP_FOR, add one more temporary for the total number
             of iterations (product of count1 ... countN-1).  */
          if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
                               OMP_CLAUSE_LASTPRIVATE))
            count++;
          else if (msk == GF_OMP_FOR_KIND_FOR
                   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
                                       OMP_CLAUSE_LASTPRIVATE))
            count++;
        }
      for (i = 0; i < count; i++)
        {
          tree temp = create_tmp_var (type);
          tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
          insert_decl_map (&outer_ctx->cb, temp, temp);
          OMP_CLAUSE_DECL (c) = temp;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
          gimple_omp_taskreg_set_clauses (stmt, c);
        }
    }
}
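/* E.g. for a combined "parallel for" with collapse(2) and a
   non-constant bound, this adds _looptemp_ clauses for the istart and
   iend temporaries plus one for the intermediate loop count (and one
   more when lastprivate needs the total iteration count), so the
   expansion of the parallel and the inner GIMPLE_OMP_FOR can
   communicate the precomputed bounds.  */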
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
                          OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
                         TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
        {
          if (DECL_HAS_VALUE_EXPR_P (t))
            t = unshare_expr (DECL_VALUE_EXPR (t));
          *tp = t;
        }
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
1889 /* If any decls have been made addressable during scan_omp,
1890 adjust their fields if needed, and layout record types
1891 of parallel/task constructs. */
1893 static void
1894 finish_taskreg_scan (omp_context *ctx)
1896 if (ctx->record_type == NULL_TREE)
1897 return;
1899 /* If any task_shared_vars were needed, verify all
1900 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1901 statements if use_pointer_for_field hasn't changed
1902 because of that. If it did, update field types now. */
1903 if (task_shared_vars)
1905 tree c;
1907 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1908 c; c = OMP_CLAUSE_CHAIN (c))
1909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1910 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1912 tree decl = OMP_CLAUSE_DECL (c);
1914 /* Global variables don't need to be copied;
1915 the receiver side will use them directly. */
1916 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1917 continue;
1918 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1919 || !use_pointer_for_field (decl, ctx))
1920 continue;
1921 tree field = lookup_field (decl, ctx);
1922 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1923 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1924 continue;
1925 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1926 TREE_THIS_VOLATILE (field) = 0;
1927 DECL_USER_ALIGN (field) = 0;
1928 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1929 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1930 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1931 if (ctx->srecord_type)
1933 tree sfield = lookup_sfield (decl, ctx);
1934 TREE_TYPE (sfield) = TREE_TYPE (field);
1935 TREE_THIS_VOLATILE (sfield) = 0;
1936 DECL_USER_ALIGN (sfield) = 0;
1937 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1938 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1939 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1944 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1946 layout_type (ctx->record_type);
1947 fixup_child_record_type (ctx);
1949 else
1951 location_t loc = gimple_location (ctx->stmt);
1952 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1953 /* Move VLA fields to the end. */
1954 p = &TYPE_FIELDS (ctx->record_type);
1955 while (*p)
1956 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1957 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1959 *q = *p;
1960 *p = TREE_CHAIN (*p);
1961 TREE_CHAIN (*q) = NULL_TREE;
1962 q = &TREE_CHAIN (*q);
1964 else
1965 p = &DECL_CHAIN (*p);
1966 *p = vla_fields;
1967 if (gimple_omp_task_taskloop_p (ctx->stmt))
1969 /* Move the fields corresponding to the first and second _looptemp_
1970 clauses to the front. They are filled in by GOMP_taskloop
1971 and thus need to be at specific positions. */
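/* Schematically (a sketch only), a record laid out as

     { ... ; long _looptemp_1; ... ; long _looptemp_2; ... }

   is rearranged here into

     { long _looptemp_1; long _looptemp_2; ... }

   so that GOMP_taskloop finds the start and end iteration values
   at fixed offsets at the beginning of the structure.  */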
1972 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1973 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1974 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1975 OMP_CLAUSE__LOOPTEMP_);
1976 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1977 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1978 p = &TYPE_FIELDS (ctx->record_type);
1979 while (*p)
1980 if (*p == f1 || *p == f2)
1981 *p = DECL_CHAIN (*p);
1982 else
1983 p = &DECL_CHAIN (*p);
1984 DECL_CHAIN (f1) = f2;
1985 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1986 TYPE_FIELDS (ctx->record_type) = f1;
1987 if (ctx->srecord_type)
1989 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1990 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1991 p = &TYPE_FIELDS (ctx->srecord_type);
1992 while (*p)
1993 if (*p == f1 || *p == f2)
1994 *p = DECL_CHAIN (*p);
1995 else
1996 p = &DECL_CHAIN (*p);
1997 DECL_CHAIN (f1) = f2;
1998 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
1999 TYPE_FIELDS (ctx->srecord_type) = f1;
2002 layout_type (ctx->record_type);
2003 fixup_child_record_type (ctx);
2004 if (ctx->srecord_type)
2005 layout_type (ctx->srecord_type);
2006 tree t = fold_convert_loc (loc, long_integer_type_node,
2007 TYPE_SIZE_UNIT (ctx->record_type));
2008 if (TREE_CODE (t) != INTEGER_CST)
2010 t = unshare_expr (t);
2011 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2013 gimple_omp_task_set_arg_size (ctx->stmt, t);
2014 t = build_int_cst (long_integer_type_node,
2015 TYPE_ALIGN_UNIT (ctx->record_type));
2016 gimple_omp_task_set_arg_align (ctx->stmt, t);
2020 /* Find the enclosing offload context. */
2022 static omp_context *
2023 enclosing_target_ctx (omp_context *ctx)
2025 for (; ctx; ctx = ctx->outer)
2026 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2027 break;
2029 return ctx;
2032 /* Return true if ctx is part of an oacc kernels region. */
2034 static bool
2035 ctx_in_oacc_kernels_region (omp_context *ctx)
2037 for (; ctx != NULL; ctx = ctx->outer)
2039 gimple *stmt = ctx->stmt;
2040 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2041 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2042 return true;
2045 return false;
2048 /* Check the parallelism clauses inside a kernels region.
2049 Until kernels handling moves to use the same loop indirection
2050 scheme as parallel, we need to do this checking early. */
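/* For instance (an illustrative sketch, not from the sources), in

     #pragma acc kernels
     #pragma acc loop gang
     for (int i = 0; i < n; i++)
       {
	 #pragma acc loop gang
	 for (int j = 0; j < m; j++)
	   ;
       }

   the inner loop requests the gang level already used by its
   containing loop, which the recursion below diagnoses as "inner
   loop uses same OpenACC parallelism as containing loop".  */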
2052 static unsigned
2053 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2055 bool checking = true;
2056 unsigned outer_mask = 0;
2057 unsigned this_mask = 0;
2058 bool has_seq = false, has_auto = false;
2060 if (ctx->outer)
2061 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2062 if (!stmt)
2064 checking = false;
2065 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2066 return outer_mask;
2067 stmt = as_a <gomp_for *> (ctx->stmt);
2070 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2072 switch (OMP_CLAUSE_CODE (c))
2074 case OMP_CLAUSE_GANG:
2075 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2076 break;
2077 case OMP_CLAUSE_WORKER:
2078 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2079 break;
2080 case OMP_CLAUSE_VECTOR:
2081 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2082 break;
2083 case OMP_CLAUSE_SEQ:
2084 has_seq = true;
2085 break;
2086 case OMP_CLAUSE_AUTO:
2087 has_auto = true;
2088 break;
2089 default:
2090 break;
2094 if (checking)
2096 if (has_seq && (this_mask || has_auto))
2097 error_at (gimple_location (stmt), "%<seq%> overrides other"
2098 " OpenACC loop specifiers");
2099 else if (has_auto && this_mask)
2100 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2101 " OpenACC loop specifiers");
2103 if (this_mask & outer_mask)
2104 error_at (gimple_location (stmt), "inner loop uses same"
2105 " OpenACC parallelism as containing loop");
2108 return outer_mask | this_mask;
2111 /* Scan a GIMPLE_OMP_FOR. */
2113 static omp_context *
2114 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2116 omp_context *ctx;
2117 size_t i;
2118 tree clauses = gimple_omp_for_clauses (stmt);
2120 ctx = new_omp_context (stmt, outer_ctx);
2122 if (is_gimple_omp_oacc (stmt))
2124 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2126 if (!tgt || is_oacc_parallel (tgt))
2127 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2129 char const *check = NULL;
2131 switch (OMP_CLAUSE_CODE (c))
2133 case OMP_CLAUSE_GANG:
2134 check = "gang";
2135 break;
2137 case OMP_CLAUSE_WORKER:
2138 check = "worker";
2139 break;
2141 case OMP_CLAUSE_VECTOR:
2142 check = "vector";
2143 break;
2145 default:
2146 break;
2149 if (check && OMP_CLAUSE_OPERAND (c, 0))
2150 error_at (gimple_location (stmt),
2151 "argument not permitted on %qs clause in"
2152 " OpenACC %<parallel%>", check);
2155 if (tgt && is_oacc_kernels (tgt))
2157 /* Strip out reductions, as they are not handled yet. */
2158 tree *prev_ptr = &clauses;
2160 while (tree probe = *prev_ptr)
2162 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2164 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2165 *prev_ptr = *next_ptr;
2166 else
2167 prev_ptr = next_ptr;
2170 gimple_omp_for_set_clauses (stmt, clauses);
2171 check_oacc_kernel_gwv (stmt, ctx);
2175 scan_sharing_clauses (clauses, ctx);
2177 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2178 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2180 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2181 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2182 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2183 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2185 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2186 return ctx;
2189 /* Duplicate #pragma omp simd, creating one copy for SIMT and another for SIMD. */
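/* Roughly, the bind built here has the shape (a sketch only; labels
   and temporaries are elided)

     D.cond = .GOMP_USE_SIMT ();
     if (D.cond != 0)
       <copy of the simd loop with an extra _simt_ clause>
     else
       <the original simd loop>

   and both copies are then scanned as separate contexts, with the
   SIMT clone recorded in the simt_stmt field of the original loop's
   context.  */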
2191 static void
2192 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2193 omp_context *outer_ctx)
2195 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2196 gsi_replace (gsi, bind, false);
2197 gimple_seq seq = NULL;
2198 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2199 tree cond = create_tmp_var_raw (integer_type_node);
2200 DECL_CONTEXT (cond) = current_function_decl;
2201 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2202 gimple_bind_set_vars (bind, cond);
2203 gimple_call_set_lhs (g, cond);
2204 gimple_seq_add_stmt (&seq, g);
2205 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2206 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2207 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2208 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2209 gimple_seq_add_stmt (&seq, g);
2210 g = gimple_build_label (lab1);
2211 gimple_seq_add_stmt (&seq, g);
2212 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2213 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2214 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2215 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2216 gimple_omp_for_set_clauses (new_stmt, clause);
2217 gimple_seq_add_stmt (&seq, new_stmt);
2218 g = gimple_build_goto (lab3);
2219 gimple_seq_add_stmt (&seq, g);
2220 g = gimple_build_label (lab2);
2221 gimple_seq_add_stmt (&seq, g);
2222 gimple_seq_add_stmt (&seq, stmt);
2223 g = gimple_build_label (lab3);
2224 gimple_seq_add_stmt (&seq, g);
2225 gimple_bind_set_body (bind, seq);
2226 update_stmt (bind);
2227 scan_omp_for (new_stmt, outer_ctx);
2228 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2231 /* Scan an OpenMP sections directive. */
2233 static void
2234 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2236 omp_context *ctx;
2238 ctx = new_omp_context (stmt, outer_ctx);
2239 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2240 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2243 /* Scan an OpenMP single directive. */
2245 static void
2246 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2248 omp_context *ctx;
2249 tree name;
2251 ctx = new_omp_context (stmt, outer_ctx);
2252 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2253 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2254 name = create_tmp_var_name (".omp_copy_s");
2255 name = build_decl (gimple_location (stmt),
2256 TYPE_DECL, name, ctx->record_type);
2257 TYPE_NAME (ctx->record_type) = name;
2259 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2260 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2262 if (TYPE_FIELDS (ctx->record_type) == NULL)
2263 ctx->record_type = NULL;
2264 else
2265 layout_type (ctx->record_type);
2268 /* Scan a GIMPLE_OMP_TARGET. */
2270 static void
2271 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2273 omp_context *ctx;
2274 tree name;
2275 bool offloaded = is_gimple_omp_offloaded (stmt);
2276 tree clauses = gimple_omp_target_clauses (stmt);
2278 ctx = new_omp_context (stmt, outer_ctx);
2279 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2280 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2281 name = create_tmp_var_name (".omp_data_t");
2282 name = build_decl (gimple_location (stmt),
2283 TYPE_DECL, name, ctx->record_type);
2284 DECL_ARTIFICIAL (name) = 1;
2285 DECL_NAMELESS (name) = 1;
2286 TYPE_NAME (ctx->record_type) = name;
2287 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2289 if (offloaded)
2291 create_omp_child_function (ctx, false);
2292 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2295 scan_sharing_clauses (clauses, ctx);
2296 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2298 if (TYPE_FIELDS (ctx->record_type) == NULL)
2299 ctx->record_type = ctx->receiver_decl = NULL;
2300 else
2302 TYPE_FIELDS (ctx->record_type)
2303 = nreverse (TYPE_FIELDS (ctx->record_type));
2304 if (flag_checking)
2306 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2307 for (tree field = TYPE_FIELDS (ctx->record_type);
2308 field;
2309 field = DECL_CHAIN (field))
2310 gcc_assert (DECL_ALIGN (field) == align);
2312 layout_type (ctx->record_type);
2313 if (offloaded)
2314 fixup_child_record_type (ctx);
2318 /* Scan an OpenMP teams directive. */
2320 static void
2321 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2323 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2324 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2325 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2328 /* Check nesting restrictions. */
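/* For instance (an illustrative sketch), code such as

     #pragma omp critical
     {
       #pragma omp barrier
     }

   violates the OpenMP nesting rules and is rejected below with the
   "barrier region may not be closely nested inside of ..." error
   rather than being expanded.  */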
2329 static bool
2330 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2332 tree c;
2334 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2335 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2336 the original copy of its contents. */
2337 return true;
2339 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2340 inside an OpenACC CTX. */
2341 if (!(is_gimple_omp (stmt)
2342 && is_gimple_omp_oacc (stmt))
2343 /* Except for atomic codes that we share with OpenMP. */
2344 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2345 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2347 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2349 error_at (gimple_location (stmt),
2350 "non-OpenACC construct inside of OpenACC routine");
2351 return false;
2353 else
2354 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2355 if (is_gimple_omp (octx->stmt)
2356 && is_gimple_omp_oacc (octx->stmt))
2358 error_at (gimple_location (stmt),
2359 "non-OpenACC construct inside of OpenACC region");
2360 return false;
2364 if (ctx != NULL)
2366 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2367 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2369 c = NULL_TREE;
2370 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2372 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2373 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2375 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2376 && (ctx->outer == NULL
2377 || !gimple_omp_for_combined_into_p (ctx->stmt)
2378 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2379 || (gimple_omp_for_kind (ctx->outer->stmt)
2380 != GF_OMP_FOR_KIND_FOR)
2381 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2383 error_at (gimple_location (stmt),
2384 "%<ordered simd threads%> must be closely "
2385 "nested inside of %<for simd%> region");
2386 return false;
2388 return true;
2391 error_at (gimple_location (stmt),
2392 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2393 " may not be nested inside %<simd%> region");
2394 return false;
2396 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2398 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2399 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2400 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2401 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2403 error_at (gimple_location (stmt),
2404 "only %<distribute%> or %<parallel%> regions are "
2405 "allowed to be strictly nested inside %<teams%> "
2406 "region");
2407 return false;
2411 switch (gimple_code (stmt))
2413 case GIMPLE_OMP_FOR:
2414 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2415 return true;
2416 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2418 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2420 error_at (gimple_location (stmt),
2421 "%<distribute%> region must be strictly nested "
2422 "inside %<teams%> construct");
2423 return false;
2425 return true;
2427 /* We split a taskloop into a task with a nested taskloop inside it. */
2428 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2429 return true;
2430 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2432 bool ok = false;
2434 if (ctx)
2435 switch (gimple_code (ctx->stmt))
2437 case GIMPLE_OMP_FOR:
2438 ok = (gimple_omp_for_kind (ctx->stmt)
2439 == GF_OMP_FOR_KIND_OACC_LOOP);
2440 break;
2442 case GIMPLE_OMP_TARGET:
2443 switch (gimple_omp_target_kind (ctx->stmt))
2445 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2446 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2447 ok = true;
2448 break;
2450 default:
2451 break;
2454 default:
2455 break;
2457 else if (oacc_get_fn_attrib (current_function_decl))
2458 ok = true;
2459 if (!ok)
2461 error_at (gimple_location (stmt),
2462 "OpenACC loop directive must be associated with"
2463 " an OpenACC compute region");
2464 return false;
2467 /* FALLTHRU */
2468 case GIMPLE_CALL:
2469 if (is_gimple_call (stmt)
2470 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2471 == BUILT_IN_GOMP_CANCEL
2472 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2473 == BUILT_IN_GOMP_CANCELLATION_POINT))
2475 const char *bad = NULL;
2476 const char *kind = NULL;
2477 const char *construct
2478 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2479 == BUILT_IN_GOMP_CANCEL)
2480 ? "#pragma omp cancel"
2481 : "#pragma omp cancellation point";
2482 if (ctx == NULL)
2484 error_at (gimple_location (stmt), "orphaned %qs construct",
2485 construct);
2486 return false;
2488 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2489 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2490 : 0)
2492 case 1:
2493 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2494 bad = "#pragma omp parallel";
2495 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2496 == BUILT_IN_GOMP_CANCEL
2497 && !integer_zerop (gimple_call_arg (stmt, 1)))
2498 ctx->cancellable = true;
2499 kind = "parallel";
2500 break;
2501 case 2:
2502 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2503 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2504 bad = "#pragma omp for";
2505 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2506 == BUILT_IN_GOMP_CANCEL
2507 && !integer_zerop (gimple_call_arg (stmt, 1)))
2509 ctx->cancellable = true;
2510 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2511 OMP_CLAUSE_NOWAIT))
2512 warning_at (gimple_location (stmt), 0,
2513 "%<#pragma omp cancel for%> inside "
2514 "%<nowait%> for construct");
2515 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2516 OMP_CLAUSE_ORDERED))
2517 warning_at (gimple_location (stmt), 0,
2518 "%<#pragma omp cancel for%> inside "
2519 "%<ordered%> for construct");
2521 kind = "for";
2522 break;
2523 case 4:
2524 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2525 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2526 bad = "#pragma omp sections";
2527 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2528 == BUILT_IN_GOMP_CANCEL
2529 && !integer_zerop (gimple_call_arg (stmt, 1)))
2531 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2533 ctx->cancellable = true;
2534 if (omp_find_clause (gimple_omp_sections_clauses
2535 (ctx->stmt),
2536 OMP_CLAUSE_NOWAIT))
2537 warning_at (gimple_location (stmt), 0,
2538 "%<#pragma omp cancel sections%> inside "
2539 "%<nowait%> sections construct");
2541 else
2543 gcc_assert (ctx->outer
2544 && gimple_code (ctx->outer->stmt)
2545 == GIMPLE_OMP_SECTIONS);
2546 ctx->outer->cancellable = true;
2547 if (omp_find_clause (gimple_omp_sections_clauses
2548 (ctx->outer->stmt),
2549 OMP_CLAUSE_NOWAIT))
2550 warning_at (gimple_location (stmt), 0,
2551 "%<#pragma omp cancel sections%> inside "
2552 "%<nowait%> sections construct");
2555 kind = "sections";
2556 break;
2557 case 8:
2558 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2559 bad = "#pragma omp task";
2560 else
2562 for (omp_context *octx = ctx->outer;
2563 octx; octx = octx->outer)
2565 switch (gimple_code (octx->stmt))
2567 case GIMPLE_OMP_TASKGROUP:
2568 break;
2569 case GIMPLE_OMP_TARGET:
2570 if (gimple_omp_target_kind (octx->stmt)
2571 != GF_OMP_TARGET_KIND_REGION)
2572 continue;
2573 /* FALLTHRU */
2574 case GIMPLE_OMP_PARALLEL:
2575 case GIMPLE_OMP_TEAMS:
2576 error_at (gimple_location (stmt),
2577 "%<%s taskgroup%> construct not closely "
2578 "nested inside of %<taskgroup%> region",
2579 construct);
2580 return false;
2581 default:
2582 continue;
2584 break;
2586 ctx->cancellable = true;
2588 kind = "taskgroup";
2589 break;
2590 default:
2591 error_at (gimple_location (stmt), "invalid arguments");
2592 return false;
2594 if (bad)
2596 error_at (gimple_location (stmt),
2597 "%<%s %s%> construct not closely nested inside of %qs",
2598 construct, kind, bad);
2599 return false;
2602 /* FALLTHRU */
2603 case GIMPLE_OMP_SECTIONS:
2604 case GIMPLE_OMP_SINGLE:
2605 for (; ctx != NULL; ctx = ctx->outer)
2606 switch (gimple_code (ctx->stmt))
2608 case GIMPLE_OMP_FOR:
2609 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2610 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2611 break;
2612 /* FALLTHRU */
2613 case GIMPLE_OMP_SECTIONS:
2614 case GIMPLE_OMP_SINGLE:
2615 case GIMPLE_OMP_ORDERED:
2616 case GIMPLE_OMP_MASTER:
2617 case GIMPLE_OMP_TASK:
2618 case GIMPLE_OMP_CRITICAL:
2619 if (is_gimple_call (stmt))
2621 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2622 != BUILT_IN_GOMP_BARRIER)
2623 return true;
2624 error_at (gimple_location (stmt),
2625 "barrier region may not be closely nested inside "
2626 "of work-sharing, %<critical%>, %<ordered%>, "
2627 "%<master%>, explicit %<task%> or %<taskloop%> "
2628 "region");
2629 return false;
2631 error_at (gimple_location (stmt),
2632 "work-sharing region may not be closely nested inside "
2633 "of work-sharing, %<critical%>, %<ordered%>, "
2634 "%<master%>, explicit %<task%> or %<taskloop%> region");
2635 return false;
2636 case GIMPLE_OMP_PARALLEL:
2637 case GIMPLE_OMP_TEAMS:
2638 return true;
2639 case GIMPLE_OMP_TARGET:
2640 if (gimple_omp_target_kind (ctx->stmt)
2641 == GF_OMP_TARGET_KIND_REGION)
2642 return true;
2643 break;
2644 default:
2645 break;
2647 break;
2648 case GIMPLE_OMP_MASTER:
2649 for (; ctx != NULL; ctx = ctx->outer)
2650 switch (gimple_code (ctx->stmt))
2652 case GIMPLE_OMP_FOR:
2653 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2654 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2655 break;
2656 /* FALLTHRU */
2657 case GIMPLE_OMP_SECTIONS:
2658 case GIMPLE_OMP_SINGLE:
2659 case GIMPLE_OMP_TASK:
2660 error_at (gimple_location (stmt),
2661 "%<master%> region may not be closely nested inside "
2662 "of work-sharing, explicit %<task%> or %<taskloop%> "
2663 "region");
2664 return false;
2665 case GIMPLE_OMP_PARALLEL:
2666 case GIMPLE_OMP_TEAMS:
2667 return true;
2668 case GIMPLE_OMP_TARGET:
2669 if (gimple_omp_target_kind (ctx->stmt)
2670 == GF_OMP_TARGET_KIND_REGION)
2671 return true;
2672 break;
2673 default:
2674 break;
2676 break;
2677 case GIMPLE_OMP_TASK:
2678 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2679 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2680 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2681 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2683 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2684 error_at (OMP_CLAUSE_LOCATION (c),
2685 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2686 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2687 return false;
2689 break;
2690 case GIMPLE_OMP_ORDERED:
2691 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2692 c; c = OMP_CLAUSE_CHAIN (c))
2694 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2696 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2697 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2698 continue;
2700 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2701 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2702 || kind == OMP_CLAUSE_DEPEND_SINK)
2704 tree oclause;
2705 /* Look for a containing ordered(N) loop. */
2706 if (ctx == NULL
2707 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2708 || (oclause
2709 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2710 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2712 error_at (OMP_CLAUSE_LOCATION (c),
2713 "%<ordered%> construct with %<depend%> clause "
2714 "must be closely nested inside an %<ordered%> "
2715 "loop");
2716 return false;
2718 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2720 error_at (OMP_CLAUSE_LOCATION (c),
2721 "%<ordered%> construct with %<depend%> clause "
2722 "must be closely nested inside a loop with "
2723 "%<ordered%> clause with a parameter");
2724 return false;
2727 else
2729 error_at (OMP_CLAUSE_LOCATION (c),
2730 "invalid depend kind in omp %<ordered%> %<depend%>");
2731 return false;
2734 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2735 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2737 /* An ordered simd must be closely nested inside a simd region,
2738 and a simd region must not encounter constructs other than
2739 ordered simd; therefore an ordered simd is either orphaned,
2740 or ctx->stmt must be a simd. The latter case has already been
2741 handled earlier. */
2742 if (ctx != NULL)
2744 error_at (gimple_location (stmt),
2745 "%<ordered%> %<simd%> must be closely nested inside "
2746 "%<simd%> region");
2747 return false;
2750 for (; ctx != NULL; ctx = ctx->outer)
2751 switch (gimple_code (ctx->stmt))
2753 case GIMPLE_OMP_CRITICAL:
2754 case GIMPLE_OMP_TASK:
2755 case GIMPLE_OMP_ORDERED:
2756 ordered_in_taskloop:
2757 error_at (gimple_location (stmt),
2758 "%<ordered%> region may not be closely nested inside "
2759 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2760 "%<taskloop%> region");
2761 return false;
2762 case GIMPLE_OMP_FOR:
2763 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2764 goto ordered_in_taskloop;
2765 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2766 OMP_CLAUSE_ORDERED) == NULL)
2768 error_at (gimple_location (stmt),
2769 "%<ordered%> region must be closely nested inside "
2770 "a loop region with an %<ordered%> clause");
2771 return false;
2773 return true;
2774 case GIMPLE_OMP_TARGET:
2775 if (gimple_omp_target_kind (ctx->stmt)
2776 != GF_OMP_TARGET_KIND_REGION)
2777 break;
2778 /* FALLTHRU */
2779 case GIMPLE_OMP_PARALLEL:
2780 case GIMPLE_OMP_TEAMS:
2781 error_at (gimple_location (stmt),
2782 "%<ordered%> region must be closely nested inside "
2783 "a loop region with an %<ordered%> clause");
2784 return false;
2785 default:
2786 break;
2788 break;
2789 case GIMPLE_OMP_CRITICAL:
2791 tree this_stmt_name
2792 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2793 for (; ctx != NULL; ctx = ctx->outer)
2794 if (gomp_critical *other_crit
2795 = dyn_cast <gomp_critical *> (ctx->stmt))
2796 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2798 error_at (gimple_location (stmt),
2799 "%<critical%> region may not be nested inside "
2800 "a %<critical%> region with the same name");
2801 return false;
2804 break;
2805 case GIMPLE_OMP_TEAMS:
2806 if (ctx == NULL
2807 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2808 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2810 error_at (gimple_location (stmt),
2811 "%<teams%> construct not closely nested inside of "
2812 "%<target%> construct");
2813 return false;
2815 break;
2816 case GIMPLE_OMP_TARGET:
2817 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2819 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2820 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2822 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2823 error_at (OMP_CLAUSE_LOCATION (c),
2824 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2825 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2826 return false;
2828 if (is_gimple_omp_offloaded (stmt)
2829 && oacc_get_fn_attrib (cfun->decl) != NULL)
2831 error_at (gimple_location (stmt),
2832 "OpenACC region inside of OpenACC routine, nested "
2833 "parallelism not supported yet");
2834 return false;
2836 for (; ctx != NULL; ctx = ctx->outer)
2838 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2840 if (is_gimple_omp (stmt)
2841 && is_gimple_omp_oacc (stmt)
2842 && is_gimple_omp (ctx->stmt))
2844 error_at (gimple_location (stmt),
2845 "OpenACC construct inside of non-OpenACC region");
2846 return false;
2848 continue;
2851 const char *stmt_name, *ctx_stmt_name;
2852 switch (gimple_omp_target_kind (stmt))
2854 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2855 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2856 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2857 case GF_OMP_TARGET_KIND_ENTER_DATA:
2858 stmt_name = "target enter data"; break;
2859 case GF_OMP_TARGET_KIND_EXIT_DATA:
2860 stmt_name = "target exit data"; break;
2861 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2862 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2863 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2864 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2865 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2866 stmt_name = "enter/exit data"; break;
2867 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2868 break;
2869 default: gcc_unreachable ();
2871 switch (gimple_omp_target_kind (ctx->stmt))
2873 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2874 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2875 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2876 ctx_stmt_name = "parallel"; break;
2877 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2878 ctx_stmt_name = "kernels"; break;
2879 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2880 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2881 ctx_stmt_name = "host_data"; break;
2882 default: gcc_unreachable ();
2885 /* OpenACC/OpenMP mismatch? */
2886 if (is_gimple_omp_oacc (stmt)
2887 != is_gimple_omp_oacc (ctx->stmt))
2889 error_at (gimple_location (stmt),
2890 "%s %qs construct inside of %s %qs region",
2891 (is_gimple_omp_oacc (stmt)
2892 ? "OpenACC" : "OpenMP"), stmt_name,
2893 (is_gimple_omp_oacc (ctx->stmt)
2894 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2895 return false;
2897 if (is_gimple_omp_offloaded (ctx->stmt))
2899 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2900 if (is_gimple_omp_oacc (ctx->stmt))
2902 error_at (gimple_location (stmt),
2903 "%qs construct inside of %qs region",
2904 stmt_name, ctx_stmt_name);
2905 return false;
2907 else
2909 warning_at (gimple_location (stmt), 0,
2910 "%qs construct inside of %qs region",
2911 stmt_name, ctx_stmt_name);
2915 break;
2916 default:
2917 break;
2919 return true;
2923 /* Helper function for scan_omp.
2925 Callback for walk_tree, or for the operand walk in walk_gimple_stmt,
2926 used to scan for OMP directives in TP. */
2928 static tree
2929 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
2931 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2932 omp_context *ctx = (omp_context *) wi->info;
2933 tree t = *tp;
2935 switch (TREE_CODE (t))
2937 case VAR_DECL:
2938 case PARM_DECL:
2939 case LABEL_DECL:
2940 case RESULT_DECL:
2941 if (ctx)
2943 tree repl = remap_decl (t, &ctx->cb);
2944 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
2945 *tp = repl;
2947 break;
2949 default:
2950 if (ctx && TYPE_P (t))
2951 *tp = remap_type (t, &ctx->cb);
2952 else if (!DECL_P (t))
2954 *walk_subtrees = 1;
2955 if (ctx)
2957 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
2958 if (tem != TREE_TYPE (t))
2960 if (TREE_CODE (t) == INTEGER_CST)
2961 *tp = wide_int_to_tree (tem, wi::to_wide (t));
2962 else
2963 TREE_TYPE (t) = tem;
2967 break;
2970 return NULL_TREE;
2973 /* Return true if FNDECL is a setjmp or a longjmp. */
2975 static bool
2976 setjmp_or_longjmp_p (const_tree fndecl)
2978 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
2979 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
2980 return true;
2982 tree declname = DECL_NAME (fndecl);
2983 if (!declname)
2984 return false;
2985 const char *name = IDENTIFIER_POINTER (declname);
2986 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
2990 /* Helper function for scan_omp.
2992 Callback for walk_gimple_stmt used to scan for OMP directives in
2993 the current statement in GSI. */
2995 static tree
2996 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2997 struct walk_stmt_info *wi)
2999 gimple *stmt = gsi_stmt (*gsi);
3000 omp_context *ctx = (omp_context *) wi->info;
3002 if (gimple_has_location (stmt))
3003 input_location = gimple_location (stmt);
3005 /* Check the nesting restrictions. */
3006 bool remove = false;
3007 if (is_gimple_omp (stmt))
3008 remove = !check_omp_nesting_restrictions (stmt, ctx);
3009 else if (is_gimple_call (stmt))
3011 tree fndecl = gimple_call_fndecl (stmt);
3012 if (fndecl)
3014 if (setjmp_or_longjmp_p (fndecl)
3015 && ctx
3016 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3017 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3019 remove = true;
3020 error_at (gimple_location (stmt),
3021 "setjmp/longjmp inside simd construct");
3023 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3024 switch (DECL_FUNCTION_CODE (fndecl))
3026 case BUILT_IN_GOMP_BARRIER:
3027 case BUILT_IN_GOMP_CANCEL:
3028 case BUILT_IN_GOMP_CANCELLATION_POINT:
3029 case BUILT_IN_GOMP_TASKYIELD:
3030 case BUILT_IN_GOMP_TASKWAIT:
3031 case BUILT_IN_GOMP_TASKGROUP_START:
3032 case BUILT_IN_GOMP_TASKGROUP_END:
3033 remove = !check_omp_nesting_restrictions (stmt, ctx);
3034 break;
3035 default:
3036 break;
3040 if (remove)
3042 stmt = gimple_build_nop ();
3043 gsi_replace (gsi, stmt, false);
3046 *handled_ops_p = true;
3048 switch (gimple_code (stmt))
3050 case GIMPLE_OMP_PARALLEL:
3051 taskreg_nesting_level++;
3052 scan_omp_parallel (gsi, ctx);
3053 taskreg_nesting_level--;
3054 break;
3056 case GIMPLE_OMP_TASK:
3057 taskreg_nesting_level++;
3058 scan_omp_task (gsi, ctx);
3059 taskreg_nesting_level--;
3060 break;
3062 case GIMPLE_OMP_FOR:
3063 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3064 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3065 && omp_maybe_offloaded_ctx (ctx)
3066 && omp_max_simt_vf ())
3067 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3068 else
3069 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3070 break;
3072 case GIMPLE_OMP_SECTIONS:
3073 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3074 break;
3076 case GIMPLE_OMP_SINGLE:
3077 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3078 break;
3080 case GIMPLE_OMP_SECTION:
3081 case GIMPLE_OMP_MASTER:
3082 case GIMPLE_OMP_TASKGROUP:
3083 case GIMPLE_OMP_ORDERED:
3084 case GIMPLE_OMP_CRITICAL:
3085 case GIMPLE_OMP_GRID_BODY:
3086 ctx = new_omp_context (stmt, ctx);
3087 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3088 break;
3090 case GIMPLE_OMP_TARGET:
3091 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3092 break;
3094 case GIMPLE_OMP_TEAMS:
3095 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3096 break;
3098 case GIMPLE_BIND:
3100 tree var;
3102 *handled_ops_p = false;
3103 if (ctx)
3104 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3105 var;
3106 var = DECL_CHAIN (var))
3107 insert_decl_map (&ctx->cb, var, var);
3109 break;
3110 default:
3111 *handled_ops_p = false;
3112 break;
3115 return NULL_TREE;
3119 /* Scan all the statements starting at the current statement. CTX
3120 contains context information about the OMP directives and
3121 clauses found during the scan. */
3123 static void
3124 scan_omp (gimple_seq *body_p, omp_context *ctx)
3126 location_t saved_location;
3127 struct walk_stmt_info wi;
3129 memset (&wi, 0, sizeof (wi));
3130 wi.info = ctx;
3131 wi.want_locations = true;
3133 saved_location = input_location;
3134 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3135 input_location = saved_location;
3138 /* Re-gimplification and code generation routines. */
3140 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3141 of BIND if in a method. */
3143 static void
3144 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3146 if (DECL_ARGUMENTS (current_function_decl)
3147 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3148 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3149 == POINTER_TYPE))
3151 tree vars = gimple_bind_vars (bind);
3152 for (tree *pvar = &vars; *pvar; )
3153 if (omp_member_access_dummy_var (*pvar))
3154 *pvar = DECL_CHAIN (*pvar);
3155 else
3156 pvar = &DECL_CHAIN (*pvar);
3157 gimple_bind_set_vars (bind, vars);
3161 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3162 block and its subblocks. */
3164 static void
3165 remove_member_access_dummy_vars (tree block)
3167 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3168 if (omp_member_access_dummy_var (*pvar))
3169 *pvar = DECL_CHAIN (*pvar);
3170 else
3171 pvar = &DECL_CHAIN (*pvar);
3173 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3174 remove_member_access_dummy_vars (block);
3177 /* If a context was created for STMT when it was scanned, return it. */
3179 static omp_context *
3180 maybe_lookup_ctx (gimple *stmt)
3182 splay_tree_node n;
3183 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3184 return n ? (omp_context *) n->value : NULL;
3188 /* Find the mapping for DECL in CTX or the immediately enclosing
3189 context that has a mapping for DECL.
3191 If CTX is a nested parallel directive, we may have to use the decl
3192 mappings created in CTX's parent context. Suppose that we have the
3193 following parallel nesting (variable UIDs shown for clarity):
3195 iD.1562 = 0;
3196 #omp parallel shared(iD.1562) -> outer parallel
3197 iD.1562 = iD.1562 + 1;
3199 #omp parallel shared (iD.1562) -> inner parallel
3200 iD.1562 = iD.1562 - 1;
3202 Each parallel structure will create a distinct .omp_data_s structure
3203 for copying iD.1562 in/out of the directive:
3205 outer parallel .omp_data_s.1.i -> iD.1562
3206 inner parallel .omp_data_s.2.i -> iD.1562
3208 A shared variable mapping will produce a copy-out operation before
3209 the parallel directive and a copy-in operation after it. So, in
3210 this case we would have:
3212 iD.1562 = 0;
3213 .omp_data_o.1.i = iD.1562;
3214 #omp parallel shared(iD.1562) -> outer parallel
3215 .omp_data_i.1 = &.omp_data_o.1
3216 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3218 .omp_data_o.2.i = iD.1562; -> **
3219 #omp parallel shared(iD.1562) -> inner parallel
3220 .omp_data_i.2 = &.omp_data_o.2
3221 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3224 ** This is a problem. The symbol iD.1562 cannot be referenced
3225 inside the body of the outer parallel region. But since we are
3226 emitting this copy operation while expanding the inner parallel
3227 directive, we need to access the CTX structure of the outer
3228 parallel directive to get the correct mapping:
3230 .omp_data_o.2.i = .omp_data_i.1->i
3232 Since there may be other workshare or parallel directives enclosing
3233 the parallel directive, it may be necessary to walk up the context
3234 parent chain. This is not a problem in general because nested
3235 parallelism happens only rarely. */
3237 static tree
3238 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3240 tree t;
3241 omp_context *up;
3243 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3244 t = maybe_lookup_decl (decl, up);
3246 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3248 return t ? t : decl;
3252 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3253 in outer contexts. */
3255 static tree
3256 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3258 tree t = NULL;
3259 omp_context *up;
3261 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3262 t = maybe_lookup_decl (decl, up);
3264 return t ? t : decl;
3268 /* Construct the initialization value for reduction operation OP. */
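/* The identity elements produced below, summarized (an informative
   sketch):

     +  -  |  ^  ||  !=	0
     *  &&  ==			1
     &				~0 (all bits set)
     max			minimum of the type (or -Inf)
     min			maximum of the type (or +Inf)  */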
3270 tree
3271 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3273 switch (op)
3275 case PLUS_EXPR:
3276 case MINUS_EXPR:
3277 case BIT_IOR_EXPR:
3278 case BIT_XOR_EXPR:
3279 case TRUTH_OR_EXPR:
3280 case TRUTH_ORIF_EXPR:
3281 case TRUTH_XOR_EXPR:
3282 case NE_EXPR:
3283 return build_zero_cst (type);
3285 case MULT_EXPR:
3286 case TRUTH_AND_EXPR:
3287 case TRUTH_ANDIF_EXPR:
3288 case EQ_EXPR:
3289 return fold_convert_loc (loc, type, integer_one_node);
3291 case BIT_AND_EXPR:
3292 return fold_convert_loc (loc, type, integer_minus_one_node);
3294 case MAX_EXPR:
3295 if (SCALAR_FLOAT_TYPE_P (type))
3297 REAL_VALUE_TYPE max, min;
3298 if (HONOR_INFINITIES (type))
3300 real_inf (&max);
3301 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3303 else
3304 real_maxval (&min, 1, TYPE_MODE (type));
3305 return build_real (type, min);
3307 else if (POINTER_TYPE_P (type))
3309 wide_int min
3310 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3311 return wide_int_to_tree (type, min);
3313 else
3315 gcc_assert (INTEGRAL_TYPE_P (type));
3316 return TYPE_MIN_VALUE (type);
3319 case MIN_EXPR:
3320 if (SCALAR_FLOAT_TYPE_P (type))
3322 REAL_VALUE_TYPE max;
3323 if (HONOR_INFINITIES (type))
3324 real_inf (&max);
3325 else
3326 real_maxval (&max, 0, TYPE_MODE (type));
3327 return build_real (type, max);
3329 else if (POINTER_TYPE_P (type))
3331 wide_int max
3332 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3333 return wide_int_to_tree (type, max);
3335 else
3337 gcc_assert (INTEGRAL_TYPE_P (type));
3338 return TYPE_MAX_VALUE (type);
3341 default:
3342 gcc_unreachable ();
3346 /* Construct the initialization value for reduction CLAUSE. */
3348 tree
3349 omp_reduction_init (tree clause, tree type)
3351 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3352 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3355 /* Return alignment to be assumed for var in CLAUSE, which should be
3356 OMP_CLAUSE_ALIGNED. */
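/* E.g. (an illustrative sketch), for

     #pragma omp simd aligned (p : 32)

   this returns 32, while for a plain "aligned (p)" it returns the
   implementation-defined default computed below from the target's
   preferred SIMD modes, so the exact value is target-dependent
   (presumably 64 bytes on a target whose widest vectors are
   512 bits).  */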
3358 static tree
3359 omp_clause_aligned_alignment (tree clause)
3361 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3362 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3364 /* Otherwise return an implementation-defined alignment. */
3365 unsigned int al = 1;
3366 opt_scalar_mode mode_iter;
3367 auto_vector_sizes sizes;
3368 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3369 poly_uint64 vs = 0;
3370 for (unsigned int i = 0; i < sizes.length (); ++i)
3371 vs = ordered_max (vs, sizes[i]);
3372 static enum mode_class classes[]
3373 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3374 for (int i = 0; i < 4; i += 2)
3375 /* The for loop above dictates that we only walk through scalar classes. */
3376 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3378 scalar_mode mode = mode_iter.require ();
3379 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3380 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3381 continue;
3382 while (maybe_ne (vs, 0U)
3383 && known_lt (GET_MODE_SIZE (vmode), vs)
3384 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3385 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3387 tree type = lang_hooks.types.type_for_mode (mode, 1);
3388 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3389 continue;
3390 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3391 GET_MODE_SIZE (mode));
3392 type = build_vector_type (type, nelts);
3393 if (TYPE_MODE (type) != vmode)
3394 continue;
3395 if (TYPE_ALIGN_UNIT (type) > al)
3396 al = TYPE_ALIGN_UNIT (type);
3398 return build_int_cst (integer_type_node, al);
3402 /* This structure is part of the interface between lower_rec_simd_input_clauses
3403 and lower_rec_input_clauses. */
3405 struct omplow_simd_context {
3406 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3407 tree idx;
3408 tree lane;
3409 vec<tree, va_heap> simt_eargs;
3410 gimple_seq simt_dlist;
3411 poly_uint64_pod max_vf;
3412 bool is_simt;
3415 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3416 privatization. */
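/* Roughly (an illustrative sketch), for a privatized scalar X in a
   simd loop this builds an "omp simd array"

     X_arr[max_vf];

   rewrites uses of X in the loop body to X_arr[sctx->idx] via IVAR,
   and sets X's value expression to X_arr[sctx->lane] via LVAR, so
   that the vectorizer can later replace the array with a vector.
   Under SIMT, a per-lane private variable marked "omp simt private"
   is used instead of the array.  */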
3418 static bool
3419 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3420 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3422 if (known_eq (sctx->max_vf, 0U))
3424 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3425 if (maybe_gt (sctx->max_vf, 1U))
3427 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3428 OMP_CLAUSE_SAFELEN);
3429 if (c)
3431 poly_uint64 safe_len;
3432 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3433 || maybe_lt (safe_len, 1U))
3434 sctx->max_vf = 1;
3435 else
3436 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3439 if (maybe_gt (sctx->max_vf, 1U))
3441 sctx->idx = create_tmp_var (unsigned_type_node);
3442 sctx->lane = create_tmp_var (unsigned_type_node);
3445 if (known_eq (sctx->max_vf, 1U))
3446 return false;
3448 if (sctx->is_simt)
3450 if (is_gimple_reg (new_var))
3452 ivar = lvar = new_var;
3453 return true;
3455 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3456 ivar = lvar = create_tmp_var (type);
3457 TREE_ADDRESSABLE (ivar) = 1;
3458 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3459 NULL, DECL_ATTRIBUTES (ivar));
3460 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3461 tree clobber = build_constructor (type, NULL);
3462 TREE_THIS_VOLATILE (clobber) = 1;
3463 gimple *g = gimple_build_assign (ivar, clobber);
3464 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3466 else
3468 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3469 tree avar = create_tmp_var_raw (atype);
3470 if (TREE_ADDRESSABLE (new_var))
3471 TREE_ADDRESSABLE (avar) = 1;
3472 DECL_ATTRIBUTES (avar)
3473 = tree_cons (get_identifier ("omp simd array"), NULL,
3474 DECL_ATTRIBUTES (avar));
3475 gimple_add_tmp_var (avar);
3476 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3477 NULL_TREE, NULL_TREE);
3478 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3479 NULL_TREE, NULL_TREE);
3481 if (DECL_P (new_var))
3483 SET_DECL_VALUE_EXPR (new_var, lvar);
3484 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3486 return true;
3489 /* Helper function of lower_rec_input_clauses. For a reference
3490 in a simd reduction, add an underlying variable that it will reference. */
3492 static void
3493 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3495 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3496 if (TREE_CONSTANT (z))
3498 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3499 get_name (new_vard));
3500 gimple_add_tmp_var (z);
3501 TREE_ADDRESSABLE (z) = 1;
3502 z = build_fold_addr_expr_loc (loc, z);
3503 gimplify_assign (new_vard, z, ilist);
3507 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3508 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3509 private variables. Initialization statements go in ILIST, while calls
3510 to destructors go in DLIST. */
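/* E.g. (a sketch only), for

     #pragma omp parallel firstprivate (a)

   ILIST would receive, on the child side, roughly

     a.1 = .omp_data_i->a;

   while for a C++ type with a destructor the matching destructor
   call for the private copy would be appended to DLIST.  */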
3512 static void
3513 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3514 omp_context *ctx, struct omp_for_data *fd)
3516 tree c, dtor, copyin_seq, x, ptr;
3517 bool copyin_by_ref = false;
3518 bool lastprivate_firstprivate = false;
3519 bool reduction_omp_orig_ref = false;
3520 int pass;
3521 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3522 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3523 omplow_simd_context sctx = omplow_simd_context ();
3524 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3525 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3526 gimple_seq llist[3] = { };
3528 copyin_seq = NULL;
3529 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3531 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3532 with data sharing clauses referencing variable sized vars. That
3533 is unnecessarily hard to support and very unlikely to result in
3534 vectorized code anyway. */
3535 if (is_simd)
3536 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3537 switch (OMP_CLAUSE_CODE (c))
3539 case OMP_CLAUSE_LINEAR:
3540 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3541 sctx.max_vf = 1;
3542 /* FALLTHRU */
3543 case OMP_CLAUSE_PRIVATE:
3544 case OMP_CLAUSE_FIRSTPRIVATE:
3545 case OMP_CLAUSE_LASTPRIVATE:
3546 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3547 sctx.max_vf = 1;
3548 break;
3549 case OMP_CLAUSE_REDUCTION:
3550 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3551 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3552 sctx.max_vf = 1;
3553 break;
3554 default:
3555 continue;
3558 /* Add a placeholder for simduid. */
3559 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3560 sctx.simt_eargs.safe_push (NULL_TREE);
3562 /* Do all the fixed sized types in the first pass, and the variable sized
3563 types in the second pass. This makes sure that the scalar arguments to
3564 the variable sized types are processed before we use them in the
3565 variable sized operations. */
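/* E.g. (illustrative), given

     int n = ...;
     int vla[n];
     #pragma omp parallel private (n, vla)

   the scalar n is handled in the first pass so that the size
   expression of the privatized vla may refer to it in the second
   pass.  */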
3566 for (pass = 0; pass < 2; ++pass)
3568 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3570 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3571 tree var, new_var;
3572 bool by_ref;
3573 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3575 switch (c_kind)
3577 case OMP_CLAUSE_PRIVATE:
3578 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3579 continue;
3580 break;
3581 case OMP_CLAUSE_SHARED:
3582 /* Ignore shared directives in teams construct. */
3583 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3584 continue;
3585 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3587 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3588 || is_global_var (OMP_CLAUSE_DECL (c)));
3589 continue;
3591 case OMP_CLAUSE_FIRSTPRIVATE:
3592 case OMP_CLAUSE_COPYIN:
3593 break;
3594 case OMP_CLAUSE_LINEAR:
3595 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3596 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3597 lastprivate_firstprivate = true;
3598 break;
3599 case OMP_CLAUSE_REDUCTION:
3600 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3601 reduction_omp_orig_ref = true;
3602 break;
3603 case OMP_CLAUSE__LOOPTEMP_:
3604 /* Handle _looptemp_ clauses only on parallel/task. */
3605 if (fd)
3606 continue;
3607 break;
3608 case OMP_CLAUSE_LASTPRIVATE:
3609 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3611 lastprivate_firstprivate = true;
3612 if (pass != 0 || is_taskloop_ctx (ctx))
3613 continue;
3615 /* Even without a corresponding firstprivate, if the
3616 decl is a Fortran allocatable, it needs an outer var
3617 reference. */
3618 else if (pass == 0
3619 && lang_hooks.decls.omp_private_outer_ref
3620 (OMP_CLAUSE_DECL (c)))
3621 lastprivate_firstprivate = true;
3622 break;
3623 case OMP_CLAUSE_ALIGNED:
3624 if (pass == 0)
3625 continue;
3626 var = OMP_CLAUSE_DECL (c);
3627 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3628 && !is_global_var (var))
3630 new_var = maybe_lookup_decl (var, ctx);
3631 if (new_var == NULL_TREE)
3632 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3633 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3634 tree alarg = omp_clause_aligned_alignment (c);
3635 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3636 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3637 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3638 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3639 gimplify_and_add (x, ilist);
3641 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3642 && is_global_var (var))
3644 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3645 new_var = lookup_decl (var, ctx);
3646 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3647 t = build_fold_addr_expr_loc (clause_loc, t);
3648 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3649 tree alarg = omp_clause_aligned_alignment (c);
3650 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3651 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3652 t = fold_convert_loc (clause_loc, ptype, t);
3653 x = create_tmp_var (ptype);
3654 t = build2 (MODIFY_EXPR, ptype, x, t);
3655 gimplify_and_add (t, ilist);
3656 t = build_simple_mem_ref_loc (clause_loc, x);
3657 SET_DECL_VALUE_EXPR (new_var, t);
3658 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3660 continue;
3661 default:
3662 continue;
3665 new_var = var = OMP_CLAUSE_DECL (c);
3666 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3668 var = TREE_OPERAND (var, 0);
3669 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3670 var = TREE_OPERAND (var, 0);
3671 if (TREE_CODE (var) == INDIRECT_REF
3672 || TREE_CODE (var) == ADDR_EXPR)
3673 var = TREE_OPERAND (var, 0);
3674 if (is_variable_sized (var))
3676 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3677 var = DECL_VALUE_EXPR (var);
3678 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3679 var = TREE_OPERAND (var, 0);
3680 gcc_assert (DECL_P (var));
3682 new_var = var;
3684 if (c_kind != OMP_CLAUSE_COPYIN)
3685 new_var = lookup_decl (var, ctx);
3687 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3689 if (pass != 0)
3690 continue;
3692 /* C/C++ array section reductions. */
3693 else if (c_kind == OMP_CLAUSE_REDUCTION
3694 && var != OMP_CLAUSE_DECL (c))
3696 if (pass == 0)
3697 continue;
3699 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3700 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3701 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3703 tree b = TREE_OPERAND (orig_var, 1);
3704 b = maybe_lookup_decl (b, ctx);
3705 if (b == NULL)
3707 b = TREE_OPERAND (orig_var, 1);
3708 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3710 if (integer_zerop (bias))
3711 bias = b;
3712 else
3714 bias = fold_convert_loc (clause_loc,
3715 TREE_TYPE (b), bias);
3716 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3717 TREE_TYPE (b), b, bias);
3719 orig_var = TREE_OPERAND (orig_var, 0);
3721 if (TREE_CODE (orig_var) == INDIRECT_REF
3722 || TREE_CODE (orig_var) == ADDR_EXPR)
3723 orig_var = TREE_OPERAND (orig_var, 0);
3724 tree d = OMP_CLAUSE_DECL (c);
3725 tree type = TREE_TYPE (d);
3726 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3727 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3728 const char *name = get_name (orig_var);
3729 if (TREE_CONSTANT (v))
3731 x = create_tmp_var_raw (type, name);
3732 gimple_add_tmp_var (x);
3733 TREE_ADDRESSABLE (x) = 1;
3734 x = build_fold_addr_expr_loc (clause_loc, x);
3736 else
3738 tree atmp
3739 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3740 tree t = maybe_lookup_decl (v, ctx);
3741 if (t)
3742 v = t;
3743 else
3744 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3745 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3746 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3747 TREE_TYPE (v), v,
3748 build_int_cst (TREE_TYPE (v), 1));
3749 t = fold_build2_loc (clause_loc, MULT_EXPR,
3750 TREE_TYPE (v), t,
3751 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3752 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3753 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3756 tree ptype = build_pointer_type (TREE_TYPE (type));
3757 x = fold_convert_loc (clause_loc, ptype, x);
3758 tree y = create_tmp_var (ptype, name);
3759 gimplify_assign (y, x, ilist);
3760 x = y;
3761 tree yb = y;
3763 if (!integer_zerop (bias))
3765 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3766 bias);
3767 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3768 y);
3769 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3770 pointer_sized_int_node, yb, bias);
3771 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3772 yb = create_tmp_var (ptype, name);
3773 gimplify_assign (yb, x, ilist);
3774 x = yb;
3777 d = TREE_OPERAND (d, 0);
3778 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3779 d = TREE_OPERAND (d, 0);
3780 if (TREE_CODE (d) == ADDR_EXPR)
3782 if (orig_var != var)
3784 gcc_assert (is_variable_sized (orig_var));
3785 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3786 x);
3787 gimplify_assign (new_var, x, ilist);
3788 tree new_orig_var = lookup_decl (orig_var, ctx);
3789 tree t = build_fold_indirect_ref (new_var);
3790 DECL_IGNORED_P (new_var) = 0;
3791 TREE_THIS_NOTRAP (t) = 1;
3792 SET_DECL_VALUE_EXPR (new_orig_var, t);
3793 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3795 else
3797 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3798 build_int_cst (ptype, 0));
3799 SET_DECL_VALUE_EXPR (new_var, x);
3800 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3803 else
3805 gcc_assert (orig_var == var);
3806 if (TREE_CODE (d) == INDIRECT_REF)
3808 x = create_tmp_var (ptype, name);
3809 TREE_ADDRESSABLE (x) = 1;
3810 gimplify_assign (x, yb, ilist);
3811 x = build_fold_addr_expr_loc (clause_loc, x);
3813 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3814 gimplify_assign (new_var, x, ilist);
3816 tree y1 = create_tmp_var (ptype, NULL);
3817 gimplify_assign (y1, y, ilist);
3818 tree i2 = NULL_TREE, y2 = NULL_TREE;
3819 tree body2 = NULL_TREE, end2 = NULL_TREE;
3820 tree y3 = NULL_TREE, y4 = NULL_TREE;
3821 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3823 y2 = create_tmp_var (ptype, NULL);
3824 gimplify_assign (y2, y, ilist);
3825 tree ref = build_outer_var_ref (var, ctx);
3826 /* For references, build_outer_var_ref has already performed this. */
3827 if (TREE_CODE (d) == INDIRECT_REF)
3828 gcc_assert (omp_is_reference (var));
3829 else if (TREE_CODE (d) == ADDR_EXPR)
3830 ref = build_fold_addr_expr (ref);
3831 else if (omp_is_reference (var))
3832 ref = build_fold_addr_expr (ref);
3833 ref = fold_convert_loc (clause_loc, ptype, ref);
3834 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3835 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3837 y3 = create_tmp_var (ptype, NULL);
3838 gimplify_assign (y3, unshare_expr (ref), ilist);
3840 if (is_simd)
3842 y4 = create_tmp_var (ptype, NULL);
3843 gimplify_assign (y4, ref, dlist);
3846 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3847 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3848 tree body = create_artificial_label (UNKNOWN_LOCATION);
3849 tree end = create_artificial_label (UNKNOWN_LOCATION);
3850 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3851 if (y2)
3853 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3854 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3855 body2 = create_artificial_label (UNKNOWN_LOCATION);
3856 end2 = create_artificial_label (UNKNOWN_LOCATION);
3857 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3859 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3861 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3862 tree decl_placeholder
3863 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3864 SET_DECL_VALUE_EXPR (decl_placeholder,
3865 build_simple_mem_ref (y1));
3866 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3867 SET_DECL_VALUE_EXPR (placeholder,
3868 y3 ? build_simple_mem_ref (y3)
3869 : error_mark_node);
3870 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3871 x = lang_hooks.decls.omp_clause_default_ctor
3872 (c, build_simple_mem_ref (y1),
3873 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3874 if (x)
3875 gimplify_and_add (x, ilist);
3876 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3878 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3879 lower_omp (&tseq, ctx);
3880 gimple_seq_add_seq (ilist, tseq);
3882 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3883 if (is_simd)
3885 SET_DECL_VALUE_EXPR (decl_placeholder,
3886 build_simple_mem_ref (y2));
3887 SET_DECL_VALUE_EXPR (placeholder,
3888 build_simple_mem_ref (y4));
3889 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3890 lower_omp (&tseq, ctx);
3891 gimple_seq_add_seq (dlist, tseq);
3892 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3894 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3895 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3896 x = lang_hooks.decls.omp_clause_dtor
3897 (c, build_simple_mem_ref (y2));
3898 if (x)
3900 gimple_seq tseq = NULL;
3901 dtor = x;
3902 gimplify_stmt (&dtor, &tseq);
3903 gimple_seq_add_seq (dlist, tseq);
3906 else
3908 x = omp_reduction_init (c, TREE_TYPE (type));
3909 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3911 /* reduction(-:var) sums up the partial results, so it
3912 acts identically to reduction(+:var). */
3913 if (code == MINUS_EXPR)
3914 code = PLUS_EXPR;
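/* Illustrative example (assumed user source, not part of this file):

     #pragma omp parallel for reduction(-:sum)
     for (i = 0; i < n; i++)
       sum -= a[i];

   Each thread's private SUM starts from the '+' identity (zero) and
   the partial results are combined with '+', exactly as for
   reduction(+:sum).  */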
3916 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3917 if (is_simd)
3919 x = build2 (code, TREE_TYPE (type),
3920 build_simple_mem_ref (y4),
3921 build_simple_mem_ref (y2));
3922 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3925 gimple *g
3926 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3927 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3928 gimple_seq_add_stmt (ilist, g);
3929 if (y3)
3931 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3932 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3933 gimple_seq_add_stmt (ilist, g);
3935 g = gimple_build_assign (i, PLUS_EXPR, i,
3936 build_int_cst (TREE_TYPE (i), 1));
3937 gimple_seq_add_stmt (ilist, g);
3938 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3939 gimple_seq_add_stmt (ilist, g);
3940 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3941 if (y2)
3943 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3944 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3945 gimple_seq_add_stmt (dlist, g);
3946 if (y4)
3948 g = gimple_build_assign
3949 (y4, POINTER_PLUS_EXPR, y4,
3950 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3951 gimple_seq_add_stmt (dlist, g);
3953 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3954 build_int_cst (TREE_TYPE (i2), 1));
3955 gimple_seq_add_stmt (dlist, g);
3956 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3957 gimple_seq_add_stmt (dlist, g);
3958 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3960 continue;
3962 else if (is_variable_sized (var))
3964 /* For variable sized types, we need to allocate the
3965 actual storage here. Call alloca and store the
3966 result in the pointer decl that we created elsewhere. */
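/* Illustrative sketch (made-up names): for an 'int vla[n]' privatized
   here, the emitted code is roughly

     vla.ptr = __builtin_alloca_with_align (n * sizeof (int), align);

   where ALIGN is the variable's DECL_ALIGN in bits, and all accesses
   to VLA go through *vla.ptr via its DECL_VALUE_EXPR.  */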
3967 if (pass == 0)
3968 continue;
3970 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3972 gcall *stmt;
3973 tree tmp, atmp;
3975 ptr = DECL_VALUE_EXPR (new_var);
3976 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3977 ptr = TREE_OPERAND (ptr, 0);
3978 gcc_assert (DECL_P (ptr));
3979 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3981 /* void *tmp = __builtin_alloca_with_align (size, align); */
3982 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3983 stmt = gimple_build_call (atmp, 2, x,
3984 size_int (DECL_ALIGN (var)));
3985 tmp = create_tmp_var_raw (ptr_type_node);
3986 gimple_add_tmp_var (tmp);
3987 gimple_call_set_lhs (stmt, tmp);
3989 gimple_seq_add_stmt (ilist, stmt);
3991 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
3992 gimplify_assign (ptr, x, ilist);
3995 else if (omp_is_reference (var))
3997 /* For references that are being privatized for Fortran,
3998 allocate new backing storage for the new pointer
3999 variable. This lets us avoid rewriting all the
4000 code that expects a pointer into code that expects
4001 a direct variable. */
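/* Illustrative sketch (made-up names): for a Fortran dummy argument X,
   which is a reference under the hood, the non-constant-size case below
   emits roughly

     x.priv = __builtin_alloca_with_align (sizeof (*x), align);

   so the privatized pointer gets its own backing storage while all
   existing dereferences of it keep working unchanged.  */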
4002 if (pass == 0)
4003 continue;
4005 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4006 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4008 x = build_receiver_ref (var, false, ctx);
4009 x = build_fold_addr_expr_loc (clause_loc, x);
4011 else if (TREE_CONSTANT (x))
4013 /* For a reduction in a SIMD loop, defer adding the
4014 initialization of the reference, because if we decide
4015 to use a SIMD array for it, the initialization could cause
4016 an expansion ICE. */
4017 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4018 x = NULL_TREE;
4019 else
4021 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4022 get_name (var));
4023 gimple_add_tmp_var (x);
4024 TREE_ADDRESSABLE (x) = 1;
4025 x = build_fold_addr_expr_loc (clause_loc, x);
4028 else
4030 tree atmp
4031 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4032 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4033 tree al = size_int (TYPE_ALIGN (rtype));
4034 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4037 if (x)
4039 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4040 gimplify_assign (new_var, x, ilist);
4043 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4045 else if (c_kind == OMP_CLAUSE_REDUCTION
4046 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4048 if (pass == 0)
4049 continue;
4051 else if (pass != 0)
4052 continue;
4054 switch (OMP_CLAUSE_CODE (c))
4056 case OMP_CLAUSE_SHARED:
4057 /* Ignore shared directives in teams construct. */
4058 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4059 continue;
4060 /* Shared global vars are just accessed directly. */
4061 if (is_global_var (new_var))
4062 break;
4063 /* For taskloop firstprivate/lastprivate, which is represented
4064 as a firstprivate and a shared clause on the task, new_var
4065 is the firstprivate var. */
4066 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4067 break;
4068 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4069 needs to be delayed until after fixup_child_record_type so
4070 that we get the correct type during the dereference. */
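/* The effect is roughly (illustrative): a use of shared X inside the
   child function becomes .omp_data_i->x, where .omp_data_i points to
   the omp_data_s block filled in by the encountering thread.  */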
4071 by_ref = use_pointer_for_field (var, ctx);
4072 x = build_receiver_ref (var, by_ref, ctx);
4073 SET_DECL_VALUE_EXPR (new_var, x);
4074 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4076 /* ??? If VAR is not passed by reference, and the variable
4077 hasn't been initialized yet, then we'll get a warning for
4078 the store into the omp_data_s structure. Ideally, we'd be
4079 able to notice this and not store anything at all, but
4080 we're generating code too early. Suppress the warning. */
4081 if (!by_ref)
4082 TREE_NO_WARNING (var) = 1;
4083 break;
4085 case OMP_CLAUSE_LASTPRIVATE:
4086 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4087 break;
4088 /* FALLTHRU */
4090 case OMP_CLAUSE_PRIVATE:
4091 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4092 x = build_outer_var_ref (var, ctx);
4093 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4095 if (is_task_ctx (ctx))
4096 x = build_receiver_ref (var, false, ctx);
4097 else
4098 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4100 else
4101 x = NULL;
4102 do_private:
4103 tree nx;
4104 nx = lang_hooks.decls.omp_clause_default_ctor
4105 (c, unshare_expr (new_var), x);
4106 if (is_simd)
4108 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4109 if ((TREE_ADDRESSABLE (new_var) || nx || y
4110 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4111 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4112 ivar, lvar))
4114 if (nx)
4115 x = lang_hooks.decls.omp_clause_default_ctor
4116 (c, unshare_expr (ivar), x);
4117 if (nx && x)
4118 gimplify_and_add (x, &llist[0]);
4119 if (y)
4121 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4122 if (y)
4124 gimple_seq tseq = NULL;
4126 dtor = y;
4127 gimplify_stmt (&dtor, &tseq);
4128 gimple_seq_add_seq (&llist[1], tseq);
4131 break;
4134 if (nx)
4135 gimplify_and_add (nx, ilist);
4136 /* FALLTHRU */
4138 do_dtor:
4139 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4140 if (x)
4142 gimple_seq tseq = NULL;
4144 dtor = x;
4145 gimplify_stmt (&dtor, &tseq);
4146 gimple_seq_add_seq (dlist, tseq);
4148 break;
4150 case OMP_CLAUSE_LINEAR:
4151 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4152 goto do_firstprivate;
4153 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4154 x = NULL;
4155 else
4156 x = build_outer_var_ref (var, ctx);
4157 goto do_private;
4159 case OMP_CLAUSE_FIRSTPRIVATE:
4160 if (is_task_ctx (ctx))
4162 if (omp_is_reference (var) || is_variable_sized (var))
4163 goto do_dtor;
4164 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4165 ctx))
4166 || use_pointer_for_field (var, NULL))
4168 x = build_receiver_ref (var, false, ctx);
4169 SET_DECL_VALUE_EXPR (new_var, x);
4170 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4171 goto do_dtor;
4174 do_firstprivate:
4175 x = build_outer_var_ref (var, ctx);
4176 if (is_simd)
4178 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4179 && gimple_omp_for_combined_into_p (ctx->stmt))
4181 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4182 tree stept = TREE_TYPE (t);
4183 tree ct = omp_find_clause (clauses,
4184 OMP_CLAUSE__LOOPTEMP_);
4185 gcc_assert (ct);
4186 tree l = OMP_CLAUSE_DECL (ct);
4187 tree n1 = fd->loop.n1;
4188 tree step = fd->loop.step;
4189 tree itype = TREE_TYPE (l);
4190 if (POINTER_TYPE_P (itype))
4191 itype = signed_type_for (itype);
4192 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4193 if (TYPE_UNSIGNED (itype)
4194 && fd->loop.cond_code == GT_EXPR)
4195 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4196 fold_build1 (NEGATE_EXPR, itype, l),
4197 fold_build1 (NEGATE_EXPR,
4198 itype, step));
4199 else
4200 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4201 t = fold_build2 (MULT_EXPR, stept,
4202 fold_convert (stept, l), t);
4204 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4206 x = lang_hooks.decls.omp_clause_linear_ctor
4207 (c, new_var, x, t);
4208 gimplify_and_add (x, ilist);
4209 goto do_dtor;
4212 if (POINTER_TYPE_P (TREE_TYPE (x)))
4213 x = fold_build2 (POINTER_PLUS_EXPR,
4214 TREE_TYPE (x), x, t);
4215 else
4216 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4219 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4220 || TREE_ADDRESSABLE (new_var))
4221 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4222 ivar, lvar))
4224 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4226 tree iv = create_tmp_var (TREE_TYPE (new_var));
4227 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4228 gimplify_and_add (x, ilist);
4229 gimple_stmt_iterator gsi
4230 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4231 gassign *g
4232 = gimple_build_assign (unshare_expr (lvar), iv);
4233 gsi_insert_before_without_update (&gsi, g,
4234 GSI_SAME_STMT);
4235 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4236 enum tree_code code = PLUS_EXPR;
4237 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4238 code = POINTER_PLUS_EXPR;
4239 g = gimple_build_assign (iv, code, iv, t);
4240 gsi_insert_before_without_update (&gsi, g,
4241 GSI_SAME_STMT);
4242 break;
4244 x = lang_hooks.decls.omp_clause_copy_ctor
4245 (c, unshare_expr (ivar), x);
4246 gimplify_and_add (x, &llist[0]);
4247 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4248 if (x)
4250 gimple_seq tseq = NULL;
4252 dtor = x;
4253 gimplify_stmt (&dtor, &tseq);
4254 gimple_seq_add_seq (&llist[1], tseq);
4256 break;
4259 x = lang_hooks.decls.omp_clause_copy_ctor
4260 (c, unshare_expr (new_var), x);
4261 gimplify_and_add (x, ilist);
4262 goto do_dtor;
4264 case OMP_CLAUSE__LOOPTEMP_:
4265 gcc_assert (is_taskreg_ctx (ctx));
4266 x = build_outer_var_ref (var, ctx);
4267 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4268 gimplify_and_add (x, ilist);
4269 break;
4271 case OMP_CLAUSE_COPYIN:
4272 by_ref = use_pointer_for_field (var, NULL);
4273 x = build_receiver_ref (var, by_ref, ctx);
4274 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4275 append_to_statement_list (x, &copyin_seq);
4276 copyin_by_ref |= by_ref;
4277 break;
4279 case OMP_CLAUSE_REDUCTION:
4280 /* OpenACC reductions are initialized using the
4281 GOACC_REDUCTION internal function. */
4282 if (is_gimple_omp_oacc (ctx->stmt))
4283 break;
4284 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4286 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4287 gimple *tseq;
4288 x = build_outer_var_ref (var, ctx);
4290 if (omp_is_reference (var)
4291 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4292 TREE_TYPE (x)))
4293 x = build_fold_addr_expr_loc (clause_loc, x);
4294 SET_DECL_VALUE_EXPR (placeholder, x);
4295 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4296 tree new_vard = new_var;
4297 if (omp_is_reference (var))
4299 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4300 new_vard = TREE_OPERAND (new_var, 0);
4301 gcc_assert (DECL_P (new_vard));
4303 if (is_simd
4304 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4305 ivar, lvar))
4307 if (new_vard == new_var)
4309 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4310 SET_DECL_VALUE_EXPR (new_var, ivar);
4312 else
4314 SET_DECL_VALUE_EXPR (new_vard,
4315 build_fold_addr_expr (ivar));
4316 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4318 x = lang_hooks.decls.omp_clause_default_ctor
4319 (c, unshare_expr (ivar),
4320 build_outer_var_ref (var, ctx));
4321 if (x)
4322 gimplify_and_add (x, &llist[0]);
4323 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4325 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4326 lower_omp (&tseq, ctx);
4327 gimple_seq_add_seq (&llist[0], tseq);
4329 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4330 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4331 lower_omp (&tseq, ctx);
4332 gimple_seq_add_seq (&llist[1], tseq);
4333 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4334 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4335 if (new_vard == new_var)
4336 SET_DECL_VALUE_EXPR (new_var, lvar);
4337 else
4338 SET_DECL_VALUE_EXPR (new_vard,
4339 build_fold_addr_expr (lvar));
4340 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4341 if (x)
4343 tseq = NULL;
4344 dtor = x;
4345 gimplify_stmt (&dtor, &tseq);
4346 gimple_seq_add_seq (&llist[1], tseq);
4348 break;
4350 /* If this is a reference to a constant-size reduction var
4351 with a placeholder, we haven't emitted the initializer
4352 for it because that is undesirable if SIMD arrays are used.
4353 But if they aren't used, we need to emit the deferred
4354 initialization now. */
4355 else if (omp_is_reference (var) && is_simd)
4356 handle_simd_reference (clause_loc, new_vard, ilist);
4357 x = lang_hooks.decls.omp_clause_default_ctor
4358 (c, unshare_expr (new_var),
4359 build_outer_var_ref (var, ctx));
4360 if (x)
4361 gimplify_and_add (x, ilist);
4362 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4364 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4365 lower_omp (&tseq, ctx);
4366 gimple_seq_add_seq (ilist, tseq);
4368 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4369 if (is_simd)
4371 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4372 lower_omp (&tseq, ctx);
4373 gimple_seq_add_seq (dlist, tseq);
4374 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4376 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4377 goto do_dtor;
4379 else
4381 x = omp_reduction_init (c, TREE_TYPE (new_var));
4382 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4383 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4385 /* reduction(-:var) sums up the partial results, so it
4386 acts identically to reduction(+:var). */
4387 if (code == MINUS_EXPR)
4388 code = PLUS_EXPR;
4390 tree new_vard = new_var;
4391 if (is_simd && omp_is_reference (var))
4393 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4394 new_vard = TREE_OPERAND (new_var, 0);
4395 gcc_assert (DECL_P (new_vard));
4397 if (is_simd
4398 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4399 ivar, lvar))
4401 tree ref = build_outer_var_ref (var, ctx);
4403 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4405 if (sctx.is_simt)
4407 if (!simt_lane)
4408 simt_lane = create_tmp_var (unsigned_type_node);
4409 x = build_call_expr_internal_loc
4410 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4411 TREE_TYPE (ivar), 2, ivar, simt_lane);
4412 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4413 gimplify_assign (ivar, x, &llist[2]);
4415 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4416 ref = build_outer_var_ref (var, ctx);
4417 gimplify_assign (ref, x, &llist[1]);
4419 if (new_vard != new_var)
4421 SET_DECL_VALUE_EXPR (new_vard,
4422 build_fold_addr_expr (lvar));
4423 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4426 else
4428 if (omp_is_reference (var) && is_simd)
4429 handle_simd_reference (clause_loc, new_vard, ilist);
4430 gimplify_assign (new_var, x, ilist);
4431 if (is_simd)
4433 tree ref = build_outer_var_ref (var, ctx);
4435 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4436 ref = build_outer_var_ref (var, ctx);
4437 gimplify_assign (ref, x, dlist);
4441 break;
4443 default:
4444 gcc_unreachable ();
4449 if (known_eq (sctx.max_vf, 1U))
4450 sctx.is_simt = false;
4452 if (sctx.lane || sctx.is_simt)
4454 uid = create_tmp_var (ptr_type_node, "simduid");
4455 /* Don't warn about simduid being uninitialized: it always is, since
4456 we use it only for its DECL_UID, never for its value. */
4457 TREE_NO_WARNING (uid) = 1;
4458 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4459 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4460 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4461 gimple_omp_for_set_clauses (ctx->stmt, c);
4463 /* Emit calls denoting privatized variables and initializing a pointer to the
4464 structure that holds private variables as fields, after the ompdevlow pass. */
4465 if (sctx.is_simt)
4467 sctx.simt_eargs[0] = uid;
4468 gimple *g
4469 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4470 gimple_call_set_lhs (g, uid);
4471 gimple_seq_add_stmt (ilist, g);
4472 sctx.simt_eargs.release ();
4474 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4475 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4476 gimple_call_set_lhs (g, simtrec);
4477 gimple_seq_add_stmt (ilist, g);
4479 if (sctx.lane)
4481 gimple *g
4482 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4483 gimple_call_set_lhs (g, sctx.lane);
4484 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4485 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4486 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4487 build_int_cst (unsigned_type_node, 0));
4488 gimple_seq_add_stmt (ilist, g);
4489 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
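/* E.g. with simt_vf == 32 the loop below runs five times, with
   SIMT_LANE taking the values 1, 2, 4, 8 and 16; each iteration pairs
   up lanes whose IDs differ by SIMT_LANE via GOMP_SIMT_XCHG_BFLY and
   combines their partial results.  */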
4490 if (llist[2])
4492 tree simt_vf = create_tmp_var (unsigned_type_node);
4493 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4494 gimple_call_set_lhs (g, simt_vf);
4495 gimple_seq_add_stmt (dlist, g);
4497 tree t = build_int_cst (unsigned_type_node, 1);
4498 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4499 gimple_seq_add_stmt (dlist, g);
4501 t = build_int_cst (unsigned_type_node, 0);
4502 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4503 gimple_seq_add_stmt (dlist, g);
4505 tree body = create_artificial_label (UNKNOWN_LOCATION);
4506 tree header = create_artificial_label (UNKNOWN_LOCATION);
4507 tree end = create_artificial_label (UNKNOWN_LOCATION);
4508 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4509 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4511 gimple_seq_add_seq (dlist, llist[2]);
4513 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4514 gimple_seq_add_stmt (dlist, g);
4516 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4517 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4518 gimple_seq_add_stmt (dlist, g);
4520 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4522 for (int i = 0; i < 2; i++)
4523 if (llist[i])
4525 tree vf = create_tmp_var (unsigned_type_node);
4526 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4527 gimple_call_set_lhs (g, vf);
4528 gimple_seq *seq = i == 0 ? ilist : dlist;
4529 gimple_seq_add_stmt (seq, g);
4530 tree t = build_int_cst (unsigned_type_node, 0);
4531 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4532 gimple_seq_add_stmt (seq, g);
4533 tree body = create_artificial_label (UNKNOWN_LOCATION);
4534 tree header = create_artificial_label (UNKNOWN_LOCATION);
4535 tree end = create_artificial_label (UNKNOWN_LOCATION);
4536 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4537 gimple_seq_add_stmt (seq, gimple_build_label (body));
4538 gimple_seq_add_seq (seq, llist[i]);
4539 t = build_int_cst (unsigned_type_node, 1);
4540 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4541 gimple_seq_add_stmt (seq, g);
4542 gimple_seq_add_stmt (seq, gimple_build_label (header));
4543 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4544 gimple_seq_add_stmt (seq, g);
4545 gimple_seq_add_stmt (seq, gimple_build_label (end));
4548 if (sctx.is_simt)
4550 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4551 gimple *g
4552 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4553 gimple_seq_add_stmt (dlist, g);
4556 /* The copyin sequence is not to be executed by the main thread, since
4557 that would result in self-copies. That might be harmless for scalars,
4558 but it certainly is not for C++ operator=. */
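/* I.e. the generated guard below is roughly

     if (__builtin_omp_get_thread_num () != 0)
       <copyin_seq>;  */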
4559 if (copyin_seq)
4561 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4562 0);
4563 x = build2 (NE_EXPR, boolean_type_node, x,
4564 build_int_cst (TREE_TYPE (x), 0));
4565 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4566 gimplify_and_add (x, ilist);
4569 /* If any copyin variable is passed by reference, we must ensure the
4570 master thread doesn't modify it before it is copied over in all
4571 threads. Similarly, for variables in both firstprivate and
4572 lastprivate clauses we need to ensure that the lastprivate copying
4573 happens after the firstprivate copying in all threads. And similarly
4574 for UDRs whose initializer expression refers to omp_orig. */
4575 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4577 /* Don't add any barrier for #pragma omp simd or
4578 #pragma omp distribute. */
4579 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4580 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4581 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4584 /* If max_vf is non-zero, then we can use only a vectorization factor
4585 up to the max_vf we chose. So stick it into the safelen clause. */
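/* E.g. if the user wrote safelen(16) but max_vf ended up as 8, a new
   safelen(8) clause is prepended to the loop's clause list below; the
   same happens when no safelen clause was given at all.  */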
4586 if (maybe_ne (sctx.max_vf, 0U))
4588 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4589 OMP_CLAUSE_SAFELEN);
4590 poly_uint64 safe_len;
4591 if (c == NULL_TREE
4592 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4593 && maybe_gt (safe_len, sctx.max_vf)))
4595 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4596 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4597 sctx.max_vf);
4598 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4599 gimple_omp_for_set_clauses (ctx->stmt, c);
4605 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4606 both parallel and workshare constructs. PREDICATE may be NULL if it's
4607 always true. */
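/* Illustrative example (assumed user source):

     #pragma omp parallel for lastprivate(x)
     for (i = 0; i < n; i++)
       x = a[i];

   The thread executing the sequentially last iteration copies its
   private X back to the original variable; PREDICATE, when non-NULL,
   is the condition identifying that thread.  */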
4609 static void
4610 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4611 omp_context *ctx)
4613 tree x, c, label = NULL, orig_clauses = clauses;
4614 bool par_clauses = false;
4615 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4617 /* Early exit if there are no lastprivate or linear clauses. */
4618 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4619 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4620 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4621 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4622 break;
4623 if (clauses == NULL)
4625 /* If this was a workshare clause, see if it had been combined
4626 with its parallel. In that case, look for the clauses on the
4627 parallel statement itself. */
4628 if (is_parallel_ctx (ctx))
4629 return;
4631 ctx = ctx->outer;
4632 if (ctx == NULL || !is_parallel_ctx (ctx))
4633 return;
4635 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4636 OMP_CLAUSE_LASTPRIVATE);
4637 if (clauses == NULL)
4638 return;
4639 par_clauses = true;
4642 bool maybe_simt = false;
4643 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4644 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4646 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4647 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4648 if (simduid)
4649 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4652 if (predicate)
4654 gcond *stmt;
4655 tree label_true, arm1, arm2;
4656 enum tree_code pred_code = TREE_CODE (predicate);
4658 label = create_artificial_label (UNKNOWN_LOCATION);
4659 label_true = create_artificial_label (UNKNOWN_LOCATION);
4660 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4662 arm1 = TREE_OPERAND (predicate, 0);
4663 arm2 = TREE_OPERAND (predicate, 1);
4664 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4665 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4667 else
4669 arm1 = predicate;
4670 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4671 arm2 = boolean_false_node;
4672 pred_code = NE_EXPR;
4674 if (maybe_simt)
4676 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4677 c = fold_convert (integer_type_node, c);
4678 simtcond = create_tmp_var (integer_type_node);
4679 gimplify_assign (simtcond, c, stmt_list);
4680 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4681 1, simtcond);
4682 c = create_tmp_var (integer_type_node);
4683 gimple_call_set_lhs (g, c);
4684 gimple_seq_add_stmt (stmt_list, g);
4685 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4686 label_true, label);
4688 else
4689 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4690 gimple_seq_add_stmt (stmt_list, stmt);
4691 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4694 for (c = clauses; c ;)
4696 tree var, new_var;
4697 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4699 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4700 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4701 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4703 var = OMP_CLAUSE_DECL (c);
4704 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4705 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4706 && is_taskloop_ctx (ctx))
4708 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4709 new_var = lookup_decl (var, ctx->outer);
4711 else
4713 new_var = lookup_decl (var, ctx);
4714 /* Avoid uninitialized warnings for lastprivate and
4715 for linear iterators. */
4716 if (predicate
4717 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4718 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4719 TREE_NO_WARNING (new_var) = 1;
4722 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4724 tree val = DECL_VALUE_EXPR (new_var);
4725 if (TREE_CODE (val) == ARRAY_REF
4726 && VAR_P (TREE_OPERAND (val, 0))
4727 && lookup_attribute ("omp simd array",
4728 DECL_ATTRIBUTES (TREE_OPERAND (val,
4729 0))))
4731 if (lastlane == NULL)
4733 lastlane = create_tmp_var (unsigned_type_node);
4734 gcall *g
4735 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4736 2, simduid,
4737 TREE_OPERAND (val, 1));
4738 gimple_call_set_lhs (g, lastlane);
4739 gimple_seq_add_stmt (stmt_list, g);
4741 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4742 TREE_OPERAND (val, 0), lastlane,
4743 NULL_TREE, NULL_TREE);
4746 else if (maybe_simt)
4748 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4749 ? DECL_VALUE_EXPR (new_var)
4750 : new_var);
4751 if (simtlast == NULL)
4753 simtlast = create_tmp_var (unsigned_type_node);
4754 gcall *g = gimple_build_call_internal
4755 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4756 gimple_call_set_lhs (g, simtlast);
4757 gimple_seq_add_stmt (stmt_list, g);
4759 x = build_call_expr_internal_loc
4760 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4761 TREE_TYPE (val), 2, val, simtlast);
4762 new_var = unshare_expr (new_var);
4763 gimplify_assign (new_var, x, stmt_list);
4764 new_var = unshare_expr (new_var);
4767 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4768 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4770 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4771 gimple_seq_add_seq (stmt_list,
4772 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4773 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4775 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4776 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4778 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4779 gimple_seq_add_seq (stmt_list,
4780 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4781 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4784 x = NULL_TREE;
4785 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4786 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4788 gcc_checking_assert (is_taskloop_ctx (ctx));
4789 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4790 ctx->outer->outer);
4791 if (is_global_var (ovar))
4792 x = ovar;
4794 if (!x)
4795 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4796 if (omp_is_reference (var))
4797 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4798 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4799 gimplify_and_add (x, stmt_list);
4801 c = OMP_CLAUSE_CHAIN (c);
4802 if (c == NULL && !par_clauses)
4804 /* If this was a workshare clause, see if it had been combined
4805 with its parallel. In that case, continue looking for the
4806 clauses also on the parallel statement itself. */
4807 if (is_parallel_ctx (ctx))
4808 break;
4810 ctx = ctx->outer;
4811 if (ctx == NULL || !is_parallel_ctx (ctx))
4812 break;
4814 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4815 OMP_CLAUSE_LASTPRIVATE);
4816 par_clauses = true;
4820 if (label)
4821 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4824 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4825 (which might be a placeholder). INNER is true if this is an inner
4826 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4827 join markers. Generate the before-loop forking sequence in
4828 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4829 general form of these sequences is
4831 GOACC_REDUCTION_SETUP
4832 GOACC_FORK
4833 GOACC_REDUCTION_INIT
4835 GOACC_REDUCTION_FINI
4836 GOACC_JOIN
4837 GOACC_REDUCTION_TEARDOWN. */
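/* In the code below these map onto temporaries roughly as

     v1 = GOACC_REDUCTION_SETUP (...);           // before the fork
     v2 = GOACC_REDUCTION_INIT (...);            // after the fork
     v3 = GOACC_REDUCTION_FINI (...);            // before the join
     outgoing = GOACC_REDUCTION_TEARDOWN (...);  // after the join  */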
4839 static void
4840 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4841 gcall *fork, gcall *join, gimple_seq *fork_seq,
4842 gimple_seq *join_seq, omp_context *ctx)
4844 gimple_seq before_fork = NULL;
4845 gimple_seq after_fork = NULL;
4846 gimple_seq before_join = NULL;
4847 gimple_seq after_join = NULL;
4848 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4849 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4850 unsigned offset = 0;
4852 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4853 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4855 tree orig = OMP_CLAUSE_DECL (c);
4856 tree var = maybe_lookup_decl (orig, ctx);
4857 tree ref_to_res = NULL_TREE;
4858 tree incoming, outgoing, v1, v2, v3;
4859 bool is_private = false;
4861 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4862 if (rcode == MINUS_EXPR)
4863 rcode = PLUS_EXPR;
4864 else if (rcode == TRUTH_ANDIF_EXPR)
4865 rcode = BIT_AND_EXPR;
4866 else if (rcode == TRUTH_ORIF_EXPR)
4867 rcode = BIT_IOR_EXPR;
4868 tree op = build_int_cst (unsigned_type_node, rcode);
4870 if (!var)
4871 var = orig;
4873 incoming = outgoing = var;
4875 if (!inner)
4877 /* See if an outer construct also reduces this variable. */
4878 omp_context *outer = ctx;
4880 while (omp_context *probe = outer->outer)
4882 enum gimple_code type = gimple_code (probe->stmt);
4883 tree cls;
4885 switch (type)
4887 case GIMPLE_OMP_FOR:
4888 cls = gimple_omp_for_clauses (probe->stmt);
4889 break;
4891 case GIMPLE_OMP_TARGET:
4892 if (gimple_omp_target_kind (probe->stmt)
4893 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4894 goto do_lookup;
4896 cls = gimple_omp_target_clauses (probe->stmt);
4897 break;
4899 default:
4900 goto do_lookup;
4903 outer = probe;
4904 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4905 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4906 && orig == OMP_CLAUSE_DECL (cls))
4908 incoming = outgoing = lookup_decl (orig, probe);
4909 goto has_outer_reduction;
4911 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4912 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4913 && orig == OMP_CLAUSE_DECL (cls))
4915 is_private = true;
4916 goto do_lookup;
4920 do_lookup:
4921 /* This is the outermost construct with this reduction,
4922 see if there's a mapping for it. */
4923 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4924 && maybe_lookup_field (orig, outer) && !is_private)
4926 ref_to_res = build_receiver_ref (orig, false, outer);
4927 if (omp_is_reference (orig))
4928 ref_to_res = build_simple_mem_ref (ref_to_res);
4930 tree type = TREE_TYPE (var);
4931 if (POINTER_TYPE_P (type))
4932 type = TREE_TYPE (type);
4934 outgoing = var;
4935 incoming = omp_reduction_init_op (loc, rcode, type);
4937 else
4939 /* Try to look at enclosing contexts for reduction var,
4940 use original if no mapping found. */
4941 tree t = NULL_TREE;
4942 omp_context *c = ctx->outer;
4943 while (c && !t)
4945 t = maybe_lookup_decl (orig, c);
4946 c = c->outer;
4948 incoming = outgoing = (t ? t : orig);
4951 has_outer_reduction:;
4954 if (!ref_to_res)
4955 ref_to_res = integer_zero_node;
4957 if (omp_is_reference (orig))
4959 tree type = TREE_TYPE (var);
4960 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4962 if (!inner)
4964 tree x = create_tmp_var (TREE_TYPE (type), id);
4965 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4968 v1 = create_tmp_var (type, id);
4969 v2 = create_tmp_var (type, id);
4970 v3 = create_tmp_var (type, id);
4972 gimplify_assign (v1, var, fork_seq);
4973 gimplify_assign (v2, var, fork_seq);
4974 gimplify_assign (v3, var, fork_seq);
4976 var = build_simple_mem_ref (var);
4977 v1 = build_simple_mem_ref (v1);
4978 v2 = build_simple_mem_ref (v2);
4979 v3 = build_simple_mem_ref (v3);
4980 outgoing = build_simple_mem_ref (outgoing);
4982 if (!TREE_CONSTANT (incoming))
4983 incoming = build_simple_mem_ref (incoming);
4985 else
4986 v1 = v2 = v3 = var;
4988 /* Determine position in reduction buffer, which may be used
4989 by the target. The parser has ensured that this is not a
4990 variable-sized type. */
4991 fixed_size_mode mode
4992 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
4993 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
4994 offset = (offset + align - 1) & ~(align - 1);
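/* E.g. with ALIGN == 8, a running OFFSET of 12 is rounded up to 16
   here before this variable's buffer slot is assigned.  */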
4995 tree off = build_int_cst (sizetype, offset);
4996 offset += GET_MODE_SIZE (mode);
4998 if (!init_code)
5000 init_code = build_int_cst (integer_type_node,
5001 IFN_GOACC_REDUCTION_INIT);
5002 fini_code = build_int_cst (integer_type_node,
5003 IFN_GOACC_REDUCTION_FINI);
5004 setup_code = build_int_cst (integer_type_node,
5005 IFN_GOACC_REDUCTION_SETUP);
5006 teardown_code = build_int_cst (integer_type_node,
5007 IFN_GOACC_REDUCTION_TEARDOWN);
5010 tree setup_call
5011 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5012 TREE_TYPE (var), 6, setup_code,
5013 unshare_expr (ref_to_res),
5014 incoming, level, op, off);
5015 tree init_call
5016 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5017 TREE_TYPE (var), 6, init_code,
5018 unshare_expr (ref_to_res),
5019 v1, level, op, off);
5020 tree fini_call
5021 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5022 TREE_TYPE (var), 6, fini_code,
5023 unshare_expr (ref_to_res),
5024 v2, level, op, off);
5025 tree teardown_call
5026 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5027 TREE_TYPE (var), 6, teardown_code,
5028 ref_to_res, v3, level, op, off);
5030 gimplify_assign (v1, setup_call, &before_fork);
5031 gimplify_assign (v2, init_call, &after_fork);
5032 gimplify_assign (v3, fini_call, &before_join);
5033 gimplify_assign (outgoing, teardown_call, &after_join);
5036 /* Now stitch things together. */
5037 gimple_seq_add_seq (fork_seq, before_fork);
5038 if (fork)
5039 gimple_seq_add_stmt (fork_seq, fork);
5040 gimple_seq_add_seq (fork_seq, after_fork);
5042 gimple_seq_add_seq (join_seq, before_join);
5043 if (join)
5044 gimple_seq_add_stmt (join_seq, join);
5045 gimple_seq_add_seq (join_seq, after_join);
5048 /* Generate code to implement the REDUCTION clauses. */
5050 static void
5051 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5053 gimple_seq sub_seq = NULL;
5054 gimple *stmt;
5055 tree x, c;
5056 int count = 0;
5058 /* OpenACC loop reductions are handled elsewhere. */
5059 if (is_gimple_omp_oacc (ctx->stmt))
5060 return;
5062 /* SIMD reductions are handled in lower_rec_input_clauses. */
5063 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5064 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5065 return;
5067 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5068 update in that case, otherwise use a lock. */
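/* E.g. a lone reduction(+:sum) on a scalar becomes a single atomic
   update of the outer SUM, while two or more reductions (or any
   array/UDR reduction) are instead merged inside the
   GOMP_atomic_start/GOMP_atomic_end pair emitted at the end of this
   function.  */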
5069 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5070 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5072 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5073 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5075 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5076 count = -1;
5077 break;
5079 count++;
5082 if (count == 0)
5083 return;
5085 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5087 tree var, ref, new_var, orig_var;
5088 enum tree_code code;
5089 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5091 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5092 continue;
5094 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5095 orig_var = var = OMP_CLAUSE_DECL (c);
5096 if (TREE_CODE (var) == MEM_REF)
5098 var = TREE_OPERAND (var, 0);
5099 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5100 var = TREE_OPERAND (var, 0);
5101 if (TREE_CODE (var) == ADDR_EXPR)
5102 var = TREE_OPERAND (var, 0);
5103 else
5105 /* If this is a pointer- or reference-based array
5106 section, the var could be private in the outer
5107 context, e.g. on an orphaned loop construct. Pretend this
5108 is the private variable's outer reference. */
5109 ccode = OMP_CLAUSE_PRIVATE;
5110 if (TREE_CODE (var) == INDIRECT_REF)
5111 var = TREE_OPERAND (var, 0);
5113 orig_var = var;
5114 if (is_variable_sized (var))
5116 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5117 var = DECL_VALUE_EXPR (var);
5118 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5119 var = TREE_OPERAND (var, 0);
5120 gcc_assert (DECL_P (var));
5123 new_var = lookup_decl (var, ctx);
5124 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5125 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5126 ref = build_outer_var_ref (var, ctx, ccode);
5127 code = OMP_CLAUSE_REDUCTION_CODE (c);
5129 /* reduction(-:var) sums up the partial results, so it acts
5130 identically to reduction(+:var). */
5131 if (code == MINUS_EXPR)
5132 code = PLUS_EXPR;
5134 if (count == 1)
5136 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5138 addr = save_expr (addr);
5139 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5140 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5141 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5142 gimplify_and_add (x, stmt_seqp);
5143 return;
5145 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5147 tree d = OMP_CLAUSE_DECL (c);
5148 tree type = TREE_TYPE (d);
5149 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5150 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5151 tree ptype = build_pointer_type (TREE_TYPE (type));
5152 tree bias = TREE_OPERAND (d, 1);
5153 d = TREE_OPERAND (d, 0);
5154 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5156 tree b = TREE_OPERAND (d, 1);
5157 b = maybe_lookup_decl (b, ctx);
5158 if (b == NULL)
5160 b = TREE_OPERAND (d, 1);
5161 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5163 if (integer_zerop (bias))
5164 bias = b;
5165 else
5167 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5168 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5169 TREE_TYPE (b), b, bias);
5171 d = TREE_OPERAND (d, 0);
5173 /* For references, build_outer_var_ref already performs this, so
5174 only new_var needs a dereference. */
5175 if (TREE_CODE (d) == INDIRECT_REF)
5177 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5178 gcc_assert (omp_is_reference (var) && var == orig_var);
5180 else if (TREE_CODE (d) == ADDR_EXPR)
5182 if (orig_var == var)
5184 new_var = build_fold_addr_expr (new_var);
5185 ref = build_fold_addr_expr (ref);
5188 else
5190 gcc_assert (orig_var == var);
5191 if (omp_is_reference (var))
5192 ref = build_fold_addr_expr (ref);
5194 if (DECL_P (v))
5196 tree t = maybe_lookup_decl (v, ctx);
5197 if (t)
5198 v = t;
5199 else
5200 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5201 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5203 if (!integer_zerop (bias))
5205 bias = fold_convert_loc (clause_loc, sizetype, bias);
5206 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5207 TREE_TYPE (new_var), new_var,
5208 unshare_expr (bias));
5209 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5210 TREE_TYPE (ref), ref, bias);
5212 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5213 ref = fold_convert_loc (clause_loc, ptype, ref);
5214 tree m = create_tmp_var (ptype, NULL);
5215 gimplify_assign (m, new_var, stmt_seqp);
5216 new_var = m;
5217 m = create_tmp_var (ptype, NULL);
5218 gimplify_assign (m, ref, stmt_seqp);
5219 ref = m;
5220 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5221 tree body = create_artificial_label (UNKNOWN_LOCATION);
5222 tree end = create_artificial_label (UNKNOWN_LOCATION);
5223 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5224 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5225 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5226 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5228 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5229 tree decl_placeholder
5230 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5231 SET_DECL_VALUE_EXPR (placeholder, out);
5232 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5233 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5234 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5235 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5236 gimple_seq_add_seq (&sub_seq,
5237 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5238 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5239 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5240 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5242 else
5244 x = build2 (code, TREE_TYPE (out), out, priv);
5245 out = unshare_expr (out);
5246 gimplify_assign (out, x, &sub_seq);
5248 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5249 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5250 gimple_seq_add_stmt (&sub_seq, g);
5251 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5252 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5253 gimple_seq_add_stmt (&sub_seq, g);
5254 g = gimple_build_assign (i, PLUS_EXPR, i,
5255 build_int_cst (TREE_TYPE (i), 1));
5256 gimple_seq_add_stmt (&sub_seq, g);
5257 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5258 gimple_seq_add_stmt (&sub_seq, g);
5259 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5261 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5263 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5265 if (omp_is_reference (var)
5266 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5267 TREE_TYPE (ref)))
5268 ref = build_fold_addr_expr_loc (clause_loc, ref);
5269 SET_DECL_VALUE_EXPR (placeholder, ref);
5270 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5271 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5272 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5273 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5274 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5276 else
5278 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5279 ref = build_outer_var_ref (var, ctx);
5280 gimplify_assign (ref, x, &sub_seq);
5284 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5285 0);
5286 gimple_seq_add_stmt (stmt_seqp, stmt);
5288 gimple_seq_add_seq (stmt_seqp, sub_seq);
5290 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5291 0);
5292 gimple_seq_add_stmt (stmt_seqp, stmt);
5296 /* Generate code to implement the COPYPRIVATE clauses. */
5298 static void
5299 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5300 omp_context *ctx)
5302 tree c;
5304 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5306 tree var, new_var, ref, x;
5307 bool by_ref;
5308 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5310 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5311 continue;
5313 var = OMP_CLAUSE_DECL (c);
5314 by_ref = use_pointer_for_field (var, NULL);
5316 ref = build_sender_ref (var, ctx);
5317 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5318 if (by_ref)
5320 x = build_fold_addr_expr_loc (clause_loc, new_var);
5321 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5323 gimplify_assign (ref, x, slist);
5325 ref = build_receiver_ref (var, false, ctx);
5326 if (by_ref)
5328 ref = fold_convert_loc (clause_loc,
5329 build_pointer_type (TREE_TYPE (new_var)),
5330 ref);
5331 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5333 if (omp_is_reference (var))
5335 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5336 ref = build_simple_mem_ref_loc (clause_loc, ref);
5337 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5339 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5340 gimplify_and_add (x, rlist);
5345 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5346 and REDUCTION from the sender (aka parent) side. */
5348 static void
5349 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5350 omp_context *ctx)
5352 tree c, t;
5353 int ignored_looptemp = 0;
5354 bool is_taskloop = false;
5356 /* For taskloop, ignore the first two _looptemp_ clauses; those are
5357 initialized by GOMP_taskloop. */
5358 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5360 ignored_looptemp = 2;
5361 is_taskloop = true;
5364 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5366 tree val, ref, x, var;
5367 bool by_ref, do_in = false, do_out = false;
5368 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5370 switch (OMP_CLAUSE_CODE (c))
5372 case OMP_CLAUSE_PRIVATE:
5373 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5374 break;
5375 continue;
5376 case OMP_CLAUSE_FIRSTPRIVATE:
5377 case OMP_CLAUSE_COPYIN:
5378 case OMP_CLAUSE_LASTPRIVATE:
5379 case OMP_CLAUSE_REDUCTION:
5380 break;
5381 case OMP_CLAUSE_SHARED:
5382 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5383 break;
5384 continue;
5385 case OMP_CLAUSE__LOOPTEMP_:
5386 if (ignored_looptemp)
5388 ignored_looptemp--;
5389 continue;
5391 break;
5392 default:
5393 continue;
5396 val = OMP_CLAUSE_DECL (c);
5397 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5398 && TREE_CODE (val) == MEM_REF)
5400 val = TREE_OPERAND (val, 0);
5401 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5402 val = TREE_OPERAND (val, 0);
5403 if (TREE_CODE (val) == INDIRECT_REF
5404 || TREE_CODE (val) == ADDR_EXPR)
5405 val = TREE_OPERAND (val, 0);
5406 if (is_variable_sized (val))
5407 continue;
5410 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5411 outer taskloop region. */
5412 omp_context *ctx_for_o = ctx;
5413 if (is_taskloop
5414 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5415 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5416 ctx_for_o = ctx->outer;
5418 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5420 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5421 && is_global_var (var))
5422 continue;
5424 t = omp_member_access_dummy_var (var);
5425 if (t)
5427 var = DECL_VALUE_EXPR (var);
5428 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5429 if (o != t)
5430 var = unshare_and_remap (var, t, o);
5431 else
5432 var = unshare_expr (var);
5435 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5437 /* Handle taskloop firstprivate/lastprivate, where the
5438 lastprivate on GIMPLE_OMP_TASK is represented as
5439 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5440 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5441 x = omp_build_component_ref (ctx->sender_decl, f);
5442 if (use_pointer_for_field (val, ctx))
5443 var = build_fold_addr_expr (var);
5444 gimplify_assign (x, var, ilist);
5445 DECL_ABSTRACT_ORIGIN (f) = NULL;
5446 continue;
5449 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5450 || val == OMP_CLAUSE_DECL (c))
5451 && is_variable_sized (val))
5452 continue;
5453 by_ref = use_pointer_for_field (val, NULL);
5455 switch (OMP_CLAUSE_CODE (c))
5457 case OMP_CLAUSE_FIRSTPRIVATE:
5458 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5459 && !by_ref
5460 && is_task_ctx (ctx))
5461 TREE_NO_WARNING (var) = 1;
5462 do_in = true;
5463 break;
5465 case OMP_CLAUSE_PRIVATE:
5466 case OMP_CLAUSE_COPYIN:
5467 case OMP_CLAUSE__LOOPTEMP_:
5468 do_in = true;
5469 break;
5471 case OMP_CLAUSE_LASTPRIVATE:
5472 if (by_ref || omp_is_reference (val))
5474 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5475 continue;
5476 do_in = true;
5478 else
5480 do_out = true;
5481 if (lang_hooks.decls.omp_private_outer_ref (val))
5482 do_in = true;
5484 break;
5486 case OMP_CLAUSE_REDUCTION:
5487 do_in = true;
5488 if (val == OMP_CLAUSE_DECL (c))
5489 do_out = !(by_ref || omp_is_reference (val));
5490 else
5491 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5492 break;
5494 default:
5495 gcc_unreachable ();
5498 if (do_in)
5500 ref = build_sender_ref (val, ctx);
5501 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5502 gimplify_assign (ref, x, ilist);
5503 if (is_task_ctx (ctx))
5504 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5507 if (do_out)
5509 ref = build_sender_ref (val, ctx);
5510 gimplify_assign (var, ref, olist);
5515 /* Generate code to implement SHARED from the sender (aka parent)
5516 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5517 list things that got automatically shared. */
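/* Illustrative example (assumed user source):

     int a = 1;
     #pragma omp parallel     // 'a' has no explicit clause
     foo (a);

   'a' is shared implicitly, so it appears only as a field of the
   record type; hence the walk over TYPE_FIELDS below rather than over
   the clause list.  */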
5519 static void
5520 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5522 tree var, ovar, nvar, t, f, x, record_type;
5524 if (ctx->record_type == NULL)
5525 return;
5527 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5528 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5530 ovar = DECL_ABSTRACT_ORIGIN (f);
5531 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5532 continue;
5534 nvar = maybe_lookup_decl (ovar, ctx);
5535 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5536 continue;
5538 /* If CTX is a nested parallel directive, find the immediately
5539 enclosing parallel or workshare construct that contains a
5540 mapping for OVAR. */
5541 var = lookup_decl_in_outer_ctx (ovar, ctx);
5543 t = omp_member_access_dummy_var (var);
5544 if (t)
5546 var = DECL_VALUE_EXPR (var);
5547 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5548 if (o != t)
5549 var = unshare_and_remap (var, t, o);
5550 else
5551 var = unshare_expr (var);
5554 if (use_pointer_for_field (ovar, ctx))
5556 x = build_sender_ref (ovar, ctx);
5557 var = build_fold_addr_expr (var);
5558 gimplify_assign (x, var, ilist);
5560 else
5562 x = build_sender_ref (ovar, ctx);
5563 gimplify_assign (x, var, ilist);
5565 if (!TREE_READONLY (var)
5566 /* We don't need to receive a new reference to a result
5567 or parm decl. In fact we may not store to it as we will
5568 invalidate any pending RSO and generate wrong gimple
5569 during inlining. */
5570 && !((TREE_CODE (var) == RESULT_DECL
5571 || TREE_CODE (var) == PARM_DECL)
5572 && DECL_BY_REFERENCE (var)))
5574 x = build_sender_ref (ovar, ctx);
5575 gimplify_assign (var, x, olist);
5581 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5582 other information that must be processed by the target compiler.
5583 Return the maximum number of dimensions the associated loop might
5584 be partitioned over. */
5586 static unsigned
5587 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5588 gimple_seq *seq, omp_context *ctx)
5590 unsigned levels = 0;
5591 unsigned tag = 0;
5592 tree gang_static = NULL_TREE;
5593 auto_vec<tree, 5> args;
5595 args.quick_push (build_int_cst
5596 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5597 args.quick_push (ddvar);
5598 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5600 switch (OMP_CLAUSE_CODE (c))
5602 case OMP_CLAUSE_GANG:
5603 tag |= OLF_DIM_GANG;
5604 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5605 /* static:* is represented by -1, and we can ignore it, as
5606 scheduling is always static. */
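/* E.g. 'gang(static:*)' loses its argument here, whereas
   'gang(static:4)' keeps the 4 and sets OLF_GANG_STATIC below.  */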
5607 if (gang_static && integer_minus_onep (gang_static))
5608 gang_static = NULL_TREE;
5609 levels++;
5610 break;
5612 case OMP_CLAUSE_WORKER:
5613 tag |= OLF_DIM_WORKER;
5614 levels++;
5615 break;
5617 case OMP_CLAUSE_VECTOR:
5618 tag |= OLF_DIM_VECTOR;
5619 levels++;
5620 break;
5622 case OMP_CLAUSE_SEQ:
5623 tag |= OLF_SEQ;
5624 break;
5626 case OMP_CLAUSE_AUTO:
5627 tag |= OLF_AUTO;
5628 break;
5630 case OMP_CLAUSE_INDEPENDENT:
5631 tag |= OLF_INDEPENDENT;
5632 break;
5634 case OMP_CLAUSE_TILE:
5635 tag |= OLF_TILE;
5636 break;
5638 default:
5639 continue;
5643 if (gang_static)
5645 if (DECL_P (gang_static))
5646 gang_static = build_outer_var_ref (gang_static, ctx);
5647 tag |= OLF_GANG_STATIC;
5650 /* In a parallel region, loops are implicitly INDEPENDENT. */
5651 omp_context *tgt = enclosing_target_ctx (ctx);
5652 if (!tgt || is_oacc_parallel (tgt))
5653 tag |= OLF_INDEPENDENT;
5655 if (tag & OLF_TILE)
5656 /* Tiling could use all 3 levels. */
5657 levels = 3;
5658 else
5660 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5661 Ensure at least one level, or two for possible auto
5662 partitioning. */
5663 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5664 << OLF_DIM_BASE) | OLF_SEQ));
5666 if (levels < 1u + maybe_auto)
5667 levels = 1u + maybe_auto;
5670 args.quick_push (build_int_cst (integer_type_node, levels));
5671 args.quick_push (build_int_cst (integer_type_node, tag));
5672 if (gang_static)
5673 args.quick_push (gang_static);
5675 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5676 gimple_set_location (call, loc);
5677 gimple_set_lhs (call, ddvar);
5678 gimple_seq_add_stmt (seq, call);
5680 return levels;
5683 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5684 partitioning level of the enclosed region. */
5686 static void
5687 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5688 tree tofollow, gimple_seq *seq)
5690 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5691 : IFN_UNIQUE_OACC_TAIL_MARK);
5692 tree marker = build_int_cst (integer_type_node, marker_kind);
5693 int nargs = 2 + (tofollow != NULL_TREE);
5694 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5695 marker, ddvar, tofollow);
5696 gimple_set_location (call, loc);
5697 gimple_set_lhs (call, ddvar);
5698 gimple_seq_add_stmt (seq, call);
5701 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5702 the loop clauses, from which we extract reductions. Initialize
5703 HEAD and TAIL. */
5705 static void
5706 lower_oacc_head_tail (location_t loc, tree clauses,
5707 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5709 bool inner = false;
5710 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5711 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5713 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5714 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5715 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5717 gcc_assert (count);
5718 for (unsigned done = 1; count; count--, done++)
5720 gimple_seq fork_seq = NULL;
5721 gimple_seq join_seq = NULL;
5723 tree place = build_int_cst (integer_type_node, -1);
5724 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5725 fork_kind, ddvar, place);
5726 gimple_set_location (fork, loc);
5727 gimple_set_lhs (fork, ddvar);
5729 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5730 join_kind, ddvar, place);
5731 gimple_set_location (join, loc);
5732 gimple_set_lhs (join, ddvar);
5734 /* Mark the beginning of this level sequence. */
5735 if (inner)
5736 lower_oacc_loop_marker (loc, ddvar, true,
5737 build_int_cst (integer_type_node, count),
5738 &fork_seq);
5739 lower_oacc_loop_marker (loc, ddvar, false,
5740 build_int_cst (integer_type_node, done),
5741 &join_seq);
5743 lower_oacc_reductions (loc, clauses, place, inner,
5744 fork, join, &fork_seq, &join_seq, ctx);
5746 /* Append this level to head. */
5747 gimple_seq_add_seq (head, fork_seq);
5748 /* Prepend it to tail. */
5749 gimple_seq_add_seq (&join_seq, *tail);
5750 *tail = join_seq;
5752 inner = true;
5755 /* Mark the end of the sequence. */
5756 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5757 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5760 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5761 catch handler and return it. This prevents programs from violating the
5762 structured block semantics with throws. */
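/* I.e., the returned sequence is, roughly:

	try
	  {
	    BODY;
	  }
	catch
	  {
	    <eh_must_not_throw: the language's eh_protect_cleanup_actions
	     decl (e.g. std::terminate for C++), or __builtin_trap if the
	     language provides none>
	  }
 */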
5764 static gimple_seq
5765 maybe_catch_exception (gimple_seq body)
5767 gimple *g;
5768 tree decl;
5770 if (!flag_exceptions)
5771 return body;
5773 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5774 decl = lang_hooks.eh_protect_cleanup_actions ();
5775 else
5776 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5778 g = gimple_build_eh_must_not_throw (decl);
5779 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5780 GIMPLE_TRY_CATCH);
5782 return gimple_seq_alloc_with_stmt (g);
5786 /* Routines to lower OMP directives into OMP-GIMPLE. */
5788 /* If CTX is a worksharing context inside of a cancellable parallel
5789 region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
5790 and a conditional branch to the parallel's cancel_label to handle
5791 cancellation in the implicit barrier. */
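/* A sketch of the result (the flag becomes nonzero when the enclosing
   parallel has been cancelled):

	<flag> = GIMPLE_OMP_RETURN;		// implicit barrier
	if (<flag> != 0)
	  goto <parallel's cancel_label>;
	<fallthru_label>:
 */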
5793 static void
5794 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5796 gimple *omp_return = gimple_seq_last_stmt (*body);
5797 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5798 if (gimple_omp_return_nowait_p (omp_return))
5799 return;
5800 if (ctx->outer
5801 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5802 && ctx->outer->cancellable)
5804 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5805 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5806 tree lhs = create_tmp_var (c_bool_type);
5807 gimple_omp_return_set_lhs (omp_return, lhs);
5808 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5809 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5810 fold_convert (c_bool_type,
5811 boolean_false_node),
5812 ctx->outer->cancel_label, fallthru_label);
5813 gimple_seq_add_stmt (body, g);
5814 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5818 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5819 CTX is the enclosing OMP context for the current statement. */
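/* A sketch of the ordering built below (reduction, destructor and
   cancellation code only where applicable):

	<ilist: input clause setup>
	GIMPLE_OMP_SECTIONS <clauses, control var .section>
	GIMPLE_OMP_SECTIONS_SWITCH
	<bind: each lowered section body, followed by GIMPLE_OMP_RETURN;
	 the last one preceded by the lastprivate code>
	GIMPLE_OMP_CONTINUE <.section, .section>
	<olist: reduction code>
	<dlist: destructor code>
	GIMPLE_OMP_RETURN <nowait?>
 */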
5821 static void
5822 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5824 tree block, control;
5825 gimple_stmt_iterator tgsi;
5826 gomp_sections *stmt;
5827 gimple *t;
5828 gbind *new_stmt, *bind;
5829 gimple_seq ilist, dlist, olist, new_body;
5831 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5833 push_gimplify_context ();
5835 dlist = NULL;
5836 ilist = NULL;
5837 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5838 &ilist, &dlist, ctx, NULL);
5840 new_body = gimple_omp_body (stmt);
5841 gimple_omp_set_body (stmt, NULL);
5842 tgsi = gsi_start (new_body);
5843 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5845 omp_context *sctx;
5846 gimple *sec_start;
5848 sec_start = gsi_stmt (tgsi);
5849 sctx = maybe_lookup_ctx (sec_start);
5850 gcc_assert (sctx);
5852 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5853 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5854 GSI_CONTINUE_LINKING);
5855 gimple_omp_set_body (sec_start, NULL);
5857 if (gsi_one_before_end_p (tgsi))
5859 gimple_seq l = NULL;
5860 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5861 &l, ctx);
5862 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5863 gimple_omp_section_set_last (sec_start);
5866 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5867 GSI_CONTINUE_LINKING);
5870 block = make_node (BLOCK);
5871 bind = gimple_build_bind (NULL, new_body, block);
5873 olist = NULL;
5874 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5876 block = make_node (BLOCK);
5877 new_stmt = gimple_build_bind (NULL, NULL, block);
5878 gsi_replace (gsi_p, new_stmt, true);
5880 pop_gimplify_context (new_stmt);
5881 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5882 BLOCK_VARS (block) = gimple_bind_vars (bind);
5883 if (BLOCK_VARS (block))
5884 TREE_USED (block) = 1;
5886 new_body = NULL;
5887 gimple_seq_add_seq (&new_body, ilist);
5888 gimple_seq_add_stmt (&new_body, stmt);
5889 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5890 gimple_seq_add_stmt (&new_body, bind);
5892 control = create_tmp_var (unsigned_type_node, ".section");
5893 t = gimple_build_omp_continue (control, control);
5894 gimple_omp_sections_set_control (stmt, control);
5895 gimple_seq_add_stmt (&new_body, t);
5897 gimple_seq_add_seq (&new_body, olist);
5898 if (ctx->cancellable)
5899 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5900 gimple_seq_add_seq (&new_body, dlist);
5902 new_body = maybe_catch_exception (new_body);
5904 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5905 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5906 t = gimple_build_omp_return (nowait);
5907 gimple_seq_add_stmt (&new_body, t);
5908 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5910 gimple_bind_set_body (new_stmt, new_body);
5914 /* A subroutine of lower_omp_single. Expand the simple form of
5915 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5917 if (GOMP_single_start ())
5918 BODY;
5919 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5921 FIXME. It may be better to delay expanding the logic of this until
5922 pass_expand_omp. The expanded logic may make the job of a
5923 synchronization analysis pass more difficult. */
5925 static void
5926 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5928 location_t loc = gimple_location (single_stmt);
5929 tree tlabel = create_artificial_label (loc);
5930 tree flabel = create_artificial_label (loc);
5931 gimple *call, *cond;
5932 tree lhs, decl;
5934 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5935 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5936 call = gimple_build_call (decl, 0);
5937 gimple_call_set_lhs (call, lhs);
5938 gimple_seq_add_stmt (pre_p, call);
5940 cond = gimple_build_cond (EQ_EXPR, lhs,
5941 fold_convert_loc (loc, TREE_TYPE (lhs),
5942 boolean_true_node),
5943 tlabel, flabel);
5944 gimple_seq_add_stmt (pre_p, cond);
5945 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5946 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5947 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5951 /* A subroutine of lower_omp_single. Expand the form of
5952 a GIMPLE_OMP_SINGLE that has a copyprivate clause:
5954 #pragma omp single copyprivate (a, b, c)
5956 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5959 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5961 BODY;
5962 copyout.a = a;
5963 copyout.b = b;
5964 copyout.c = c;
5965 GOMP_single_copy_end (&copyout);
5967 else
5969 a = copyout_p->a;
5970 b = copyout_p->b;
5971 c = copyout_p->c;
5973 GOMP_barrier ();
5976 FIXME. It may be better to delay expanding the logic of this until
5977 pass_expand_omp. The expanded logic may make the job of a
5978 synchronization analysis pass more difficult. */
5980 static void
5981 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5982 omp_context *ctx)
5984 tree ptr_type, t, l0, l1, l2, bfn_decl;
5985 gimple_seq copyin_seq;
5986 location_t loc = gimple_location (single_stmt);
5988 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
5990 ptr_type = build_pointer_type (ctx->record_type);
5991 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
5993 l0 = create_artificial_label (loc);
5994 l1 = create_artificial_label (loc);
5995 l2 = create_artificial_label (loc);
5997 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
5998 t = build_call_expr_loc (loc, bfn_decl, 0);
5999 t = fold_convert_loc (loc, ptr_type, t);
6000 gimplify_assign (ctx->receiver_decl, t, pre_p);
6002 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6003 build_int_cst (ptr_type, 0));
6004 t = build3 (COND_EXPR, void_type_node, t,
6005 build_and_jump (&l0), build_and_jump (&l1));
6006 gimplify_and_add (t, pre_p);
6008 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6010 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6012 copyin_seq = NULL;
6013 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6014 &copyin_seq, ctx);
6016 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6017 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6018 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6019 gimplify_and_add (t, pre_p);
6021 t = build_and_jump (&l2);
6022 gimplify_and_add (t, pre_p);
6024 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6026 gimple_seq_add_seq (pre_p, copyin_seq);
6028 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6032 /* Expand code for an OpenMP single directive. */
6034 static void
6035 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6037 tree block;
6038 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6039 gbind *bind;
6040 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6042 push_gimplify_context ();
6044 block = make_node (BLOCK);
6045 bind = gimple_build_bind (NULL, NULL, block);
6046 gsi_replace (gsi_p, bind, true);
6047 bind_body = NULL;
6048 dlist = NULL;
6049 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6050 &bind_body, &dlist, ctx, NULL);
6051 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6053 gimple_seq_add_stmt (&bind_body, single_stmt);
6055 if (ctx->record_type)
6056 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6057 else
6058 lower_omp_single_simple (single_stmt, &bind_body);
6060 gimple_omp_set_body (single_stmt, NULL);
6062 gimple_seq_add_seq (&bind_body, dlist);
6064 bind_body = maybe_catch_exception (bind_body);
6066 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6067 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6068 gimple *g = gimple_build_omp_return (nowait);
6069 gimple_seq_add_stmt (&bind_body_tail, g);
6070 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6071 if (ctx->record_type)
6073 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6074 tree clobber = build_constructor (ctx->record_type, NULL);
6075 TREE_THIS_VOLATILE (clobber) = 1;
6076 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6077 clobber), GSI_SAME_STMT);
6079 gimple_seq_add_seq (&bind_body, bind_body_tail);
6080 gimple_bind_set_body (bind, bind_body);
6082 pop_gimplify_context (bind);
6084 gimple_bind_append_vars (bind, ctx->block_vars);
6085 BLOCK_VARS (block) = ctx->block_vars;
6086 if (BLOCK_VARS (block))
6087 TREE_USED (block) = 1;
6091 /* Expand code for an OpenMP master directive. */
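/* The lowered form is, roughly:

	if (omp_get_thread_num () != 0)
	  goto <lab>;
	BODY;
	<lab>:
	GIMPLE_OMP_RETURN <nowait>
 */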
6093 static void
6094 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6096 tree block, lab = NULL, x, bfn_decl;
6097 gimple *stmt = gsi_stmt (*gsi_p);
6098 gbind *bind;
6099 location_t loc = gimple_location (stmt);
6100 gimple_seq tseq;
6102 push_gimplify_context ();
6104 block = make_node (BLOCK);
6105 bind = gimple_build_bind (NULL, NULL, block);
6106 gsi_replace (gsi_p, bind, true);
6107 gimple_bind_add_stmt (bind, stmt);
6109 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6110 x = build_call_expr_loc (loc, bfn_decl, 0);
6111 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6112 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6113 tseq = NULL;
6114 gimplify_and_add (x, &tseq);
6115 gimple_bind_add_seq (bind, tseq);
6117 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6118 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6119 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6120 gimple_omp_set_body (stmt, NULL);
6122 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6124 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6126 pop_gimplify_context (bind);
6128 gimple_bind_append_vars (bind, ctx->block_vars);
6129 BLOCK_VARS (block) = ctx->block_vars;
6133 /* Expand code for an OpenMP taskgroup directive. */
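/* A sketch of what is built here; note that only the taskgroup start
   call is emitted by this lowering, the construct's exit being
   represented by the GIMPLE_OMP_RETURN:

	GOMP_taskgroup_start ();
	BODY;
	GIMPLE_OMP_RETURN <nowait>
 */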
6135 static void
6136 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6138 gimple *stmt = gsi_stmt (*gsi_p);
6139 gcall *x;
6140 gbind *bind;
6141 tree block = make_node (BLOCK);
6143 bind = gimple_build_bind (NULL, NULL, block);
6144 gsi_replace (gsi_p, bind, true);
6145 gimple_bind_add_stmt (bind, stmt);
6147 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6149 gimple_bind_add_stmt (bind, x);
6151 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6152 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6153 gimple_omp_set_body (stmt, NULL);
6155 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6157 gimple_bind_append_vars (bind, ctx->block_vars);
6158 BLOCK_VARS (block) = ctx->block_vars;
6162 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6164 static void
6165 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6166 omp_context *ctx)
6168 struct omp_for_data fd;
6169 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6170 return;
6172 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6173 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6174 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6175 if (!fd.ordered)
6176 return;
6178 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6179 tree c = gimple_omp_ordered_clauses (ord_stmt);
6180 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6181 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6183 /* Merge depend clauses from multiple adjacent
6184 #pragma omp ordered depend(sink:...) constructs
6185 into one #pragma omp ordered depend(sink:...), so that
6186 we can optimize them together. */
6187 gimple_stmt_iterator gsi = *gsi_p;
6188 gsi_next (&gsi);
6189 while (!gsi_end_p (gsi))
6191 gimple *stmt = gsi_stmt (gsi);
6192 if (is_gimple_debug (stmt)
6193 || gimple_code (stmt) == GIMPLE_NOP)
6195 gsi_next (&gsi);
6196 continue;
6198 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6199 break;
6200 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6201 c = gimple_omp_ordered_clauses (ord_stmt2);
6202 if (c == NULL_TREE
6203 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6204 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6205 break;
6206 while (*list_p)
6207 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6208 *list_p = c;
6209 gsi_remove (&gsi, true);
6213 /* Canonicalize sink dependence clauses into one folded clause if
6214 possible.
6216 The basic algorithm is to create a sink vector whose first
6217 element is the GCD of all the first elements, and whose remaining
6218 elements are the minimum of the subsequent columns.
6220 We ignore dependence vectors whose first element is zero because
6221 such dependencies are known to be executed by the same thread.
6223 We take into account the direction of the loop, so a minimum
6224 becomes a maximum if the loop is iterating forwards. We also
6225 ignore sink clauses where the loop direction is unknown, or where
6226 the offsets are clearly invalid because they are not a multiple
6227 of the loop increment.
6229 For example:
6231 #pragma omp for ordered(2)
6232 for (i=0; i < N; ++i)
6233 for (j=0; j < M; ++j)
6235 #pragma omp ordered \
6236 depend(sink:i-8,j-2) \
6237 depend(sink:i,j-1) \ // Completely ignored because i+0.
6238 depend(sink:i-4,j-3) \
6239 depend(sink:i-6,j-4)
6240 #pragma omp ordered depend(source)
6243 Folded clause is:
6245 depend(sink:-gcd(8,4,6),-min(2,3,4))
6246 -or-
6247 depend(sink:-2,-2)
6250 /* FIXME: Computing GCDs where the first element is zero is
6251 non-trivial in the presence of collapsed loops. Do this later. */
6252 if (fd.collapse > 1)
6253 return;
6255 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6257 /* wide_int is not a POD so it must be default-constructed. */
6258 for (unsigned i = 0; i != 2 * len - 1; ++i)
6259 new (static_cast<void*>(folded_deps + i)) wide_int ();
6261 tree folded_dep = NULL_TREE;
6262 /* TRUE if the first dimension's offset is negative. */
6263 bool neg_offset_p = false;
6265 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6266 unsigned int i;
6267 while ((c = *list_p) != NULL)
6269 bool remove = false;
6271 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6272 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6273 goto next_ordered_clause;
6275 tree vec;
6276 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6277 vec && TREE_CODE (vec) == TREE_LIST;
6278 vec = TREE_CHAIN (vec), ++i)
6280 gcc_assert (i < len);
6282 /* omp_extract_for_data has canonicalized the condition. */
6283 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6284 || fd.loops[i].cond_code == GT_EXPR);
6285 bool forward = fd.loops[i].cond_code == LT_EXPR;
6286 bool maybe_lexically_later = true;
6288 /* While the committee makes up its mind, bail if we have any
6289 non-constant steps. */
6290 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6291 goto lower_omp_ordered_ret;
6293 tree itype = TREE_TYPE (TREE_VALUE (vec));
6294 if (POINTER_TYPE_P (itype))
6295 itype = sizetype;
6296 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6297 TYPE_PRECISION (itype),
6298 TYPE_SIGN (itype));
6300 /* Ignore invalid offsets that are not multiples of the step. */
6301 if (!wi::multiple_of_p (wi::abs (offset),
6302 wi::abs (wi::to_wide (fd.loops[i].step)),
6303 UNSIGNED))
6305 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6306 "ignoring sink clause with offset that is not "
6307 "a multiple of the loop step");
6308 remove = true;
6309 goto next_ordered_clause;
6312 /* Calculate the first dimension. The first dimension of
6313 the folded dependency vector is the GCD of the first
6314 elements, while ignoring any first elements whose offset
6315 is 0. */
6316 if (i == 0)
6318 /* Ignore dependence vectors whose first dimension is 0. */
6319 if (offset == 0)
6321 remove = true;
6322 goto next_ordered_clause;
6324 else
6326 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6328 error_at (OMP_CLAUSE_LOCATION (c),
6329 "first offset must be in opposite direction "
6330 "of loop iterations");
6331 goto lower_omp_ordered_ret;
6333 if (forward)
6334 offset = -offset;
6335 neg_offset_p = forward;
6336 /* Initialize the first time around. */
6337 if (folded_dep == NULL_TREE)
6339 folded_dep = c;
6340 folded_deps[0] = offset;
6342 else
6343 folded_deps[0] = wi::gcd (folded_deps[0],
6344 offset, UNSIGNED);
6347 /* Calculate minimum for the remaining dimensions. */
6348 else
6350 folded_deps[len + i - 1] = offset;
6351 if (folded_dep == c)
6352 folded_deps[i] = offset;
6353 else if (maybe_lexically_later
6354 && !wi::eq_p (folded_deps[i], offset))
6356 if (forward ^ wi::gts_p (folded_deps[i], offset))
6358 unsigned int j;
6359 folded_dep = c;
6360 for (j = 1; j <= i; j++)
6361 folded_deps[j] = folded_deps[len + j - 1];
6363 else
6364 maybe_lexically_later = false;
6368 gcc_assert (i == len);
6370 remove = true;
6372 next_ordered_clause:
6373 if (remove)
6374 *list_p = OMP_CLAUSE_CHAIN (c);
6375 else
6376 list_p = &OMP_CLAUSE_CHAIN (c);
6379 if (folded_dep)
6381 if (neg_offset_p)
6382 folded_deps[0] = -folded_deps[0];
6384 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6385 if (POINTER_TYPE_P (itype))
6386 itype = sizetype;
6388 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6389 = wide_int_to_tree (itype, folded_deps[0]);
6390 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6391 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6394 lower_omp_ordered_ret:
6396 /* An ordered construct without clauses is equivalent to #pragma omp
6397 ordered threads, whereas we want a nop if we removed all clauses. */
6398 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6399 gsi_replace (gsi_p, gimple_build_nop (), true);
6403 /* Expand code for an OpenMP ordered directive. */
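/* In the plain case (no SIMD, no DEPEND clause) the lowering below
   amounts to, as a sketch:

	GOMP_ordered_start ();
	BODY;
	GOMP_ordered_end ();
	GIMPLE_OMP_RETURN <nowait>

   The SIMD variant uses the IFN_GOMP_SIMD_ORDERED_START/END internal
   functions instead, and the SIMT variant additionally loops so that
   each SIMT lane executes BODY in turn.  */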
6405 static void
6406 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6408 tree block;
6409 gimple *stmt = gsi_stmt (*gsi_p), *g;
6410 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6411 gcall *x;
6412 gbind *bind;
6413 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6414 OMP_CLAUSE_SIMD);
6415 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6416 loop. */
6417 bool maybe_simt
6418 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6419 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6420 OMP_CLAUSE_THREADS);
6422 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6423 OMP_CLAUSE_DEPEND))
6425 /* FIXME: This needs to be moved to the expansion, to verify the various
6426 conditions that are only testable on a cfg with dominators computed;
6427 also, all the depend clauses to be merged might still need to be
6428 available for the runtime checks. */
6429 if (0)
6430 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6431 return;
6434 push_gimplify_context ();
6436 block = make_node (BLOCK);
6437 bind = gimple_build_bind (NULL, NULL, block);
6438 gsi_replace (gsi_p, bind, true);
6439 gimple_bind_add_stmt (bind, stmt);
6441 if (simd)
6443 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6444 build_int_cst (NULL_TREE, threads));
6445 cfun->has_simduid_loops = true;
6447 else
6448 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6450 gimple_bind_add_stmt (bind, x);
6452 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6453 if (maybe_simt)
6455 counter = create_tmp_var (integer_type_node);
6456 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6457 gimple_call_set_lhs (g, counter);
6458 gimple_bind_add_stmt (bind, g);
6460 body = create_artificial_label (UNKNOWN_LOCATION);
6461 test = create_artificial_label (UNKNOWN_LOCATION);
6462 gimple_bind_add_stmt (bind, gimple_build_label (body));
6464 tree simt_pred = create_tmp_var (integer_type_node);
6465 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6466 gimple_call_set_lhs (g, simt_pred);
6467 gimple_bind_add_stmt (bind, g);
6469 tree t = create_artificial_label (UNKNOWN_LOCATION);
6470 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6471 gimple_bind_add_stmt (bind, g);
6473 gimple_bind_add_stmt (bind, gimple_build_label (t));
6475 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6476 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6477 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6478 gimple_omp_set_body (stmt, NULL);
6480 if (maybe_simt)
6482 gimple_bind_add_stmt (bind, gimple_build_label (test));
6483 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6484 gimple_bind_add_stmt (bind, g);
6486 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6487 tree nonneg = create_tmp_var (integer_type_node);
6488 gimple_seq tseq = NULL;
6489 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6490 gimple_bind_add_seq (bind, tseq);
6492 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6493 gimple_call_set_lhs (g, nonneg);
6494 gimple_bind_add_stmt (bind, g);
6496 tree end = create_artificial_label (UNKNOWN_LOCATION);
6497 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6498 gimple_bind_add_stmt (bind, g);
6500 gimple_bind_add_stmt (bind, gimple_build_label (end));
6502 if (simd)
6503 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6504 build_int_cst (NULL_TREE, threads));
6505 else
6506 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6508 gimple_bind_add_stmt (bind, x);
6510 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6512 pop_gimplify_context (bind);
6514 gimple_bind_append_vars (bind, ctx->block_vars);
6515 BLOCK_VARS (block) = gimple_bind_vars (bind);
6519 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6520 substitution of a couple of function calls. The NAMED case, however,
6521 requires that the languages coordinate on a symbol name. It is therefore
6522 best put here in common code. */
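/* A sketch of the lowering for the unnamed case:

	GOMP_critical_start ();
	BODY;
	GOMP_critical_end ();

   The named case instead passes the address of the coordinated
   .gomp_critical_user_<name> symbol to GOMP_critical_name_start and
   GOMP_critical_name_end.  */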
6524 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6526 static void
6527 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6529 tree block;
6530 tree name, lock, unlock;
6531 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6532 gbind *bind;
6533 location_t loc = gimple_location (stmt);
6534 gimple_seq tbody;
6536 name = gimple_omp_critical_name (stmt);
6537 if (name)
6539 tree decl;
6541 if (!critical_name_mutexes)
6542 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6544 tree *n = critical_name_mutexes->get (name);
6545 if (n == NULL)
6547 char *new_str;
6549 decl = create_tmp_var_raw (ptr_type_node);
6551 new_str = ACONCAT ((".gomp_critical_user_",
6552 IDENTIFIER_POINTER (name), NULL));
6553 DECL_NAME (decl) = get_identifier (new_str);
6554 TREE_PUBLIC (decl) = 1;
6555 TREE_STATIC (decl) = 1;
6556 DECL_COMMON (decl) = 1;
6557 DECL_ARTIFICIAL (decl) = 1;
6558 DECL_IGNORED_P (decl) = 1;
6560 varpool_node::finalize_decl (decl);
6562 critical_name_mutexes->put (name, decl);
6564 else
6565 decl = *n;
6567 /* If '#pragma omp critical' is inside an offloaded region or
6568 inside a function marked as offloadable, the symbol must be
6569 marked as offloadable too. */
6570 omp_context *octx;
6571 if (cgraph_node::get (current_function_decl)->offloadable)
6572 varpool_node::get_create (decl)->offloadable = 1;
6573 else
6574 for (octx = ctx->outer; octx; octx = octx->outer)
6575 if (is_gimple_omp_offloaded (octx->stmt))
6577 varpool_node::get_create (decl)->offloadable = 1;
6578 break;
6581 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6582 lock = build_call_expr_loc (loc, lock, 1,
6583 build_fold_addr_expr_loc (loc, decl));
6585 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6586 unlock = build_call_expr_loc (loc, unlock, 1,
6587 build_fold_addr_expr_loc (loc, decl));
6589 else
6591 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6592 lock = build_call_expr_loc (loc, lock, 0);
6594 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6595 unlock = build_call_expr_loc (loc, unlock, 0);
6598 push_gimplify_context ();
6600 block = make_node (BLOCK);
6601 bind = gimple_build_bind (NULL, NULL, block);
6602 gsi_replace (gsi_p, bind, true);
6603 gimple_bind_add_stmt (bind, stmt);
6605 tbody = gimple_bind_body (bind);
6606 gimplify_and_add (lock, &tbody);
6607 gimple_bind_set_body (bind, tbody);
6609 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6610 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6611 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6612 gimple_omp_set_body (stmt, NULL);
6614 tbody = gimple_bind_body (bind);
6615 gimplify_and_add (unlock, &tbody);
6616 gimple_bind_set_body (bind, tbody);
6618 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6620 pop_gimplify_context (bind);
6621 gimple_bind_append_vars (bind, ctx->block_vars);
6622 BLOCK_VARS (block) = gimple_bind_vars (bind);
6625 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6626 for a lastprivate clause. Given a loop control predicate of (V
6627 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6628 is appended to *DLIST, iterator initialization is appended to
6629 *BODY_P. */
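/* E.g., assuming

	#pragma omp for lastprivate (x)
	for (V = N1; V < N2; V++)

   the code appended to *DLIST is gated, roughly, as:

	if (V >= N2)	// i.e. !(V < N2): the last iteration was ours
	  x = <private copy of x>;
 */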
6631 static void
6632 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6633 gimple_seq *dlist, struct omp_context *ctx)
6635 tree clauses, cond, vinit;
6636 enum tree_code cond_code;
6637 gimple_seq stmts;
6639 cond_code = fd->loop.cond_code;
6640 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6642 /* When possible, use a strict equality expression. This can let VRP
6643 type optimizations deduce the value and remove a copy. */
6644 if (tree_fits_shwi_p (fd->loop.step))
6646 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6647 if (step == 1 || step == -1)
6648 cond_code = EQ_EXPR;
6651 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6652 || gimple_omp_for_grid_phony (fd->for_stmt))
6653 cond = omp_grid_lastprivate_predicate (fd);
6654 else
6656 tree n2 = fd->loop.n2;
6657 if (fd->collapse > 1
6658 && TREE_CODE (n2) != INTEGER_CST
6659 && gimple_omp_for_combined_into_p (fd->for_stmt))
6661 struct omp_context *taskreg_ctx = NULL;
6662 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6664 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6665 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6666 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6668 if (gimple_omp_for_combined_into_p (gfor))
6670 gcc_assert (ctx->outer->outer
6671 && is_parallel_ctx (ctx->outer->outer));
6672 taskreg_ctx = ctx->outer->outer;
6674 else
6676 struct omp_for_data outer_fd;
6677 omp_extract_for_data (gfor, &outer_fd, NULL);
6678 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6681 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6682 taskreg_ctx = ctx->outer->outer;
6684 else if (is_taskreg_ctx (ctx->outer))
6685 taskreg_ctx = ctx->outer;
6686 if (taskreg_ctx)
6688 int i;
6689 tree taskreg_clauses
6690 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6691 tree innerc = omp_find_clause (taskreg_clauses,
6692 OMP_CLAUSE__LOOPTEMP_);
6693 gcc_assert (innerc);
6694 for (i = 0; i < fd->collapse; i++)
6696 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6697 OMP_CLAUSE__LOOPTEMP_);
6698 gcc_assert (innerc);
6700 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6701 OMP_CLAUSE__LOOPTEMP_);
6702 if (innerc)
6703 n2 = fold_convert (TREE_TYPE (n2),
6704 lookup_decl (OMP_CLAUSE_DECL (innerc),
6705 taskreg_ctx));
6708 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6711 clauses = gimple_omp_for_clauses (fd->for_stmt);
6712 stmts = NULL;
6713 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6714 if (!gimple_seq_empty_p (stmts))
6716 gimple_seq_add_seq (&stmts, *dlist);
6717 *dlist = stmts;
6719 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6720 vinit = fd->loop.n1;
6721 if (cond_code == EQ_EXPR
6722 && tree_fits_shwi_p (fd->loop.n2)
6723 && ! integer_zerop (fd->loop.n2))
6724 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6725 else
6726 vinit = unshare_expr (vinit);
6728 /* Initialize the iterator variable, so that threads that don't execute
6729 any iterations don't execute the lastprivate clauses by accident. */
6730 gimplify_assign (fd->loop.v, vinit, body_p);
6735 /* Lower code for an OMP loop directive. */
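/* A sketch of the overall layout built below (OpenACC markers and the
   cancellation label only where applicable):

	<input clause setup>  <pre-body>  <lowered bound temporaries>
	<OpenACC head>
	<lastprivate iterator initialization>
	GIMPLE_OMP_FOR <clauses, header>
	<loop body>
	GIMPLE_OMP_CONTINUE <V, V>
	<reduction code>  <destructor code>
	GIMPLE_OMP_RETURN <nowait?>
	<OpenACC tail>
 */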
6737 static void
6738 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6740 tree *rhs_p, block;
6741 struct omp_for_data fd, *fdp = NULL;
6742 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6743 gbind *new_stmt;
6744 gimple_seq omp_for_body, body, dlist;
6745 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6746 size_t i;
6748 push_gimplify_context ();
6750 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6752 block = make_node (BLOCK);
6753 new_stmt = gimple_build_bind (NULL, NULL, block);
6754 /* Replace at gsi right away, so that 'stmt' is no longer a member
6755 of a sequence, as we're going to add it to a different
6756 one below. */
6757 gsi_replace (gsi_p, new_stmt, true);
6759 /* Move declaration of temporaries in the loop body before we make
6760 it go away. */
6761 omp_for_body = gimple_omp_body (stmt);
6762 if (!gimple_seq_empty_p (omp_for_body)
6763 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6765 gbind *inner_bind
6766 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6767 tree vars = gimple_bind_vars (inner_bind);
6768 gimple_bind_append_vars (new_stmt, vars);
6769 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6770 keep them on the inner_bind and its block. */
6771 gimple_bind_set_vars (inner_bind, NULL_TREE);
6772 if (gimple_bind_block (inner_bind))
6773 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6776 if (gimple_omp_for_combined_into_p (stmt))
6778 omp_extract_for_data (stmt, &fd, NULL);
6779 fdp = &fd;
6781 /* We need two temporaries with fd.loop.v type (istart/iend)
6782 and then (fd.collapse - 1) temporaries with the same
6783 type for count2 ... countN-1 vars if not constant. */
6784 size_t count = 2;
6785 tree type = fd.iter_type;
6786 if (fd.collapse > 1
6787 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6788 count += fd.collapse - 1;
6789 bool taskreg_for
6790 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6791 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6792 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6793 tree simtc = NULL;
6794 tree clauses = *pc;
6795 if (taskreg_for)
6796 outerc
6797 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6798 OMP_CLAUSE__LOOPTEMP_);
6799 if (ctx->simt_stmt)
6800 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6801 OMP_CLAUSE__LOOPTEMP_);
6802 for (i = 0; i < count; i++)
6804 tree temp;
6805 if (taskreg_for)
6807 gcc_assert (outerc);
6808 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6809 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6810 OMP_CLAUSE__LOOPTEMP_);
6812 else
6814 /* If there are 2 adjacent SIMD stmts, one with _simt_
6815 clause, another without, make sure they have the same
6816 decls in _looptemp_ clauses, because the outer stmt
6817 they are combined into will look up just one inner_stmt. */
6818 if (ctx->simt_stmt)
6819 temp = OMP_CLAUSE_DECL (simtc);
6820 else
6821 temp = create_tmp_var (type);
6822 insert_decl_map (&ctx->outer->cb, temp, temp);
6824 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6825 OMP_CLAUSE_DECL (*pc) = temp;
6826 pc = &OMP_CLAUSE_CHAIN (*pc);
6827 if (ctx->simt_stmt)
6828 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6829 OMP_CLAUSE__LOOPTEMP_);
6831 *pc = clauses;
6834 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6835 dlist = NULL;
6836 body = NULL;
6837 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6838 fdp);
6839 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6841 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6843 /* Lower the header expressions. At this point, we can assume that
6844 the header is of the form:
6846 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6848 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6849 using the .omp_data_s mapping, if needed. */
6850 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6852 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6853 if (!is_gimple_min_invariant (*rhs_p))
6854 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6855 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6856 recompute_tree_invariant_for_addr_expr (*rhs_p);
6858 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6859 if (!is_gimple_min_invariant (*rhs_p))
6860 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6861 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6862 recompute_tree_invariant_for_addr_expr (*rhs_p);
6864 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6865 if (!is_gimple_min_invariant (*rhs_p))
6866 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6869 /* Once lowered, extract the bounds and clauses. */
6870 omp_extract_for_data (stmt, &fd, NULL);
6872 if (is_gimple_omp_oacc (ctx->stmt)
6873 && !ctx_in_oacc_kernels_region (ctx))
6874 lower_oacc_head_tail (gimple_location (stmt),
6875 gimple_omp_for_clauses (stmt),
6876 &oacc_head, &oacc_tail, ctx);
6878 /* Add OpenACC partitioning and reduction markers just before the loop. */
6879 if (oacc_head)
6880 gimple_seq_add_seq (&body, oacc_head);
6882 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6884 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6885 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6886 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6887 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6889 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6890 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6891 OMP_CLAUSE_LINEAR_STEP (c)
6892 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6893 ctx);
6896 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6897 && gimple_omp_for_grid_phony (stmt));
6898 if (!phony_loop)
6899 gimple_seq_add_stmt (&body, stmt);
6900 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6902 if (!phony_loop)
6903 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6904 fd.loop.v));
6906 /* After the loop, add exit clauses. */
6907 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6909 if (ctx->cancellable)
6910 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6912 gimple_seq_add_seq (&body, dlist);
6914 body = maybe_catch_exception (body);
6916 if (!phony_loop)
6918 /* Region exit marker goes at the end of the loop body. */
6919 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6920 maybe_add_implicit_barrier_cancel (ctx, &body);
6923 /* Add OpenACC joining and reduction markers just after the loop. */
6924 if (oacc_tail)
6925 gimple_seq_add_seq (&body, oacc_tail);
6927 pop_gimplify_context (new_stmt);
6929 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6930 maybe_remove_omp_member_access_dummy_vars (new_stmt);
6931 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6932 if (BLOCK_VARS (block))
6933 TREE_USED (block) = 1;
6935 gimple_bind_set_body (new_stmt, body);
6936 gimple_omp_set_body (stmt, NULL);
6937 gimple_omp_for_set_pre_body (stmt, NULL);
6940 /* Callback for walk_stmts. Check whether the walked body consists of
6941 exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS and nothing else. */
6943 static tree
6944 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6945 bool *handled_ops_p,
6946 struct walk_stmt_info *wi)
6948 int *info = (int *) wi->info;
6949 gimple *stmt = gsi_stmt (*gsi_p);
6951 *handled_ops_p = true;
6952 switch (gimple_code (stmt))
6954 WALK_SUBSTMTS;
6956 case GIMPLE_DEBUG:
6957 break;
6958 case GIMPLE_OMP_FOR:
6959 case GIMPLE_OMP_SECTIONS:
6960 *info = *info == 0 ? 1 : -1;
6961 break;
6962 default:
6963 *info = -1;
6964 break;
6966 return NULL;
6969 struct omp_taskcopy_context
6971 /* This field must be at the beginning, as we do "inheritance": Some
6972 callback functions for tree-inline.c (e.g., omp_copy_decl)
6973 receive a copy_body_data pointer that is up-casted to an
6974 omp_context pointer. */
6975 copy_body_data cb;
6976 omp_context *ctx;
6979 static tree
6980 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6982 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6984 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6985 return create_tmp_var (TREE_TYPE (var));
6987 return var;
6990 static tree
6991 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6993 tree name, new_fields = NULL, type, f;
6995 type = lang_hooks.types.make_type (RECORD_TYPE);
6996 name = DECL_NAME (TYPE_NAME (orig_type));
6997 name = build_decl (gimple_location (tcctx->ctx->stmt),
6998 TYPE_DECL, name, type);
6999 TYPE_NAME (type) = name;
7001 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7003 tree new_f = copy_node (f);
7004 DECL_CONTEXT (new_f) = type;
7005 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7006 TREE_CHAIN (new_f) = new_fields;
7007 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7008 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7009 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7010 &tcctx->cb, NULL);
7011 new_fields = new_f;
7012 tcctx->cb.decl_map->put (f, new_f);
7014 TYPE_FIELDS (type) = nreverse (new_fields);
7015 layout_type (type);
7016 return type;
7019 /* Create task copyfn. */
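/* As a sketch of its output, assuming one shared variable S accessed
   through a pointer field and one non-VLA firstprivate F (ARG points
   to the task's record, SARG to the sender's record):

	arg->s = sarg->s;				// copy the pointer
	arg->f = <omp_clause_copy_ctor> (sarg->f);	// copy construct
	return;
 */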
7021 static void
7022 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7024 struct function *child_cfun;
7025 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7026 tree record_type, srecord_type, bind, list;
7027 bool record_needs_remap = false, srecord_needs_remap = false;
7028 splay_tree_node n;
7029 struct omp_taskcopy_context tcctx;
7030 location_t loc = gimple_location (task_stmt);
7031 size_t looptempno = 0;
7033 child_fn = gimple_omp_task_copy_fn (task_stmt);
7034 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7035 gcc_assert (child_cfun->cfg == NULL);
7036 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7038 /* Reset DECL_CONTEXT on function arguments. */
7039 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7040 DECL_CONTEXT (t) = child_fn;
7042 /* Populate the function. */
7043 push_gimplify_context ();
7044 push_cfun (child_cfun);
7046 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7047 TREE_SIDE_EFFECTS (bind) = 1;
7048 list = NULL;
7049 DECL_SAVED_TREE (child_fn) = bind;
7050 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7052 /* Remap src and dst argument types if needed. */
7053 record_type = ctx->record_type;
7054 srecord_type = ctx->srecord_type;
7055 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7056 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7058 record_needs_remap = true;
7059 break;
7061 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7062 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7064 srecord_needs_remap = true;
7065 break;
7068 if (record_needs_remap || srecord_needs_remap)
7070 memset (&tcctx, '\0', sizeof (tcctx));
7071 tcctx.cb.src_fn = ctx->cb.src_fn;
7072 tcctx.cb.dst_fn = child_fn;
7073 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7074 gcc_checking_assert (tcctx.cb.src_node);
7075 tcctx.cb.dst_node = tcctx.cb.src_node;
7076 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7077 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7078 tcctx.cb.eh_lp_nr = 0;
7079 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7080 tcctx.cb.decl_map = new hash_map<tree, tree>;
7081 tcctx.ctx = ctx;
7083 if (record_needs_remap)
7084 record_type = task_copyfn_remap_type (&tcctx, record_type);
7085 if (srecord_needs_remap)
7086 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7088 else
7089 tcctx.cb.decl_map = NULL;
7091 arg = DECL_ARGUMENTS (child_fn);
7092 TREE_TYPE (arg) = build_pointer_type (record_type);
7093 sarg = DECL_CHAIN (arg);
7094 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7096 /* First pass: initialize temporaries used in record_type and srecord_type
7097 sizes and field offsets. */
7098 if (tcctx.cb.decl_map)
7099 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7100 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7102 tree *p;
7104 decl = OMP_CLAUSE_DECL (c);
7105 p = tcctx.cb.decl_map->get (decl);
7106 if (p == NULL)
7107 continue;
7108 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7109 sf = (tree) n->value;
7110 sf = *tcctx.cb.decl_map->get (sf);
7111 src = build_simple_mem_ref_loc (loc, sarg);
7112 src = omp_build_component_ref (src, sf);
7113 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7114 append_to_statement_list (t, &list);
7117 /* Second pass: copy shared var pointers and copy construct non-VLA
7118 firstprivate vars. */
7119 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7120 switch (OMP_CLAUSE_CODE (c))
7122 splay_tree_key key;
7123 case OMP_CLAUSE_SHARED:
7124 decl = OMP_CLAUSE_DECL (c);
7125 key = (splay_tree_key) decl;
7126 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7127 key = (splay_tree_key) &DECL_UID (decl);
7128 n = splay_tree_lookup (ctx->field_map, key);
7129 if (n == NULL)
7130 break;
7131 f = (tree) n->value;
7132 if (tcctx.cb.decl_map)
7133 f = *tcctx.cb.decl_map->get (f);
7134 n = splay_tree_lookup (ctx->sfield_map, key);
7135 sf = (tree) n->value;
7136 if (tcctx.cb.decl_map)
7137 sf = *tcctx.cb.decl_map->get (sf);
7138 src = build_simple_mem_ref_loc (loc, sarg);
7139 src = omp_build_component_ref (src, sf);
7140 dst = build_simple_mem_ref_loc (loc, arg);
7141 dst = omp_build_component_ref (dst, f);
7142 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7143 append_to_statement_list (t, &list);
7144 break;
7145 case OMP_CLAUSE__LOOPTEMP_:
7146 /* Fields for the first two _looptemp_ clauses are initialized by
7147 GOMP_taskloop*; the rest are handled like firstprivate. */
7148 if (looptempno < 2)
7150 looptempno++;
7151 break;
7153 /* FALLTHRU */
7154 case OMP_CLAUSE_FIRSTPRIVATE:
7155 decl = OMP_CLAUSE_DECL (c);
7156 if (is_variable_sized (decl))
7157 break;
7158 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7159 if (n == NULL)
7160 break;
7161 f = (tree) n->value;
7162 if (tcctx.cb.decl_map)
7163 f = *tcctx.cb.decl_map->get (f);
7164 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7165 if (n != NULL)
7167 sf = (tree) n->value;
7168 if (tcctx.cb.decl_map)
7169 sf = *tcctx.cb.decl_map->get (sf);
7170 src = build_simple_mem_ref_loc (loc, sarg);
7171 src = omp_build_component_ref (src, sf);
7172 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7173 src = build_simple_mem_ref_loc (loc, src);
7175 else
7176 src = decl;
7177 dst = build_simple_mem_ref_loc (loc, arg);
7178 dst = omp_build_component_ref (dst, f);
7179 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__LOOPTEMP_)
7180 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7181 else
7182 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7183 append_to_statement_list (t, &list);
7184 break;
7185 case OMP_CLAUSE_PRIVATE:
7186 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7187 break;
7188 decl = OMP_CLAUSE_DECL (c);
7189 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7190 f = (tree) n->value;
7191 if (tcctx.cb.decl_map)
7192 f = *tcctx.cb.decl_map->get (f);
7193 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7194 if (n != NULL)
7196 sf = (tree) n->value;
7197 if (tcctx.cb.decl_map)
7198 sf = *tcctx.cb.decl_map->get (sf);
7199 src = build_simple_mem_ref_loc (loc, sarg);
7200 src = omp_build_component_ref (src, sf);
7201 if (use_pointer_for_field (decl, NULL))
7202 src = build_simple_mem_ref_loc (loc, src);
7204 else
7205 src = decl;
7206 dst = build_simple_mem_ref_loc (loc, arg);
7207 dst = omp_build_component_ref (dst, f);
7208 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7209 append_to_statement_list (t, &list);
7210 break;
7211 default:
7212 break;
7215 /* Last pass: handle VLA firstprivates. */
7216 if (tcctx.cb.decl_map)
7217 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7218 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7220 tree ind, ptr, df;
7222 decl = OMP_CLAUSE_DECL (c);
7223 if (!is_variable_sized (decl))
7224 continue;
7225 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7226 if (n == NULL)
7227 continue;
7228 f = (tree) n->value;
7229 f = *tcctx.cb.decl_map->get (f);
7230 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7231 ind = DECL_VALUE_EXPR (decl);
7232 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7233 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7234 n = splay_tree_lookup (ctx->sfield_map,
7235 (splay_tree_key) TREE_OPERAND (ind, 0));
7236 sf = (tree) n->value;
7237 sf = *tcctx.cb.decl_map->get (sf);
7238 src = build_simple_mem_ref_loc (loc, sarg);
7239 src = omp_build_component_ref (src, sf);
7240 src = build_simple_mem_ref_loc (loc, src);
7241 dst = build_simple_mem_ref_loc (loc, arg);
7242 dst = omp_build_component_ref (dst, f);
7243 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7244 append_to_statement_list (t, &list);
7245 n = splay_tree_lookup (ctx->field_map,
7246 (splay_tree_key) TREE_OPERAND (ind, 0));
7247 df = (tree) n->value;
7248 df = *tcctx.cb.decl_map->get (df);
7249 ptr = build_simple_mem_ref_loc (loc, arg);
7250 ptr = omp_build_component_ref (ptr, df);
7251 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7252 build_fold_addr_expr_loc (loc, dst));
7253 append_to_statement_list (t, &list);
7256 t = build1 (RETURN_EXPR, void_type_node, NULL);
7257 append_to_statement_list (t, &list);
7259 if (tcctx.cb.decl_map)
7260 delete tcctx.cb.decl_map;
7261 pop_gimplify_context (NULL);
7262 BIND_EXPR_BODY (bind) = list;
7263 pop_cfun ();
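/* Lower the depend clauses in *PCLAUSES into an address array handed to
   the runtime.  A sketch of the layout, e.g. for
   depend(out:a) depend(in:b,c) (assuming gimplification has already
   rewritten the clause decls into addresses):

	array[0] = 3;	// total number of depend addresses
	array[1] = 1;	// out/inout addresses, which are listed first
	array[2] = &a;	// the out/inout entries
	array[3] = &b;	// the in entries
	array[4] = &c;

   A new OMP_CLAUSE_DEPEND clause pointing at ARRAY is prepended to
   *PCLAUSES, and a clobber of ARRAY for after the construct is added
   to *OSEQ.  */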
7266 static void
7267 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7269 tree c, clauses;
7270 gimple *g;
7271 size_t n_in = 0, n_out = 0, idx = 2, i;
7273 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7274 gcc_assert (clauses);
7275 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7276 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7277 switch (OMP_CLAUSE_DEPEND_KIND (c))
7279 case OMP_CLAUSE_DEPEND_IN:
7280 n_in++;
7281 break;
7282 case OMP_CLAUSE_DEPEND_OUT:
7283 case OMP_CLAUSE_DEPEND_INOUT:
7284 n_out++;
7285 break;
7286 case OMP_CLAUSE_DEPEND_SOURCE:
7287 case OMP_CLAUSE_DEPEND_SINK:
7288 /* FALLTHRU */
7289 default:
7290 gcc_unreachable ();
7292 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7293 tree array = create_tmp_var (type);
7294 TREE_ADDRESSABLE (array) = 1;
7295 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7296 NULL_TREE);
7297 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7298 gimple_seq_add_stmt (iseq, g);
7299 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7300 NULL_TREE);
7301 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7302 gimple_seq_add_stmt (iseq, g);
7303 for (i = 0; i < 2; i++)
7305 if ((i ? n_in : n_out) == 0)
7306 continue;
7307 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7308 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7309 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7311 tree t = OMP_CLAUSE_DECL (c);
7312 t = fold_convert (ptr_type_node, t);
7313 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7314 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7315 NULL_TREE, NULL_TREE);
7316 g = gimple_build_assign (r, t);
7317 gimple_seq_add_stmt (iseq, g);
7320 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7321 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7322 OMP_CLAUSE_CHAIN (c) = *pclauses;
7323 *pclauses = c;
7324 tree clobber = build_constructor (type, NULL);
7325 TREE_THIS_VOLATILE (clobber) = 1;
7326 g = gimple_build_assign (array, clobber);
7327 gimple_seq_add_stmt (oseq, g);
7330 /* Lower the OpenMP parallel or task directive in the current statement
7331 in GSI_P. CTX holds context information for the directive. */
7333 static void
7334 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7336 tree clauses;
7337 tree child_fn, t;
7338 gimple *stmt = gsi_stmt (*gsi_p);
7339 gbind *par_bind, *bind, *dep_bind = NULL;
7340 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7341 location_t loc = gimple_location (stmt);
7343 clauses = gimple_omp_taskreg_clauses (stmt);
7344 par_bind
7345 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7346 par_body = gimple_bind_body (par_bind);
7347 child_fn = ctx->cb.dst_fn;
7348 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7349 && !gimple_omp_parallel_combined_p (stmt))
7351 struct walk_stmt_info wi;
7352 int ws_num = 0;
7354 memset (&wi, 0, sizeof (wi));
7355 wi.info = &ws_num;
7356 wi.val_only = true;
7357 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7358 if (ws_num == 1)
7359 gimple_omp_parallel_set_combined_p (stmt, true);
7361 gimple_seq dep_ilist = NULL;
7362 gimple_seq dep_olist = NULL;
7363 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7364 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7366 push_gimplify_context ();
7367 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7368 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7369 &dep_ilist, &dep_olist);
7372 if (ctx->srecord_type)
7373 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7375 push_gimplify_context ();
7377 par_olist = NULL;
7378 par_ilist = NULL;
7379 par_rlist = NULL;
7380 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7381 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7382 if (phony_construct && ctx->record_type)
7384 gcc_checking_assert (!ctx->receiver_decl);
7385 ctx->receiver_decl = create_tmp_var
7386 (build_reference_type (ctx->record_type), ".omp_rec");
7388 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7389 lower_omp (&par_body, ctx);
7390 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7391 lower_reduction_clauses (clauses, &par_rlist, ctx);
7393 /* Declare all the variables created by mapping and the variables
7394 declared in the scope of the parallel body. */
7395 record_vars_into (ctx->block_vars, child_fn);
7396 maybe_remove_omp_member_access_dummy_vars (par_bind);
7397 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7399 if (ctx->record_type)
7401 ctx->sender_decl
7402 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7403 : ctx->record_type, ".omp_data_o");
7404 DECL_NAMELESS (ctx->sender_decl) = 1;
7405 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7406 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7409 olist = NULL;
7410 ilist = NULL;
7411 lower_send_clauses (clauses, &ilist, &olist, ctx);
7412 lower_send_shared_vars (&ilist, &olist, ctx);
7414 if (ctx->record_type)
7416 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7417 TREE_THIS_VOLATILE (clobber) = 1;
7418 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7419 clobber));
7422 /* Once all the expansions are done, sequence all the different
7423 fragments inside gimple_omp_body. */
7425 new_body = NULL;
7427 if (ctx->record_type)
7429 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7430 /* fixup_child_record_type might have changed receiver_decl's type. */
7431 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7432 gimple_seq_add_stmt (&new_body,
7433 gimple_build_assign (ctx->receiver_decl, t));
7436 gimple_seq_add_seq (&new_body, par_ilist);
7437 gimple_seq_add_seq (&new_body, par_body);
7438 gimple_seq_add_seq (&new_body, par_rlist);
7439 if (ctx->cancellable)
7440 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7441 gimple_seq_add_seq (&new_body, par_olist);
7442 new_body = maybe_catch_exception (new_body);
7443 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7444 gimple_seq_add_stmt (&new_body,
7445 gimple_build_omp_continue (integer_zero_node,
7446 integer_zero_node));
7447 if (!phony_construct)
7449 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7450 gimple_omp_set_body (stmt, new_body);
7453 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7454 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7455 gimple_bind_add_seq (bind, ilist);
7456 if (!phony_construct)
7457 gimple_bind_add_stmt (bind, stmt);
7458 else
7459 gimple_bind_add_seq (bind, new_body);
7460 gimple_bind_add_seq (bind, olist);
7462 pop_gimplify_context (NULL);
7464 if (dep_bind)
7466 gimple_bind_add_seq (dep_bind, dep_ilist);
7467 gimple_bind_add_stmt (dep_bind, bind);
7468 gimple_bind_add_seq (dep_bind, dep_olist);
7469 pop_gimplify_context (dep_bind);
7473 /* Lower the GIMPLE_OMP_TARGET in the current statement
7474 in GSI_P. CTX holds context information for the directive. */
7476 static void
7477 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7479 tree clauses;
7480 tree child_fn, t, c;
7481 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7482 gbind *tgt_bind, *bind, *dep_bind = NULL;
7483 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7484 location_t loc = gimple_location (stmt);
7485 bool offloaded, data_region;
7486 unsigned int map_cnt = 0;
7488 offloaded = is_gimple_omp_offloaded (stmt);
7489 switch (gimple_omp_target_kind (stmt))
7491 case GF_OMP_TARGET_KIND_REGION:
7492 case GF_OMP_TARGET_KIND_UPDATE:
7493 case GF_OMP_TARGET_KIND_ENTER_DATA:
7494 case GF_OMP_TARGET_KIND_EXIT_DATA:
7495 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7496 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7497 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7498 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7499 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7500 data_region = false;
7501 break;
7502 case GF_OMP_TARGET_KIND_DATA:
7503 case GF_OMP_TARGET_KIND_OACC_DATA:
7504 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7505 data_region = true;
7506 break;
7507 default:
7508 gcc_unreachable ();
7511 clauses = gimple_omp_target_clauses (stmt);
7513 gimple_seq dep_ilist = NULL;
7514 gimple_seq dep_olist = NULL;
7515 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7517 push_gimplify_context ();
7518 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7519 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7520 &dep_ilist, &dep_olist);
7523 tgt_bind = NULL;
7524 tgt_body = NULL;
7525 if (offloaded)
7527 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7528 tgt_body = gimple_bind_body (tgt_bind);
7530 else if (data_region)
7531 tgt_body = gimple_omp_body (stmt);
7532 child_fn = ctx->cb.dst_fn;
7534 push_gimplify_context ();
7535 fplist = NULL;
7537 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7538 switch (OMP_CLAUSE_CODE (c))
7540 tree var, x;
7542 default:
7543 break;
7544 case OMP_CLAUSE_MAP:
7545 #if CHECKING_P
7546 /* First check what we're prepared to handle in the following. */
7547 switch (OMP_CLAUSE_MAP_KIND (c))
7549 case GOMP_MAP_ALLOC:
7550 case GOMP_MAP_TO:
7551 case GOMP_MAP_FROM:
7552 case GOMP_MAP_TOFROM:
7553 case GOMP_MAP_POINTER:
7554 case GOMP_MAP_TO_PSET:
7555 case GOMP_MAP_DELETE:
7556 case GOMP_MAP_RELEASE:
7557 case GOMP_MAP_ALWAYS_TO:
7558 case GOMP_MAP_ALWAYS_FROM:
7559 case GOMP_MAP_ALWAYS_TOFROM:
7560 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7561 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7562 case GOMP_MAP_STRUCT:
7563 case GOMP_MAP_ALWAYS_POINTER:
7564 break;
7565 case GOMP_MAP_FORCE_ALLOC:
7566 case GOMP_MAP_FORCE_TO:
7567 case GOMP_MAP_FORCE_FROM:
7568 case GOMP_MAP_FORCE_TOFROM:
7569 case GOMP_MAP_FORCE_PRESENT:
7570 case GOMP_MAP_FORCE_DEVICEPTR:
7571 case GOMP_MAP_DEVICE_RESIDENT:
7572 case GOMP_MAP_LINK:
7573 gcc_assert (is_gimple_omp_oacc (stmt));
7574 break;
7575 default:
7576 gcc_unreachable ();
7578 #endif
7579 /* FALLTHRU */
7580 case OMP_CLAUSE_TO:
7581 case OMP_CLAUSE_FROM:
7582 oacc_firstprivate:
7583 var = OMP_CLAUSE_DECL (c);
7584 if (!DECL_P (var))
7586 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7587 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7588 && (OMP_CLAUSE_MAP_KIND (c)
7589 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7590 map_cnt++;
7591 continue;
7594 if (DECL_SIZE (var)
7595 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7597 tree var2 = DECL_VALUE_EXPR (var);
7598 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7599 var2 = TREE_OPERAND (var2, 0);
7600 gcc_assert (DECL_P (var2));
7601 var = var2;
7604 if (offloaded
7605 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7606 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7607 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7609 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7611 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7612 && varpool_node::get_create (var)->offloadable)
7613 continue;
7615 tree type = build_pointer_type (TREE_TYPE (var));
7616 tree new_var = lookup_decl (var, ctx);
7617 x = create_tmp_var_raw (type, get_name (new_var));
7618 gimple_add_tmp_var (x);
7619 x = build_simple_mem_ref (x);
7620 SET_DECL_VALUE_EXPR (new_var, x);
7621 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7623 continue;
7626 if (!maybe_lookup_field (var, ctx))
7627 continue;
7629 /* Don't remap oacc parallel reduction variables, because the
7630 intermediate result must be local to each gang. */
7631 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7632 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7634 x = build_receiver_ref (var, true, ctx);
7635 tree new_var = lookup_decl (var, ctx);
7637 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7638 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7639 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7640 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7641 x = build_simple_mem_ref (x);
7642 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7644 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7645 if (omp_is_reference (new_var)
7646 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
7648 /* Create a local object to hold the instance
7649 value. */
7650 tree type = TREE_TYPE (TREE_TYPE (new_var));
7651 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7652 tree inst = create_tmp_var (type, id);
7653 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7654 x = build_fold_addr_expr (inst);
7656 gimplify_assign (new_var, x, &fplist);
7658 else if (DECL_P (new_var))
7660 SET_DECL_VALUE_EXPR (new_var, x);
7661 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7663 else
7664 gcc_unreachable ();
7666 map_cnt++;
7667 break;
7669 case OMP_CLAUSE_FIRSTPRIVATE:
7670 if (is_oacc_parallel (ctx))
7671 goto oacc_firstprivate;
7672 map_cnt++;
7673 var = OMP_CLAUSE_DECL (c);
7674 if (!omp_is_reference (var)
7675 && !is_gimple_reg_type (TREE_TYPE (var)))
7677 tree new_var = lookup_decl (var, ctx);
7678 if (is_variable_sized (var))
7680 tree pvar = DECL_VALUE_EXPR (var);
7681 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7682 pvar = TREE_OPERAND (pvar, 0);
7683 gcc_assert (DECL_P (pvar));
7684 tree new_pvar = lookup_decl (pvar, ctx);
7685 x = build_fold_indirect_ref (new_pvar);
7686 TREE_THIS_NOTRAP (x) = 1;
7688 else
7689 x = build_receiver_ref (var, true, ctx);
7690 SET_DECL_VALUE_EXPR (new_var, x);
7691 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7693 break;
7695 case OMP_CLAUSE_PRIVATE:
7696 if (is_gimple_omp_oacc (ctx->stmt))
7697 break;
7698 var = OMP_CLAUSE_DECL (c);
7699 if (is_variable_sized (var))
7701 tree new_var = lookup_decl (var, ctx);
7702 tree pvar = DECL_VALUE_EXPR (var);
7703 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7704 pvar = TREE_OPERAND (pvar, 0);
7705 gcc_assert (DECL_P (pvar));
7706 tree new_pvar = lookup_decl (pvar, ctx);
7707 x = build_fold_indirect_ref (new_pvar);
7708 TREE_THIS_NOTRAP (x) = 1;
7709 SET_DECL_VALUE_EXPR (new_var, x);
7710 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7712 break;
7714 case OMP_CLAUSE_USE_DEVICE_PTR:
7715 case OMP_CLAUSE_IS_DEVICE_PTR:
7716 var = OMP_CLAUSE_DECL (c);
7717 map_cnt++;
7718 if (is_variable_sized (var))
7720 tree new_var = lookup_decl (var, ctx);
7721 tree pvar = DECL_VALUE_EXPR (var);
7722 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7723 pvar = TREE_OPERAND (pvar, 0);
7724 gcc_assert (DECL_P (pvar));
7725 tree new_pvar = lookup_decl (pvar, ctx);
7726 x = build_fold_indirect_ref (new_pvar);
7727 TREE_THIS_NOTRAP (x) = 1;
7728 SET_DECL_VALUE_EXPR (new_var, x);
7729 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7731 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7733 tree new_var = lookup_decl (var, ctx);
7734 tree type = build_pointer_type (TREE_TYPE (var));
7735 x = create_tmp_var_raw (type, get_name (new_var));
7736 gimple_add_tmp_var (x);
7737 x = build_simple_mem_ref (x);
7738 SET_DECL_VALUE_EXPR (new_var, x);
7739 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7741 else
7743 tree new_var = lookup_decl (var, ctx);
7744 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7745 gimple_add_tmp_var (x);
7746 SET_DECL_VALUE_EXPR (new_var, x);
7747 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7749 break;
7752 if (offloaded)
7754 target_nesting_level++;
7755 lower_omp (&tgt_body, ctx);
7756 target_nesting_level--;
7758 else if (data_region)
7759 lower_omp (&tgt_body, ctx);
7761 if (offloaded)
7763 /* Declare all the variables created by mapping and the variables
7764 declared in the scope of the target body. */
7765 record_vars_into (ctx->block_vars, child_fn);
7766 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
7767 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7770 olist = NULL;
7771 ilist = NULL;
7772 if (ctx->record_type)
7774 ctx->sender_decl
7775 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7776 DECL_NAMELESS (ctx->sender_decl) = 1;
7777 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7778 t = make_tree_vec (3);
7779 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7780 TREE_VEC_ELT (t, 1)
7781 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7782 ".omp_data_sizes");
7783 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7784 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7785 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7786 tree tkind_type = short_unsigned_type_node;
7787 int talign_shift = 8;
7788 TREE_VEC_ELT (t, 2)
7789 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7790 ".omp_data_kinds");
7791 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7792 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7793 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7794 gimple_omp_target_set_data_arg (stmt, t);
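      /* A hedged illustration of the triple set up above:

	   [0] .omp_data_arr    the mapped addresses/values
	   [1] .omp_data_sizes  byte size of each map entry
	   [2] .omp_data_kinds  map kind in the low talign_shift (8) bits,
				ceil_log2 (alignment) in the bits above

	 so, assuming GOMP_MAP_TOFROM's value of 3 from gomp-constants.h,
	 a tofrom map of an 8-byte-aligned object is encoded as
	 (3 << 8) | 3 == 0x303.  */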
7796 vec<constructor_elt, va_gc> *vsize;
7797 vec<constructor_elt, va_gc> *vkind;
7798 vec_alloc (vsize, map_cnt);
7799 vec_alloc (vkind, map_cnt);
7800 unsigned int map_idx = 0;
7802 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7803 switch (OMP_CLAUSE_CODE (c))
7805 tree ovar, nc, s, purpose, var, x, type;
7806 unsigned int talign;
7808 default:
7809 break;
7811 case OMP_CLAUSE_MAP:
7812 case OMP_CLAUSE_TO:
7813 case OMP_CLAUSE_FROM:
7814 oacc_firstprivate_map:
7815 nc = c;
7816 ovar = OMP_CLAUSE_DECL (c);
7817 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7818 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7819 || (OMP_CLAUSE_MAP_KIND (c)
7820 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7821 break;
7822 if (!DECL_P (ovar))
7824 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7825 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7827 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7828 == get_base_address (ovar));
7829 nc = OMP_CLAUSE_CHAIN (c);
7830 ovar = OMP_CLAUSE_DECL (nc);
7832 else
7834 tree x = build_sender_ref (ovar, ctx);
7835 tree v
7836 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7837 gimplify_assign (x, v, &ilist);
7838 nc = NULL_TREE;
7841 else
7843 if (DECL_SIZE (ovar)
7844 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7846 tree ovar2 = DECL_VALUE_EXPR (ovar);
7847 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7848 ovar2 = TREE_OPERAND (ovar2, 0);
7849 gcc_assert (DECL_P (ovar2));
7850 ovar = ovar2;
7852 if (!maybe_lookup_field (ovar, ctx))
7853 continue;
7856 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7857 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7858 talign = DECL_ALIGN_UNIT (ovar);
7859 if (nc)
7861 var = lookup_decl_in_outer_ctx (ovar, ctx);
7862 x = build_sender_ref (ovar, ctx);
7864 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7865 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7866 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7867 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7869 gcc_assert (offloaded);
7870 tree avar
7871 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7872 mark_addressable (avar);
7873 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7874 talign = DECL_ALIGN_UNIT (avar);
7875 avar = build_fold_addr_expr (avar);
7876 gimplify_assign (x, avar, &ilist);
7878 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7880 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7881 if (!omp_is_reference (var))
7883 if (is_gimple_reg (var)
7884 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7885 TREE_NO_WARNING (var) = 1;
7886 var = build_fold_addr_expr (var);
7888 else
7889 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7890 gimplify_assign (x, var, &ilist);
7892 else if (is_gimple_reg (var))
7894 gcc_assert (offloaded);
7895 tree avar = create_tmp_var (TREE_TYPE (var));
7896 mark_addressable (avar);
7897 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7898 if (GOMP_MAP_COPY_TO_P (map_kind)
7899 || map_kind == GOMP_MAP_POINTER
7900 || map_kind == GOMP_MAP_TO_PSET
7901 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7903 /* If we need to initialize a temporary
7904 with VAR because it is not addressable, and
7905 the variable hasn't been initialized yet, then
7906 we'll get a warning for the store to avar.
7907 Don't warn in that case; the mapping might
7908 be implicit. */
7909 TREE_NO_WARNING (var) = 1;
7910 gimplify_assign (avar, var, &ilist);
7912 avar = build_fold_addr_expr (avar);
7913 gimplify_assign (x, avar, &ilist);
7914 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7915 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7916 && !TYPE_READONLY (TREE_TYPE (var)))
7918 x = unshare_expr (x);
7919 x = build_simple_mem_ref (x);
7920 gimplify_assign (var, x, &olist);
7923 else
7925 var = build_fold_addr_expr (var);
7926 gimplify_assign (x, var, &ilist);
7929 s = NULL_TREE;
7930 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7932 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7933 s = TREE_TYPE (ovar);
7934 if (TREE_CODE (s) == REFERENCE_TYPE)
7935 s = TREE_TYPE (s);
7936 s = TYPE_SIZE_UNIT (s);
7938 else
7939 s = OMP_CLAUSE_SIZE (c);
7940 if (s == NULL_TREE)
7941 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7942 s = fold_convert (size_type_node, s);
7943 purpose = size_int (map_idx++);
7944 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7945 if (TREE_CODE (s) != INTEGER_CST)
7946 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7948 unsigned HOST_WIDE_INT tkind, tkind_zero;
7949 switch (OMP_CLAUSE_CODE (c))
7951 case OMP_CLAUSE_MAP:
7952 tkind = OMP_CLAUSE_MAP_KIND (c);
7953 tkind_zero = tkind;
7954 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7955 switch (tkind)
7957 case GOMP_MAP_ALLOC:
7958 case GOMP_MAP_TO:
7959 case GOMP_MAP_FROM:
7960 case GOMP_MAP_TOFROM:
7961 case GOMP_MAP_ALWAYS_TO:
7962 case GOMP_MAP_ALWAYS_FROM:
7963 case GOMP_MAP_ALWAYS_TOFROM:
7964 case GOMP_MAP_RELEASE:
7965 case GOMP_MAP_FORCE_TO:
7966 case GOMP_MAP_FORCE_FROM:
7967 case GOMP_MAP_FORCE_TOFROM:
7968 case GOMP_MAP_FORCE_PRESENT:
7969 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7970 break;
7971 case GOMP_MAP_DELETE:
7972 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7973 default:
7974 break;
7976 if (tkind_zero != tkind)
7978 if (integer_zerop (s))
7979 tkind = tkind_zero;
7980 else if (integer_nonzerop (s))
7981 tkind_zero = tkind;
7983 break;
7984 case OMP_CLAUSE_FIRSTPRIVATE:
7985 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7986 tkind = GOMP_MAP_TO;
7987 tkind_zero = tkind;
7988 break;
7989 case OMP_CLAUSE_TO:
7990 tkind = GOMP_MAP_TO;
7991 tkind_zero = tkind;
7992 break;
7993 case OMP_CLAUSE_FROM:
7994 tkind = GOMP_MAP_FROM;
7995 tkind_zero = tkind;
7996 break;
7997 default:
7998 gcc_unreachable ();
8000 gcc_checking_assert (tkind
8001 < (HOST_WIDE_INT_C (1U) << talign_shift));
8002 gcc_checking_assert (tkind_zero
8003 < (HOST_WIDE_INT_C (1U) << talign_shift));
8004 talign = ceil_log2 (talign);
8005 tkind |= talign << talign_shift;
8006 tkind_zero |= talign << talign_shift;
8007 gcc_checking_assert (tkind
8008 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8009 gcc_checking_assert (tkind_zero
8010 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8011 if (tkind == tkind_zero)
8012 x = build_int_cstu (tkind_type, tkind);
8013 else
8015 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8016 x = build3 (COND_EXPR, tkind_type,
8017 fold_build2 (EQ_EXPR, boolean_type_node,
8018 unshare_expr (s), size_zero_node),
8019 build_int_cstu (tkind_type, tkind_zero),
8020 build_int_cstu (tkind_type, tkind));
8022 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8023 if (nc && nc != c)
8024 c = nc;
8025 break;
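	  /* A hedged note on the map cases above: when the mapped section
	     may be zero-length at run time, the kind entry is not a
	     constant but the COND_EXPR

	       s == 0 ? <tkind_zero> : <tkind>

	     selecting e.g. GOMP_MAP_ZERO_LEN_ARRAY_SECTION; that also
	     clears TREE_STATIC on .omp_data_kinds so the array gets
	     initialized at run time.  */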
8027 case OMP_CLAUSE_FIRSTPRIVATE:
8028 if (is_oacc_parallel (ctx))
8029 goto oacc_firstprivate_map;
8030 ovar = OMP_CLAUSE_DECL (c);
8031 if (omp_is_reference (ovar))
8032 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8033 else
8034 talign = DECL_ALIGN_UNIT (ovar);
8035 var = lookup_decl_in_outer_ctx (ovar, ctx);
8036 x = build_sender_ref (ovar, ctx);
8037 tkind = GOMP_MAP_FIRSTPRIVATE;
8038 type = TREE_TYPE (ovar);
8039 if (omp_is_reference (ovar))
8040 type = TREE_TYPE (type);
8041 if ((INTEGRAL_TYPE_P (type)
8042 && TYPE_PRECISION (type) <= POINTER_SIZE)
8043 || TREE_CODE (type) == POINTER_TYPE)
8045 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8046 tree t = var;
8047 if (omp_is_reference (var))
8048 t = build_simple_mem_ref (var);
8049 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8050 TREE_NO_WARNING (var) = 1;
8051 if (TREE_CODE (type) != POINTER_TYPE)
8052 t = fold_convert (pointer_sized_int_node, t);
8053 t = fold_convert (TREE_TYPE (x), t);
8054 gimplify_assign (x, t, &ilist);
8056 else if (omp_is_reference (var))
8057 gimplify_assign (x, var, &ilist);
8058 else if (is_gimple_reg (var))
8060 tree avar = create_tmp_var (TREE_TYPE (var));
8061 mark_addressable (avar);
8062 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8063 TREE_NO_WARNING (var) = 1;
8064 gimplify_assign (avar, var, &ilist);
8065 avar = build_fold_addr_expr (avar);
8066 gimplify_assign (x, avar, &ilist);
8068 else
8070 var = build_fold_addr_expr (var);
8071 gimplify_assign (x, var, &ilist);
8073 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8074 s = size_int (0);
8075 else if (omp_is_reference (ovar))
8076 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8077 else
8078 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8079 s = fold_convert (size_type_node, s);
8080 purpose = size_int (map_idx++);
8081 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8082 if (TREE_CODE (s) != INTEGER_CST)
8083 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8085 gcc_checking_assert (tkind
8086 < (HOST_WIDE_INT_C (1U) << talign_shift));
8087 talign = ceil_log2 (talign);
8088 tkind |= talign << talign_shift;
8089 gcc_checking_assert (tkind
8090 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8091 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8092 build_int_cstu (tkind_type, tkind));
8093 break;
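	  /* A hedged summary of the firstprivate case above: an integral
	     value no wider than a pointer (or a pointer itself) is passed
	     by value through the data slot with GOMP_MAP_FIRSTPRIVATE_INT
	     and size 0, roughly

	       <sender ref for i> = (void *) (uintptr_t) i;

	     while anything larger is passed by address with
	     GOMP_MAP_FIRSTPRIVATE and the object's size.  */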
8095 case OMP_CLAUSE_USE_DEVICE_PTR:
8096 case OMP_CLAUSE_IS_DEVICE_PTR:
8097 ovar = OMP_CLAUSE_DECL (c);
8098 var = lookup_decl_in_outer_ctx (ovar, ctx);
8099 x = build_sender_ref (ovar, ctx);
8100 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8101 tkind = GOMP_MAP_USE_DEVICE_PTR;
8102 else
8103 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8104 type = TREE_TYPE (ovar);
8105 if (TREE_CODE (type) == ARRAY_TYPE)
8106 var = build_fold_addr_expr (var);
8107 else
8109 if (omp_is_reference (ovar))
8111 type = TREE_TYPE (type);
8112 if (TREE_CODE (type) != ARRAY_TYPE)
8113 var = build_simple_mem_ref (var);
8114 var = fold_convert (TREE_TYPE (x), var);
8117 gimplify_assign (x, var, &ilist);
8118 s = size_int (0);
8119 purpose = size_int (map_idx++);
8120 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8121 gcc_checking_assert (tkind
8122 < (HOST_WIDE_INT_C (1U) << talign_shift));
8123 gcc_checking_assert (tkind
8124 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8125 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8126 build_int_cstu (tkind_type, tkind));
8127 break;
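	  /* A hedged note on the case above: both clauses send only the
	     (possibly dereferenced) pointer value with size 0;
	     use_device_ptr asks the runtime to translate it into a device
	     pointer (GOMP_MAP_USE_DEVICE_PTR), whereas is_device_ptr just
	     passes it through by value (GOMP_MAP_FIRSTPRIVATE_INT).  */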
8130 gcc_assert (map_idx == map_cnt);
8132 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8133 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8134 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8135 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8136 for (int i = 1; i <= 2; i++)
8137 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8139 gimple_seq initlist = NULL;
8140 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8141 TREE_VEC_ELT (t, i)),
8142 &initlist, true, NULL_TREE);
8143 gimple_seq_add_seq (&ilist, initlist);
8145 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8146 NULL);
8147 TREE_THIS_VOLATILE (clobber) = 1;
8148 gimple_seq_add_stmt (&olist,
8149 gimple_build_assign (TREE_VEC_ELT (t, i),
8150 clobber));
8153 tree clobber = build_constructor (ctx->record_type, NULL);
8154 TREE_THIS_VOLATILE (clobber) = 1;
8155 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8156 clobber));
8159 /* Once all the expansions are done, sequence all the different
8160 fragments inside gimple_omp_body. */
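     /* A hedged sketch for the offloaded case (pieces are omitted when
	the corresponding clauses are absent):

	  <receiver_decl> = &.omp_data_arr;  <- only with a record_type
	  <fplist>                           <- OpenACC firstprivate inits
	  <per-clause receiver-side setup>   <- the two clause walks below
	  <fork_seq> <tgt_body> <join_seq>   <- OpenACC parallel wraps the
						body in a dummy gang-level
						reduction fork/join
	  GIMPLE_OMP_RETURN  */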
8162 new_body = NULL;
8164 if (offloaded
8165 && ctx->record_type)
8167 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8168 /* fixup_child_record_type might have changed receiver_decl's type. */
8169 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8170 gimple_seq_add_stmt (&new_body,
8171 gimple_build_assign (ctx->receiver_decl, t));
8173 gimple_seq_add_seq (&new_body, fplist);
8175 if (offloaded || data_region)
8177 tree prev = NULL_TREE;
8178 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8179 switch (OMP_CLAUSE_CODE (c))
8181 tree var, x;
8182 default:
8183 break;
8184 case OMP_CLAUSE_FIRSTPRIVATE:
8185 if (is_gimple_omp_oacc (ctx->stmt))
8186 break;
8187 var = OMP_CLAUSE_DECL (c);
8188 if (omp_is_reference (var)
8189 || is_gimple_reg_type (TREE_TYPE (var)))
8191 tree new_var = lookup_decl (var, ctx);
8192 tree type;
8193 type = TREE_TYPE (var);
8194 if (omp_is_reference (var))
8195 type = TREE_TYPE (type);
8196 if ((INTEGRAL_TYPE_P (type)
8197 && TYPE_PRECISION (type) <= POINTER_SIZE)
8198 || TREE_CODE (type) == POINTER_TYPE)
8200 x = build_receiver_ref (var, false, ctx);
8201 if (TREE_CODE (type) != POINTER_TYPE)
8202 x = fold_convert (pointer_sized_int_node, x);
8203 x = fold_convert (type, x);
8204 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8205 fb_rvalue);
8206 if (omp_is_reference (var))
8208 tree v = create_tmp_var_raw (type, get_name (var));
8209 gimple_add_tmp_var (v);
8210 TREE_ADDRESSABLE (v) = 1;
8211 gimple_seq_add_stmt (&new_body,
8212 gimple_build_assign (v, x));
8213 x = build_fold_addr_expr (v);
8215 gimple_seq_add_stmt (&new_body,
8216 gimple_build_assign (new_var, x));
8218 else
8220 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8221 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8222 fb_rvalue);
8223 gimple_seq_add_stmt (&new_body,
8224 gimple_build_assign (new_var, x));
8227 else if (is_variable_sized (var))
8229 tree pvar = DECL_VALUE_EXPR (var);
8230 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8231 pvar = TREE_OPERAND (pvar, 0);
8232 gcc_assert (DECL_P (pvar));
8233 tree new_var = lookup_decl (pvar, ctx);
8234 x = build_receiver_ref (var, false, ctx);
8235 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8236 gimple_seq_add_stmt (&new_body,
8237 gimple_build_assign (new_var, x));
8239 break;
8240 case OMP_CLAUSE_PRIVATE:
8241 if (is_gimple_omp_oacc (ctx->stmt))
8242 break;
8243 var = OMP_CLAUSE_DECL (c);
8244 if (omp_is_reference (var))
8246 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8247 tree new_var = lookup_decl (var, ctx);
8248 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8249 if (TREE_CONSTANT (x))
8251 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8252 get_name (var));
8253 gimple_add_tmp_var (x);
8254 TREE_ADDRESSABLE (x) = 1;
8255 x = build_fold_addr_expr_loc (clause_loc, x);
8257 else
8258 break;
8260 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8261 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8262 gimple_seq_add_stmt (&new_body,
8263 gimple_build_assign (new_var, x));
8265 break;
8266 case OMP_CLAUSE_USE_DEVICE_PTR:
8267 case OMP_CLAUSE_IS_DEVICE_PTR:
8268 var = OMP_CLAUSE_DECL (c);
8269 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8270 x = build_sender_ref (var, ctx);
8271 else
8272 x = build_receiver_ref (var, false, ctx);
8273 if (is_variable_sized (var))
8275 tree pvar = DECL_VALUE_EXPR (var);
8276 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8277 pvar = TREE_OPERAND (pvar, 0);
8278 gcc_assert (DECL_P (pvar));
8279 tree new_var = lookup_decl (pvar, ctx);
8280 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8281 gimple_seq_add_stmt (&new_body,
8282 gimple_build_assign (new_var, x));
8284 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8286 tree new_var = lookup_decl (var, ctx);
8287 new_var = DECL_VALUE_EXPR (new_var);
8288 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8289 new_var = TREE_OPERAND (new_var, 0);
8290 gcc_assert (DECL_P (new_var));
8291 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8292 gimple_seq_add_stmt (&new_body,
8293 gimple_build_assign (new_var, x));
8295 else
8297 tree type = TREE_TYPE (var);
8298 tree new_var = lookup_decl (var, ctx);
8299 if (omp_is_reference (var))
8301 type = TREE_TYPE (type);
8302 if (TREE_CODE (type) != ARRAY_TYPE)
8304 tree v = create_tmp_var_raw (type, get_name (var));
8305 gimple_add_tmp_var (v);
8306 TREE_ADDRESSABLE (v) = 1;
8307 x = fold_convert (type, x);
8308 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8309 fb_rvalue);
8310 gimple_seq_add_stmt (&new_body,
8311 gimple_build_assign (v, x));
8312 x = build_fold_addr_expr (v);
8315 new_var = DECL_VALUE_EXPR (new_var);
8316 x = fold_convert (TREE_TYPE (new_var), x);
8317 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8318 gimple_seq_add_stmt (&new_body,
8319 gimple_build_assign (new_var, x));
8321 break;
8323 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8324 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE values
8325 have already been handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8326 or references to VLAs. */
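     /* A hedged example: for "map(tofrom: a[10:n])" with a pointer A,
	the gimplifier emits the map of the array section followed by a
	GOMP_MAP_FIRSTPRIVATE_POINTER clause for A whose OMP_CLAUSE_SIZE
	is the section bias (here 10 * sizeof *a).  The walk below then
	rebuilds the private pointer in the child, roughly as

	  a = (T *) ((char *) <receiver ref of PREV's decl> - bias);

	via POINTER_PLUS_EXPR with the negated bias.  */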
8327 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8328 switch (OMP_CLAUSE_CODE (c))
8330 tree var;
8331 default:
8332 break;
8333 case OMP_CLAUSE_MAP:
8334 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8335 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8337 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8338 poly_int64 offset = 0;
8339 gcc_assert (prev);
8340 var = OMP_CLAUSE_DECL (c);
8341 if (DECL_P (var)
8342 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8343 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8344 ctx))
8345 && varpool_node::get_create (var)->offloadable)
8346 break;
8347 if (TREE_CODE (var) == INDIRECT_REF
8348 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8349 var = TREE_OPERAND (var, 0);
8350 if (TREE_CODE (var) == COMPONENT_REF)
8352 var = get_addr_base_and_unit_offset (var, &offset);
8353 gcc_assert (var != NULL_TREE && DECL_P (var));
8355 else if (DECL_SIZE (var)
8356 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8358 tree var2 = DECL_VALUE_EXPR (var);
8359 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8360 var2 = TREE_OPERAND (var2, 0);
8361 gcc_assert (DECL_P (var2));
8362 var = var2;
8364 tree new_var = lookup_decl (var, ctx), x;
8365 tree type = TREE_TYPE (new_var);
8366 bool is_ref;
8367 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8368 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8369 == COMPONENT_REF))
8371 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8372 is_ref = true;
8373 new_var = build2 (MEM_REF, type,
8374 build_fold_addr_expr (new_var),
8375 build_int_cst (build_pointer_type (type),
8376 offset));
8378 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8380 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8381 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8382 new_var = build2 (MEM_REF, type,
8383 build_fold_addr_expr (new_var),
8384 build_int_cst (build_pointer_type (type),
8385 offset));
8387 else
8388 is_ref = omp_is_reference (var);
8389 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8390 is_ref = false;
8391 bool ref_to_array = false;
8392 if (is_ref)
8394 type = TREE_TYPE (type);
8395 if (TREE_CODE (type) == ARRAY_TYPE)
8397 type = build_pointer_type (type);
8398 ref_to_array = true;
8401 else if (TREE_CODE (type) == ARRAY_TYPE)
8403 tree decl2 = DECL_VALUE_EXPR (new_var);
8404 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8405 decl2 = TREE_OPERAND (decl2, 0);
8406 gcc_assert (DECL_P (decl2));
8407 new_var = decl2;
8408 type = TREE_TYPE (new_var);
8410 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8411 x = fold_convert_loc (clause_loc, type, x);
8412 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8414 tree bias = OMP_CLAUSE_SIZE (c);
8415 if (DECL_P (bias))
8416 bias = lookup_decl (bias, ctx);
8417 bias = fold_convert_loc (clause_loc, sizetype, bias);
8418 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8419 bias);
8420 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8421 TREE_TYPE (x), x, bias);
8423 if (ref_to_array)
8424 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8425 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8426 if (is_ref && !ref_to_array)
8428 tree t = create_tmp_var_raw (type, get_name (var));
8429 gimple_add_tmp_var (t);
8430 TREE_ADDRESSABLE (t) = 1;
8431 gimple_seq_add_stmt (&new_body,
8432 gimple_build_assign (t, x));
8433 x = build_fold_addr_expr_loc (clause_loc, t);
8435 gimple_seq_add_stmt (&new_body,
8436 gimple_build_assign (new_var, x));
8437 prev = NULL_TREE;
8439 else if (OMP_CLAUSE_CHAIN (c)
8440 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8441 == OMP_CLAUSE_MAP
8442 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8443 == GOMP_MAP_FIRSTPRIVATE_POINTER
8444 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8445 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8446 prev = c;
8447 break;
8448 case OMP_CLAUSE_PRIVATE:
8449 var = OMP_CLAUSE_DECL (c);
8450 if (is_variable_sized (var))
8452 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8453 tree new_var = lookup_decl (var, ctx);
8454 tree pvar = DECL_VALUE_EXPR (var);
8455 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8456 pvar = TREE_OPERAND (pvar, 0);
8457 gcc_assert (DECL_P (pvar));
8458 tree new_pvar = lookup_decl (pvar, ctx);
8459 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8460 tree al = size_int (DECL_ALIGN (var));
8461 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8462 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8463 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8464 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8465 gimple_seq_add_stmt (&new_body,
8466 gimple_build_assign (new_pvar, x));
8468 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8470 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8471 tree new_var = lookup_decl (var, ctx);
8472 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8473 if (TREE_CONSTANT (x))
8474 break;
8475 else
8477 tree atmp
8478 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8479 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8480 tree al = size_int (TYPE_ALIGN (rtype));
8481 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8484 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8485 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8486 gimple_seq_add_stmt (&new_body,
8487 gimple_build_assign (new_var, x));
8489 break;
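	  /* A hedged sketch of the private-VLA handling above: the
	     private copy lives on the child's stack,

	       <new_pvar> = __builtin_alloca_with_align (<size>,
							 DECL_ALIGN (var));

	     and non-constant-size references are treated the same way.  */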
8492 gimple_seq fork_seq = NULL;
8493 gimple_seq join_seq = NULL;
8495 if (is_oacc_parallel (ctx))
8497 /* If there are reductions on the offloaded region itself, treat
8498 them as a dummy GANG loop. */
8499 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8501 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8502 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8505 gimple_seq_add_seq (&new_body, fork_seq);
8506 gimple_seq_add_seq (&new_body, tgt_body);
8507 gimple_seq_add_seq (&new_body, join_seq);
8509 if (offloaded)
8510 new_body = maybe_catch_exception (new_body);
8512 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8513 gimple_omp_set_body (stmt, new_body);
8516 bind = gimple_build_bind (NULL, NULL,
8517 tgt_bind ? gimple_bind_block (tgt_bind)
8518 : NULL_TREE);
8519 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8520 gimple_bind_add_seq (bind, ilist);
8521 gimple_bind_add_stmt (bind, stmt);
8522 gimple_bind_add_seq (bind, olist);
8524 pop_gimplify_context (NULL);
8526 if (dep_bind)
8528 gimple_bind_add_seq (dep_bind, dep_ilist);
8529 gimple_bind_add_stmt (dep_bind, bind);
8530 gimple_bind_add_seq (dep_bind, dep_olist);
8531 pop_gimplify_context (dep_bind);
8535 /* Expand code for an OpenMP teams directive. */
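/* A hedged sketch of the lowering performed below, for the non-grid-phony
   case and assuming num_teams(N) and thread_limit(L) clauses (an absent
   clause contributes 0):

     #pragma omp teams num_teams(N) thread_limit(L)
     BODY;

   becomes, roughly,

     <rec-input-clause setup>
     GOMP_teams (N, L);
     BODY;
     <reduction epilogue> <dlist>
     GIMPLE_OMP_RETURN (nowait)  */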
8537 static void
8538 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8540 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8541 push_gimplify_context ();
8543 tree block = make_node (BLOCK);
8544 gbind *bind = gimple_build_bind (NULL, NULL, block);
8545 gsi_replace (gsi_p, bind, true);
8546 gimple_seq bind_body = NULL;
8547 gimple_seq dlist = NULL;
8548 gimple_seq olist = NULL;
8550 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8551 OMP_CLAUSE_NUM_TEAMS);
8552 if (num_teams == NULL_TREE)
8553 num_teams = build_int_cst (unsigned_type_node, 0);
8554 else
8556 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8557 num_teams = fold_convert (unsigned_type_node, num_teams);
8558 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8560 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8561 OMP_CLAUSE_THREAD_LIMIT);
8562 if (thread_limit == NULL_TREE)
8563 thread_limit = build_int_cst (unsigned_type_node, 0);
8564 else
8566 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8567 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8568 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8569 fb_rvalue);
8572 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8573 &bind_body, &dlist, ctx, NULL);
8574 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8575 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8576 if (!gimple_omp_teams_grid_phony (teams_stmt))
8578 gimple_seq_add_stmt (&bind_body, teams_stmt);
8579 location_t loc = gimple_location (teams_stmt);
8580 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8581 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8582 gimple_set_location (call, loc);
8583 gimple_seq_add_stmt (&bind_body, call);
8586 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8587 gimple_omp_set_body (teams_stmt, NULL);
8588 gimple_seq_add_seq (&bind_body, olist);
8589 gimple_seq_add_seq (&bind_body, dlist);
8590 if (!gimple_omp_teams_grid_phony (teams_stmt))
8591 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8592 gimple_bind_set_body (bind, bind_body);
8594 pop_gimplify_context (bind);
8596 gimple_bind_append_vars (bind, ctx->block_vars);
8597 BLOCK_VARS (block) = ctx->block_vars;
8598 if (BLOCK_VARS (block))
8599 TREE_USED (block) = 1;
8602 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8604 static void
8605 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8607 gimple *stmt = gsi_stmt (*gsi_p);
8608 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8609 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8610 gimple_build_omp_return (false));
8614 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8615 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8616 of an OMP context, but with task_shared_vars set. */
8618 static tree
8619 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8620 void *data)
8622 tree t = *tp;
8624 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8625 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8626 return t;
8628 if (task_shared_vars
8629 && DECL_P (t)
8630 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8631 return t;
8633 /* If a global variable has been privatized, TREE_CONSTANT on
8634 ADDR_EXPR might be wrong. */
8635 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8636 recompute_tree_invariant_for_addr_expr (t);
8638 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8639 return NULL_TREE;
8642 /* Data to be communicated between lower_omp_regimplify_operands and
8643 lower_omp_regimplify_operands_p. */
8645 struct lower_omp_regimplify_operands_data
8647 omp_context *ctx;
8648 vec<tree> *decls;
8651 /* Helper function for lower_omp_regimplify_operands. Find
8652 omp_member_access_dummy_var vars and adjust temporarily their
8653 DECL_VALUE_EXPRs if needed. */
8655 static tree
8656 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8657 void *data)
8659 tree t = omp_member_access_dummy_var (*tp);
8660 if (t)
8662 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8663 lower_omp_regimplify_operands_data *ldata
8664 = (lower_omp_regimplify_operands_data *) wi->info;
8665 tree o = maybe_lookup_decl (t, ldata->ctx);
8666 if (o != t)
8668 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8669 ldata->decls->safe_push (*tp);
8670 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8671 SET_DECL_VALUE_EXPR (*tp, v);
8674 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8675 return NULL_TREE;
8678 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8679 of omp_member_access_dummy_var vars during regimplification. */
8681 static void
8682 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8683 gimple_stmt_iterator *gsi_p)
8685 auto_vec<tree, 10> decls;
8686 if (ctx)
8688 struct walk_stmt_info wi;
8689 memset (&wi, '\0', sizeof (wi));
8690 struct lower_omp_regimplify_operands_data data;
8691 data.ctx = ctx;
8692 data.decls = &decls;
8693 wi.info = &data;
8694 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8696 gimple_regimplify_operands (stmt, gsi_p);
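      /* DECLS now holds (saved DECL_VALUE_EXPR, var) pairs pushed by the
	 walk above, if any; restore the temporarily remapped value
	 exprs.  */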
8697 while (!decls.is_empty ())
8699 tree t = decls.pop ();
8700 tree v = decls.pop ();
8701 SET_DECL_VALUE_EXPR (t, v);
8705 static void
8706 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8708 gimple *stmt = gsi_stmt (*gsi_p);
8709 struct walk_stmt_info wi;
8710 gcall *call_stmt;
8712 if (gimple_has_location (stmt))
8713 input_location = gimple_location (stmt);
8715 if (task_shared_vars)
8716 memset (&wi, '\0', sizeof (wi));
8718 /* If we have issued syntax errors, avoid doing any heavy lifting.
8719 Just replace the OMP directives with a NOP to avoid
8720 confusing RTL expansion. */
8721 if (seen_error () && is_gimple_omp (stmt))
8723 gsi_replace (gsi_p, gimple_build_nop (), true);
8724 return;
8727 switch (gimple_code (stmt))
8729 case GIMPLE_COND:
8731 gcond *cond_stmt = as_a <gcond *> (stmt);
8732 if ((ctx || task_shared_vars)
8733 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8734 lower_omp_regimplify_p,
8735 ctx ? NULL : &wi, NULL)
8736 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8737 lower_omp_regimplify_p,
8738 ctx ? NULL : &wi, NULL)))
8739 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8741 break;
8742 case GIMPLE_CATCH:
8743 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8744 break;
8745 case GIMPLE_EH_FILTER:
8746 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8747 break;
8748 case GIMPLE_TRY:
8749 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8750 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8751 break;
8752 case GIMPLE_TRANSACTION:
8753 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8754 ctx);
8755 break;
8756 case GIMPLE_BIND:
8757 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8758 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
8759 break;
8760 case GIMPLE_OMP_PARALLEL:
8761 case GIMPLE_OMP_TASK:
8762 ctx = maybe_lookup_ctx (stmt);
8763 gcc_assert (ctx);
8764 if (ctx->cancellable)
8765 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8766 lower_omp_taskreg (gsi_p, ctx);
8767 break;
8768 case GIMPLE_OMP_FOR:
8769 ctx = maybe_lookup_ctx (stmt);
8770 gcc_assert (ctx);
8771 if (ctx->cancellable)
8772 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8773 lower_omp_for (gsi_p, ctx);
8774 break;
8775 case GIMPLE_OMP_SECTIONS:
8776 ctx = maybe_lookup_ctx (stmt);
8777 gcc_assert (ctx);
8778 if (ctx->cancellable)
8779 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8780 lower_omp_sections (gsi_p, ctx);
8781 break;
8782 case GIMPLE_OMP_SINGLE:
8783 ctx = maybe_lookup_ctx (stmt);
8784 gcc_assert (ctx);
8785 lower_omp_single (gsi_p, ctx);
8786 break;
8787 case GIMPLE_OMP_MASTER:
8788 ctx = maybe_lookup_ctx (stmt);
8789 gcc_assert (ctx);
8790 lower_omp_master (gsi_p, ctx);
8791 break;
8792 case GIMPLE_OMP_TASKGROUP:
8793 ctx = maybe_lookup_ctx (stmt);
8794 gcc_assert (ctx);
8795 lower_omp_taskgroup (gsi_p, ctx);
8796 break;
8797 case GIMPLE_OMP_ORDERED:
8798 ctx = maybe_lookup_ctx (stmt);
8799 gcc_assert (ctx);
8800 lower_omp_ordered (gsi_p, ctx);
8801 break;
8802 case GIMPLE_OMP_CRITICAL:
8803 ctx = maybe_lookup_ctx (stmt);
8804 gcc_assert (ctx);
8805 lower_omp_critical (gsi_p, ctx);
8806 break;
8807 case GIMPLE_OMP_ATOMIC_LOAD:
8808 if ((ctx || task_shared_vars)
8809 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8810 as_a <gomp_atomic_load *> (stmt)),
8811 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8812 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8813 break;
8814 case GIMPLE_OMP_TARGET:
8815 ctx = maybe_lookup_ctx (stmt);
8816 gcc_assert (ctx);
8817 lower_omp_target (gsi_p, ctx);
8818 break;
8819 case GIMPLE_OMP_TEAMS:
8820 ctx = maybe_lookup_ctx (stmt);
8821 gcc_assert (ctx);
8822 lower_omp_teams (gsi_p, ctx);
8823 break;
8824 case GIMPLE_OMP_GRID_BODY:
8825 ctx = maybe_lookup_ctx (stmt);
8826 gcc_assert (ctx);
8827 lower_omp_grid_body (gsi_p, ctx);
8828 break;
8829 case GIMPLE_CALL:
8830 tree fndecl;
8831 call_stmt = as_a <gcall *> (stmt);
8832 fndecl = gimple_call_fndecl (call_stmt);
8833 if (fndecl
8834 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8835 switch (DECL_FUNCTION_CODE (fndecl))
8837 case BUILT_IN_GOMP_BARRIER:
8838 if (ctx == NULL)
8839 break;
8840 /* FALLTHRU */
8841 case BUILT_IN_GOMP_CANCEL:
8842 case BUILT_IN_GOMP_CANCELLATION_POINT:
8843 omp_context *cctx;
8844 cctx = ctx;
8845 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8846 cctx = cctx->outer;
8847 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8848 if (!cctx->cancellable)
8850 if (DECL_FUNCTION_CODE (fndecl)
8851 == BUILT_IN_GOMP_CANCELLATION_POINT)
8853 stmt = gimple_build_nop ();
8854 gsi_replace (gsi_p, stmt, false);
8856 break;
8858 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8860 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8861 gimple_call_set_fndecl (call_stmt, fndecl);
8862 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8864 tree lhs;
8865 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8866 gimple_call_set_lhs (call_stmt, lhs);
8867 tree fallthru_label;
8868 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8869 gimple *g;
8870 g = gimple_build_label (fallthru_label);
8871 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8872 g = gimple_build_cond (NE_EXPR, lhs,
8873 fold_convert (TREE_TYPE (lhs),
8874 boolean_false_node),
8875 cctx->cancel_label, fallthru_label);
8876 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8877 break;
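	      /* A hedged sketch of the net effect inside a cancellable
		 region: the plain call

		   GOMP_barrier ();

		 has been rewritten above into, roughly,

		   <lhs> = GOMP_barrier_cancel ();
		   if (<lhs> != 0) goto <cancel_label>;
		   else goto <fallthru>;
		   <fallthru>:

		 (GOMP_cancel and GOMP_cancellation_point get the same
		 conditional jump), so a cancellation observed at the
		 barrier transfers control to the region's cancellation
		 label.  */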
8878 default:
8879 break;
8881 /* FALLTHRU */
8882 default:
8883 if ((ctx || task_shared_vars)
8884 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8885 ctx ? NULL : &wi))
8887 /* Just remove clobbers; this should happen only if we have
8888 "privatized" local addressable variables in SIMD regions.
8889 The clobber isn't needed in that case, and gimplifying the address
8890 of the ARRAY_REF into a pointer and creating a MEM_REF-based
8891 clobber would create worse code than we get with the clobber
8892 dropped. */
8893 if (gimple_clobber_p (stmt))
8895 gsi_replace (gsi_p, gimple_build_nop (), true);
8896 break;
8898 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8900 break;
8904 static void
8905 lower_omp (gimple_seq *body, omp_context *ctx)
8907 location_t saved_location = input_location;
8908 gimple_stmt_iterator gsi;
8909 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8910 lower_omp_1 (&gsi, ctx);
8911 /* During gimplification, we haven't folded statements inside offloading
8912 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8913 if (target_nesting_level || taskreg_nesting_level)
8914 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8915 fold_stmt (&gsi);
8916 input_location = saved_location;
8919 /* Main entry point. */
8921 static unsigned int
8922 execute_lower_omp (void)
8924 gimple_seq body;
8925 int i;
8926 omp_context *ctx;
8928 /* This pass always runs, to provide PROP_gimple_lomp.
8929 But often, there is nothing to do. */
8930 if (flag_openacc == 0 && flag_openmp == 0
8931 && flag_openmp_simd == 0)
8932 return 0;
8934 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8935 delete_omp_context);
8937 body = gimple_body (current_function_decl);
8939 if (hsa_gen_requested_p ())
8940 omp_grid_gridify_all_targets (&body);
8942 scan_omp (&body, NULL);
8943 gcc_assert (taskreg_nesting_level == 0);
8944 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8945 finish_taskreg_scan (ctx);
8946 taskreg_contexts.release ();
8948 if (all_contexts->root)
8950 if (task_shared_vars)
8951 push_gimplify_context ();
8952 lower_omp (&body, NULL);
8953 if (task_shared_vars)
8954 pop_gimplify_context (NULL);
8957 if (all_contexts)
8959 splay_tree_delete (all_contexts);
8960 all_contexts = NULL;
8962 BITMAP_FREE (task_shared_vars);
8964 /* If the current function is a method, remove the artificial dummy VAR_DECLs
8965 created for non-static data member privatization; they aren't needed for
8966 debuginfo or anything else, have already been replaced everywhere in the
8967 IL, and cause problems with LTO. */
8968 if (DECL_ARGUMENTS (current_function_decl)
8969 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
8970 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
8971 == POINTER_TYPE))
8972 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
8973 return 0;
8976 namespace {
8978 const pass_data pass_data_lower_omp =
8980 GIMPLE_PASS, /* type */
8981 "omplower", /* name */
8982 OPTGROUP_OMP, /* optinfo_flags */
8983 TV_NONE, /* tv_id */
8984 PROP_gimple_any, /* properties_required */
8985 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8986 0, /* properties_destroyed */
8987 0, /* todo_flags_start */
8988 0, /* todo_flags_finish */
8991 class pass_lower_omp : public gimple_opt_pass
8993 public:
8994 pass_lower_omp (gcc::context *ctxt)
8995 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8998 /* opt_pass methods: */
8999 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9001 }; // class pass_lower_omp
9003 } // anon namespace
9005 gimple_opt_pass *
9006 make_pass_lower_omp (gcc::context *ctxt)
9008 return new pass_lower_omp (ctxt);
9011 /* The following is a utility to diagnose structured block violations.
9012 It is not part of the "omplower" pass, as that's invoked too late. It
9013 should be invoked by the respective front ends after gimplification. */
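/* A hedged example of the violation the two walks below diagnose,
   assuming -fopenmp:

     goto l;                       <- branch in the outer context
     #pragma omp parallel
     { l:; }                       <- label recorded by diagnose_sb_1
                                      with the parallel as its context

   diagnose_sb_2 visits the GIMPLE_GOTO, finds that the destination
   label lives in a different context, and diagnose_sb_0 reports
   "invalid entry to OpenMP structured block".  */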
9015 static splay_tree all_labels;
9017 /* Check for mismatched contexts and generate an error if needed. Return
9018 true if an error is detected. */
9020 static bool
9021 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9022 gimple *branch_ctx, gimple *label_ctx)
9024 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9025 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9027 if (label_ctx == branch_ctx)
9028 return false;
9030 const char* kind = NULL;
9032 if (flag_openacc)
9034 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9035 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9037 gcc_checking_assert (kind == NULL);
9038 kind = "OpenACC";
9041 if (kind == NULL)
9043 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9044 kind = "OpenMP";
9047 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9048 so we could traverse it and issue a correct "exit" or "enter" error
9049 message upon a structured block violation.
9051 We built that context as a list with tree_cons'ing, but there is
9052 no easy counterpart in gimple tuples. It seems like far too much work
9053 for issuing exit/enter error messages. If someone really misses the
9054 distinct error message... patches welcome. */
9056 #if 0
9057 /* Try to avoid confusing the user by producing an error message
9058 with correct "exit" or "enter" verbiage. We prefer "exit"
9059 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9060 if (branch_ctx == NULL)
9061 exit_p = false;
9062 else
9064 while (label_ctx)
9066 if (TREE_VALUE (label_ctx) == branch_ctx)
9068 exit_p = false;
9069 break;
9071 label_ctx = TREE_CHAIN (label_ctx);
9075 if (exit_p)
9076 error ("invalid exit from %s structured block", kind);
9077 else
9078 error ("invalid entry to %s structured block", kind);
9079 #endif
9081 /* If it's obvious we have an invalid entry, be specific about the error. */
9082 if (branch_ctx == NULL)
9083 error ("invalid entry to %s structured block", kind);
9084 else
9086 /* Otherwise, be vague and lazy, but efficient. */
9087 error ("invalid branch to/from %s structured block", kind);
9090 gsi_replace (gsi_p, gimple_build_nop (), false);
9091 return true;
9094 /* Pass 1: Create a minimal tree of structured blocks, and record
9095 where each label is found. */
9097 static tree
9098 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9099 struct walk_stmt_info *wi)
9101 gimple *context = (gimple *) wi->info;
9102 gimple *inner_context;
9103 gimple *stmt = gsi_stmt (*gsi_p);
9105 *handled_ops_p = true;
9107 switch (gimple_code (stmt))
9109 WALK_SUBSTMTS;
9111 case GIMPLE_OMP_PARALLEL:
9112 case GIMPLE_OMP_TASK:
9113 case GIMPLE_OMP_SECTIONS:
9114 case GIMPLE_OMP_SINGLE:
9115 case GIMPLE_OMP_SECTION:
9116 case GIMPLE_OMP_MASTER:
9117 case GIMPLE_OMP_ORDERED:
9118 case GIMPLE_OMP_CRITICAL:
9119 case GIMPLE_OMP_TARGET:
9120 case GIMPLE_OMP_TEAMS:
9121 case GIMPLE_OMP_TASKGROUP:
9122 /* The minimal context here is just the current OMP construct. */
9123 inner_context = stmt;
9124 wi->info = inner_context;
9125 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9126 wi->info = context;
9127 break;
9129 case GIMPLE_OMP_FOR:
9130 inner_context = stmt;
9131 wi->info = inner_context;
9132 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9133 walk them. */
9134 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9135 diagnose_sb_1, NULL, wi);
9136 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9137 wi->info = context;
9138 break;
9140 case GIMPLE_LABEL:
9141 splay_tree_insert (all_labels,
9142 (splay_tree_key) gimple_label_label (
9143 as_a <glabel *> (stmt)),
9144 (splay_tree_value) context);
9145 break;
9147 default:
9148 break;
9151 return NULL_TREE;
9154 /* Pass 2: Check each branch and see if its context differs from that of
9155 the destination label's context. */
9157 static tree
9158 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9159 struct walk_stmt_info *wi)
9161 gimple *context = (gimple *) wi->info;
9162 splay_tree_node n;
9163 gimple *stmt = gsi_stmt (*gsi_p);
9165 *handled_ops_p = true;
9167 switch (gimple_code (stmt))
9169 WALK_SUBSTMTS;
9171 case GIMPLE_OMP_PARALLEL:
9172 case GIMPLE_OMP_TASK:
9173 case GIMPLE_OMP_SECTIONS:
9174 case GIMPLE_OMP_SINGLE:
9175 case GIMPLE_OMP_SECTION:
9176 case GIMPLE_OMP_MASTER:
9177 case GIMPLE_OMP_ORDERED:
9178 case GIMPLE_OMP_CRITICAL:
9179 case GIMPLE_OMP_TARGET:
9180 case GIMPLE_OMP_TEAMS:
9181 case GIMPLE_OMP_TASKGROUP:
9182 wi->info = stmt;
9183 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9184 wi->info = context;
9185 break;
9187 case GIMPLE_OMP_FOR:
9188 wi->info = stmt;
9189 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9190 walk them. */
9191 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9192 diagnose_sb_2, NULL, wi);
9193 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9194 wi->info = context;
9195 break;
9197 case GIMPLE_COND:
9199 gcond *cond_stmt = as_a <gcond *> (stmt);
9200 tree lab = gimple_cond_true_label (cond_stmt);
9201 if (lab)
9203 n = splay_tree_lookup (all_labels,
9204 (splay_tree_key) lab);
9205 diagnose_sb_0 (gsi_p, context,
9206 n ? (gimple *) n->value : NULL);
9208 lab = gimple_cond_false_label (cond_stmt);
9209 if (lab)
9211 n = splay_tree_lookup (all_labels,
9212 (splay_tree_key) lab);
9213 diagnose_sb_0 (gsi_p, context,
9214 n ? (gimple *) n->value : NULL);
9217 break;
9219 case GIMPLE_GOTO:
9221 tree lab = gimple_goto_dest (stmt);
9222 if (TREE_CODE (lab) != LABEL_DECL)
9223 break;
9225 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9226 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9228 break;
9230 case GIMPLE_SWITCH:
9232 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9233 unsigned int i;
9234 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9236 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9237 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9238 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9239 break;
9242 break;
9244 case GIMPLE_RETURN:
9245 diagnose_sb_0 (gsi_p, context, NULL);
9246 break;
9248 default:
9249 break;
9252 return NULL_TREE;
9255 static unsigned int
9256 diagnose_omp_structured_block_errors (void)
9258 struct walk_stmt_info wi;
9259 gimple_seq body = gimple_body (current_function_decl);
9261 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9263 memset (&wi, 0, sizeof (wi));
9264 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9266 memset (&wi, 0, sizeof (wi));
9267 wi.want_locations = true;
9268 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9270 gimple_set_body (current_function_decl, body);
9272 splay_tree_delete (all_labels);
9273 all_labels = NULL;
9275 return 0;
9278 namespace {
9280 const pass_data pass_data_diagnose_omp_blocks =
9282 GIMPLE_PASS, /* type */
9283 "*diagnose_omp_blocks", /* name */
9284 OPTGROUP_OMP, /* optinfo_flags */
9285 TV_NONE, /* tv_id */
9286 PROP_gimple_any, /* properties_required */
9287 0, /* properties_provided */
9288 0, /* properties_destroyed */
9289 0, /* todo_flags_start */
9290 0, /* todo_flags_finish */
9293 class pass_diagnose_omp_blocks : public gimple_opt_pass
9295 public:
9296 pass_diagnose_omp_blocks (gcc::context *ctxt)
9297 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9300 /* opt_pass methods: */
9301 virtual bool gate (function *)
9303 return flag_openacc || flag_openmp || flag_openmp_simd;
9305 virtual unsigned int execute (function *)
9307 return diagnose_omp_structured_block_errors ();
9310 }; // class pass_diagnose_omp_blocks
9312 } // anon namespace
9314 gimple_opt_pass *
9315 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9317 return new pass_diagnose_omp_blocks (ctxt);
9321 #include "gt-omp-low.h"