/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages
     regarding invalid gotos.  The outermost ctx is depth 1, with depth 0
     being reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

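/* Convenience macro for walk_gimple_seq callbacks: inside a switch on
   gimple_code, these cases cover the container statements whose
   sub-statements must still be walked (see e.g. omp_find_combined_for
   below).  */
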
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Look up variables.  The "maybe" form allows the variable not to have
   been entered; the plain form requires that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

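/* Note: a true result from use_pointer_for_field means the variable is
   transferred by address (the child function receives a pointer and
   dereferences it); a false result means plain copy-in/copy-out of the
   value is safe.  */
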
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because some task
     needed to take its address.  But we don't need to take the address
     of privatized copies of it.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If VAR is a reference,
       it may well be shared and thus valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

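/* A note on MASK (inferred from the callers in this file): bit 0 puts the
   field into CTX->RECORD_TYPE / FIELD_MAP, bit 1 into CTX->SRECORD_TYPE /
   SFIELD_MAP, bit 2 wraps an array type in a double pointer, and bit 3
   keys the maps by &DECL_UID (VAR) rather than by VAR itself, so the same
   variable can have both kinds of entries (e.g. mask 11 for firstprivatized
   shared variables on task constructs).  */
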
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with the "omp declare
	     target link" attribute still need to be copied, as do maps
	     with the ALWAYS modifier.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

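  /* Second pass: now that all fields and local copies have been installed
     above, remap the decls recorded in the first pass and apply the fixups
     that depend on them.  */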
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for the omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}

/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}

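/* Note on the calling convention above: the caller seeds WI->INFO with a
   pointer to the gf_mask loop kind it is looking for, and on a match the
   found GIMPLE_OMP_FOR is passed back through the same WI->INFO field (see
   add_taskreg_looptemp_clauses below).  */
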
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}

/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}

/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, check for all OMP_CLAUSE_SHARED
     clauses on GIMPLE_OMP_{PARALLEL,TASK} statements whether
     use_pointer_for_field has changed because of that; if it has,
     update the field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move the fields corresponding to the first and second
	     _LOOPTEMP_ clauses to the front.  These are filled in by
	     GOMP_taskloop and thus need to be in specific positions.  */
	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2013 gimple_omp_task_set_arg_size (ctx->stmt, t);
2014 t = build_int_cst (long_integer_type_node,
2015 TYPE_ALIGN_UNIT (ctx->record_type));
2016 gimple_omp_task_set_arg_align (ctx->stmt, t);
2020 /* Find the enclosing offload context. */
2022 static omp_context *
2023 enclosing_target_ctx (omp_context *ctx)
2025 for (; ctx; ctx = ctx->outer)
2026 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2027 break;
2029 return ctx;
2032 /* Return true if ctx is part of an oacc kernels region. */
2034 static bool
2035 ctx_in_oacc_kernels_region (omp_context *ctx)
2037 for (;ctx != NULL; ctx = ctx->outer)
2039 gimple *stmt = ctx->stmt;
2040 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2041 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2042 return true;
2045 return false;
2048 /* Check the parallelism clauses inside a kernels regions.
2049 Until kernels handling moves to use the same loop indirection
2050 scheme as parallel, we need to do this checking early. */
2052 static unsigned
2053 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2055 bool checking = true;
2056 unsigned outer_mask = 0;
2057 unsigned this_mask = 0;
2058 bool has_seq = false, has_auto = false;
2060 if (ctx->outer)
2061 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2062 if (!stmt)
2064 checking = false;
2065 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2066 return outer_mask;
2067 stmt = as_a <gomp_for *> (ctx->stmt);
2070 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2072 switch (OMP_CLAUSE_CODE (c))
2074 case OMP_CLAUSE_GANG:
2075 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2076 break;
2077 case OMP_CLAUSE_WORKER:
2078 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2079 break;
2080 case OMP_CLAUSE_VECTOR:
2081 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2082 break;
2083 case OMP_CLAUSE_SEQ:
2084 has_seq = true;
2085 break;
2086 case OMP_CLAUSE_AUTO:
2087 has_auto = true;
2088 break;
2089 default:
2090 break;
2094 if (checking)
2096 if (has_seq && (this_mask || has_auto))
2097 error_at (gimple_location (stmt), "%<seq%> overrides other"
2098 " OpenACC loop specifiers");
2099 else if (has_auto && this_mask)
2100 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2101 " OpenACC loop specifiers");
2103 if (this_mask & outer_mask)
2104 error_at (gimple_location (stmt), "inner loop uses same"
2105 " OpenACC parallelism as containing loop");
2108 return outer_mask | this_mask;
2111 /* Scan a GIMPLE_OMP_FOR. */
2113 static omp_context *
2114 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2116 omp_context *ctx;
2117 size_t i;
2118 tree clauses = gimple_omp_for_clauses (stmt);
2120 ctx = new_omp_context (stmt, outer_ctx);
2122 if (is_gimple_omp_oacc (stmt))
2124 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2126 if (!tgt || is_oacc_parallel (tgt))
2127 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2129 char const *check = NULL;
2131 switch (OMP_CLAUSE_CODE (c))
2133 case OMP_CLAUSE_GANG:
2134 check = "gang";
2135 break;
2137 case OMP_CLAUSE_WORKER:
2138 check = "worker";
2139 break;
2141 case OMP_CLAUSE_VECTOR:
2142 check = "vector";
2143 break;
2145 default:
2146 break;
2149 if (check && OMP_CLAUSE_OPERAND (c, 0))
2150 error_at (gimple_location (stmt),
2151 "argument not permitted on %qs clause in"
2152 " OpenACC %<parallel%>", check);
2155 if (tgt && is_oacc_kernels (tgt))
2157 /* Strip out reductions, as they are not handled yet. */
2158 tree *prev_ptr = &clauses;
2160 while (tree probe = *prev_ptr)
2162 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2164 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2165 *prev_ptr = *next_ptr;
2166 else
2167 prev_ptr = next_ptr;
2170 gimple_omp_for_set_clauses (stmt, clauses);
2171 check_oacc_kernel_gwv (stmt, ctx);
2175 scan_sharing_clauses (clauses, ctx);
2177 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2178 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2180 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2181 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2182 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2183 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2185 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2186 return ctx;
2189 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2191 static void
2192 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2193 omp_context *outer_ctx)
2195 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2196 gsi_replace (gsi, bind, false);
2197 gimple_seq seq = NULL;
2198 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2199 tree cond = create_tmp_var_raw (integer_type_node);
2200 DECL_CONTEXT (cond) = current_function_decl;
2201 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2202 gimple_bind_set_vars (bind, cond);
2203 gimple_call_set_lhs (g, cond);
2204 gimple_seq_add_stmt (&seq, g);
2205 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2206 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2207 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2208 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2209 gimple_seq_add_stmt (&seq, g);
2210 g = gimple_build_label (lab1);
2211 gimple_seq_add_stmt (&seq, g);
2212 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2213 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2214 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2215 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2216 gimple_omp_for_set_clauses (new_stmt, clause);
2217 gimple_seq_add_stmt (&seq, new_stmt);
2218 g = gimple_build_goto (lab3);
2219 gimple_seq_add_stmt (&seq, g);
2220 g = gimple_build_label (lab2);
2221 gimple_seq_add_stmt (&seq, g);
2222 gimple_seq_add_stmt (&seq, stmt);
2223 g = gimple_build_label (lab3);
2224 gimple_seq_add_stmt (&seq, g);
2225 gimple_bind_set_body (bind, seq);
2226 update_stmt (bind);
2227 scan_omp_for (new_stmt, outer_ctx);
2228 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2231 /* Scan an OpenMP sections directive. */
2233 static void
2234 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2236 omp_context *ctx;
2238 ctx = new_omp_context (stmt, outer_ctx);
2239 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2240 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2243 /* Scan an OpenMP single directive. */
2245 static void
2246 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2248 omp_context *ctx;
2249 tree name;
2251 ctx = new_omp_context (stmt, outer_ctx);
2252 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2253 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2254 name = create_tmp_var_name (".omp_copy_s");
2255 name = build_decl (gimple_location (stmt),
2256 TYPE_DECL, name, ctx->record_type);
2257 TYPE_NAME (ctx->record_type) = name;
2259 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2260 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2262 if (TYPE_FIELDS (ctx->record_type) == NULL)
2263 ctx->record_type = NULL;
2264 else
2265 layout_type (ctx->record_type);
2268 /* Scan a GIMPLE_OMP_TARGET. */
2270 static void
2271 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2273 omp_context *ctx;
2274 tree name;
2275 bool offloaded = is_gimple_omp_offloaded (stmt);
2276 tree clauses = gimple_omp_target_clauses (stmt);
2278 ctx = new_omp_context (stmt, outer_ctx);
2279 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2280 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2281 name = create_tmp_var_name (".omp_data_t");
2282 name = build_decl (gimple_location (stmt),
2283 TYPE_DECL, name, ctx->record_type);
2284 DECL_ARTIFICIAL (name) = 1;
2285 DECL_NAMELESS (name) = 1;
2286 TYPE_NAME (ctx->record_type) = name;
2287 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2289 if (offloaded)
2291 create_omp_child_function (ctx, false);
2292 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2295 scan_sharing_clauses (clauses, ctx);
2296 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2298 if (TYPE_FIELDS (ctx->record_type) == NULL)
2299 ctx->record_type = ctx->receiver_decl = NULL;
2300 else
2302 TYPE_FIELDS (ctx->record_type)
2303 = nreverse (TYPE_FIELDS (ctx->record_type));
2304 if (flag_checking)
2306 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2307 for (tree field = TYPE_FIELDS (ctx->record_type);
2308 field;
2309 field = DECL_CHAIN (field))
2310 gcc_assert (DECL_ALIGN (field) == align);
2312 layout_type (ctx->record_type);
2313 if (offloaded)
2314 fixup_child_record_type (ctx);
2318 /* Scan an OpenMP teams directive. */
2320 static void
2321 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2323 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2324 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2325 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2328 /* Check nesting restrictions. */
2329 static bool
2330 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2332 tree c;
2334 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2335 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2336 the original copy of its contents. */
2337 return true;
2339 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2340 inside an OpenACC CTX. */
2341 if (!(is_gimple_omp (stmt)
2342 && is_gimple_omp_oacc (stmt))
2343 /* Except for atomic codes that we share with OpenMP. */
2344 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2345 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2347 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2349 error_at (gimple_location (stmt),
2350 "non-OpenACC construct inside of OpenACC routine");
2351 return false;
2353 else
2354 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2355 if (is_gimple_omp (octx->stmt)
2356 && is_gimple_omp_oacc (octx->stmt))
2358 error_at (gimple_location (stmt),
2359 "non-OpenACC construct inside of OpenACC region");
2360 return false;
2364 if (ctx != NULL)
2366 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2367 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2369 c = NULL_TREE;
2370 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2372 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2373 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2375 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2376 && (ctx->outer == NULL
2377 || !gimple_omp_for_combined_into_p (ctx->stmt)
2378 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2379 || (gimple_omp_for_kind (ctx->outer->stmt)
2380 != GF_OMP_FOR_KIND_FOR)
2381 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2383 error_at (gimple_location (stmt),
2384 "%<ordered simd threads%> must be closely "
2385 "nested inside of %<for simd%> region");
2386 return false;
2388 return true;
2391 error_at (gimple_location (stmt),
2392 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2393 " may not be nested inside %<simd%> region");
2394 return false;
2396 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2398 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2399 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2400 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2401 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2403 error_at (gimple_location (stmt),
2404 "only %<distribute%> or %<parallel%> regions are "
2405 "allowed to be strictly nested inside %<teams%> "
2406 "region");
2407 return false;
2411 switch (gimple_code (stmt))
2413 case GIMPLE_OMP_FOR:
2414 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2415 return true;
2416 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2418 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2420 error_at (gimple_location (stmt),
2421 "%<distribute%> region must be strictly nested "
2422 "inside %<teams%> construct");
2423 return false;
2425 return true;
2427 /* We split taskloop into task and nested taskloop in it. */
2428 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2429 return true;
2430 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2432 bool ok = false;
2434 if (ctx)
2435 switch (gimple_code (ctx->stmt))
2437 case GIMPLE_OMP_FOR:
2438 ok = (gimple_omp_for_kind (ctx->stmt)
2439 == GF_OMP_FOR_KIND_OACC_LOOP);
2440 break;
2442 case GIMPLE_OMP_TARGET:
2443 switch (gimple_omp_target_kind (ctx->stmt))
2445 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2446 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2447 ok = true;
2448 break;
2450 default:
2451 break;
2454 default:
2455 break;
2457 else if (oacc_get_fn_attrib (current_function_decl))
2458 ok = true;
2459 if (!ok)
2461 error_at (gimple_location (stmt),
2462 "OpenACC loop directive must be associated with"
2463 " an OpenACC compute region");
2464 return false;
2467 /* FALLTHRU */
2468 case GIMPLE_CALL:
2469 if (is_gimple_call (stmt)
2470 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2471 == BUILT_IN_GOMP_CANCEL
2472 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2473 == BUILT_IN_GOMP_CANCELLATION_POINT))
2475 const char *bad = NULL;
2476 const char *kind = NULL;
2477 const char *construct
2478 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2479 == BUILT_IN_GOMP_CANCEL)
2480 ? "#pragma omp cancel"
2481 : "#pragma omp cancellation point";
2482 if (ctx == NULL)
2484 error_at (gimple_location (stmt), "orphaned %qs construct",
2485 construct);
2486 return false;
2488 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2489 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2490 : 0)
2492 case 1:
2493 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2494 bad = "#pragma omp parallel";
2495 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2496 == BUILT_IN_GOMP_CANCEL
2497 && !integer_zerop (gimple_call_arg (stmt, 1)))
2498 ctx->cancellable = true;
2499 kind = "parallel";
2500 break;
2501 case 2:
2502 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2503 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2504 bad = "#pragma omp for";
2505 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2506 == BUILT_IN_GOMP_CANCEL
2507 && !integer_zerop (gimple_call_arg (stmt, 1)))
2509 ctx->cancellable = true;
2510 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2511 OMP_CLAUSE_NOWAIT))
2512 warning_at (gimple_location (stmt), 0,
2513 "%<#pragma omp cancel for%> inside "
2514 "%<nowait%> for construct");
2515 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2516 OMP_CLAUSE_ORDERED))
2517 warning_at (gimple_location (stmt), 0,
2518 "%<#pragma omp cancel for%> inside "
2519 "%<ordered%> for construct");
2521 kind = "for";
2522 break;
2523 case 4:
2524 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2525 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2526 bad = "#pragma omp sections";
2527 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2528 == BUILT_IN_GOMP_CANCEL
2529 && !integer_zerop (gimple_call_arg (stmt, 1)))
2531 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2533 ctx->cancellable = true;
2534 if (omp_find_clause (gimple_omp_sections_clauses
2535 (ctx->stmt),
2536 OMP_CLAUSE_NOWAIT))
2537 warning_at (gimple_location (stmt), 0,
2538 "%<#pragma omp cancel sections%> inside "
2539 "%<nowait%> sections construct");
2541 else
2543 gcc_assert (ctx->outer
2544 && gimple_code (ctx->outer->stmt)
2545 == GIMPLE_OMP_SECTIONS);
2546 ctx->outer->cancellable = true;
2547 if (omp_find_clause (gimple_omp_sections_clauses
2548 (ctx->outer->stmt),
2549 OMP_CLAUSE_NOWAIT))
2550 warning_at (gimple_location (stmt), 0,
2551 "%<#pragma omp cancel sections%> inside "
2552 "%<nowait%> sections construct");
2555 kind = "sections";
2556 break;
2557 case 8:
2558 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2559 bad = "#pragma omp task";
2560 else
2562 for (omp_context *octx = ctx->outer;
2563 octx; octx = octx->outer)
2565 switch (gimple_code (octx->stmt))
2567 case GIMPLE_OMP_TASKGROUP:
2568 break;
2569 case GIMPLE_OMP_TARGET:
2570 if (gimple_omp_target_kind (octx->stmt)
2571 != GF_OMP_TARGET_KIND_REGION)
2572 continue;
2573 /* FALLTHRU */
2574 case GIMPLE_OMP_PARALLEL:
2575 case GIMPLE_OMP_TEAMS:
2576 error_at (gimple_location (stmt),
2577 "%<%s taskgroup%> construct not closely "
2578 "nested inside of %<taskgroup%> region",
2579 construct);
2580 return false;
2581 default:
2582 continue;
2584 break;
2586 ctx->cancellable = true;
2588 kind = "taskgroup";
2589 break;
2590 default:
2591 error_at (gimple_location (stmt), "invalid arguments");
2592 return false;
2594 if (bad)
2596 error_at (gimple_location (stmt),
2597 "%<%s %s%> construct not closely nested inside of %qs",
2598 construct, kind, bad);
2599 return false;
2602 /* FALLTHRU */
2603 case GIMPLE_OMP_SECTIONS:
2604 case GIMPLE_OMP_SINGLE:
2605 for (; ctx != NULL; ctx = ctx->outer)
2606 switch (gimple_code (ctx->stmt))
2608 case GIMPLE_OMP_FOR:
2609 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2610 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2611 break;
2612 /* FALLTHRU */
2613 case GIMPLE_OMP_SECTIONS:
2614 case GIMPLE_OMP_SINGLE:
2615 case GIMPLE_OMP_ORDERED:
2616 case GIMPLE_OMP_MASTER:
2617 case GIMPLE_OMP_TASK:
2618 case GIMPLE_OMP_CRITICAL:
2619 if (is_gimple_call (stmt))
2621 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2622 != BUILT_IN_GOMP_BARRIER)
2623 return true;
2624 error_at (gimple_location (stmt),
2625 "barrier region may not be closely nested inside "
2626 "of work-sharing, %<critical%>, %<ordered%>, "
2627 "%<master%>, explicit %<task%> or %<taskloop%> "
2628 "region");
2629 return false;
2631 error_at (gimple_location (stmt),
2632 "work-sharing region may not be closely nested inside "
2633 "of work-sharing, %<critical%>, %<ordered%>, "
2634 "%<master%>, explicit %<task%> or %<taskloop%> region");
2635 return false;
2636 case GIMPLE_OMP_PARALLEL:
2637 case GIMPLE_OMP_TEAMS:
2638 return true;
2639 case GIMPLE_OMP_TARGET:
2640 if (gimple_omp_target_kind (ctx->stmt)
2641 == GF_OMP_TARGET_KIND_REGION)
2642 return true;
2643 break;
2644 default:
2645 break;
2647 break;
2648 case GIMPLE_OMP_MASTER:
2649 for (; ctx != NULL; ctx = ctx->outer)
2650 switch (gimple_code (ctx->stmt))
2652 case GIMPLE_OMP_FOR:
2653 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2654 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2655 break;
2656 /* FALLTHRU */
2657 case GIMPLE_OMP_SECTIONS:
2658 case GIMPLE_OMP_SINGLE:
2659 case GIMPLE_OMP_TASK:
2660 error_at (gimple_location (stmt),
2661 "%<master%> region may not be closely nested inside "
2662 "of work-sharing, explicit %<task%> or %<taskloop%> "
2663 "region");
2664 return false;
2665 case GIMPLE_OMP_PARALLEL:
2666 case GIMPLE_OMP_TEAMS:
2667 return true;
2668 case GIMPLE_OMP_TARGET:
2669 if (gimple_omp_target_kind (ctx->stmt)
2670 == GF_OMP_TARGET_KIND_REGION)
2671 return true;
2672 break;
2673 default:
2674 break;
2676 break;
2677 case GIMPLE_OMP_TASK:
2678 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2679 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2680 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2681 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2683 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2684 error_at (OMP_CLAUSE_LOCATION (c),
2685 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2686 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2687 return false;
2689 break;
2690 case GIMPLE_OMP_ORDERED:
2691 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2692 c; c = OMP_CLAUSE_CHAIN (c))
2694 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2696 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2697 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2698 continue;
2700 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2701 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2702 || kind == OMP_CLAUSE_DEPEND_SINK)
2704 tree oclause;
2705 /* Look for containing ordered(N) loop. */
2706 if (ctx == NULL
2707 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2708 || (oclause
2709 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2710 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2712 error_at (OMP_CLAUSE_LOCATION (c),
2713 "%<ordered%> construct with %<depend%> clause "
2714 "must be closely nested inside an %<ordered%> "
2715 "loop");
2716 return false;
2718 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2720 error_at (OMP_CLAUSE_LOCATION (c),
2721 "%<ordered%> construct with %<depend%> clause "
2722 "must be closely nested inside a loop with "
2723 "%<ordered%> clause with a parameter");
2724 return false;
2727 else
2729 error_at (OMP_CLAUSE_LOCATION (c),
2730 "invalid depend kind in omp %<ordered%> %<depend%>");
2731 return false;
2734 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2735 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2737 /* ordered simd must be closely nested inside of simd region,
2738 and simd region must not encounter constructs other than
2739 ordered simd, therefore ordered simd may be either orphaned,
2740 or ctx->stmt must be simd. The latter case is handled already
2741 earlier. */
2742 if (ctx != NULL)
2744 error_at (gimple_location (stmt),
2745 "%<ordered%> %<simd%> must be closely nested inside "
2746 "%<simd%> region");
2747 return false;
2750 for (; ctx != NULL; ctx = ctx->outer)
2751 switch (gimple_code (ctx->stmt))
2753 case GIMPLE_OMP_CRITICAL:
2754 case GIMPLE_OMP_TASK:
2755 case GIMPLE_OMP_ORDERED:
2756 ordered_in_taskloop:
2757 error_at (gimple_location (stmt),
2758 "%<ordered%> region may not be closely nested inside "
2759 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2760 "%<taskloop%> region");
2761 return false;
2762 case GIMPLE_OMP_FOR:
2763 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2764 goto ordered_in_taskloop;
2765 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2766 OMP_CLAUSE_ORDERED) == NULL)
2768 error_at (gimple_location (stmt),
2769 "%<ordered%> region must be closely nested inside "
2770 "a loop region with an %<ordered%> clause");
2771 return false;
2773 return true;
2774 case GIMPLE_OMP_TARGET:
2775 if (gimple_omp_target_kind (ctx->stmt)
2776 != GF_OMP_TARGET_KIND_REGION)
2777 break;
2778 /* FALLTHRU */
2779 case GIMPLE_OMP_PARALLEL:
2780 case GIMPLE_OMP_TEAMS:
2781 error_at (gimple_location (stmt),
2782 "%<ordered%> region must be closely nested inside "
2783 "a loop region with an %<ordered%> clause");
2784 return false;
2785 default:
2786 break;
2788 break;
2789 case GIMPLE_OMP_CRITICAL:
2791 tree this_stmt_name
2792 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2793 for (; ctx != NULL; ctx = ctx->outer)
2794 if (gomp_critical *other_crit
2795 = dyn_cast <gomp_critical *> (ctx->stmt))
2796 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2798 error_at (gimple_location (stmt),
2799 "%<critical%> region may not be nested inside "
2800 "a %<critical%> region with the same name");
2801 return false;
2804 break;
2805 case GIMPLE_OMP_TEAMS:
2806 if (ctx == NULL
2807 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2808 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2810 error_at (gimple_location (stmt),
2811 "%<teams%> construct not closely nested inside of "
2812 "%<target%> construct");
2813 return false;
2815 break;
2816 case GIMPLE_OMP_TARGET:
2817 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2819 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2820 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2822 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2823 error_at (OMP_CLAUSE_LOCATION (c),
2824 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2825 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2826 return false;
2828 if (is_gimple_omp_offloaded (stmt)
2829 && oacc_get_fn_attrib (cfun->decl) != NULL)
2831 error_at (gimple_location (stmt),
2832 "OpenACC region inside of OpenACC routine, nested "
2833 "parallelism not supported yet");
2834 return false;
2836 for (; ctx != NULL; ctx = ctx->outer)
2838 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2840 if (is_gimple_omp (stmt)
2841 && is_gimple_omp_oacc (stmt)
2842 && is_gimple_omp (ctx->stmt))
2844 error_at (gimple_location (stmt),
2845 "OpenACC construct inside of non-OpenACC region");
2846 return false;
2848 continue;
2851 const char *stmt_name, *ctx_stmt_name;
2852 switch (gimple_omp_target_kind (stmt))
2854 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2855 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2856 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2857 case GF_OMP_TARGET_KIND_ENTER_DATA:
2858 stmt_name = "target enter data"; break;
2859 case GF_OMP_TARGET_KIND_EXIT_DATA:
2860 stmt_name = "target exit data"; break;
2861 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2862 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2863 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2864 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2865 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2866 stmt_name = "enter/exit data"; break;
2867 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2868 break;
2869 default: gcc_unreachable ();
2871 switch (gimple_omp_target_kind (ctx->stmt))
2873 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2874 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2875 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2876 ctx_stmt_name = "parallel"; break;
2877 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2878 ctx_stmt_name = "kernels"; break;
2879 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2880 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2881 ctx_stmt_name = "host_data"; break;
2882 default: gcc_unreachable ();
2885 /* OpenACC/OpenMP mismatch? */
2886 if (is_gimple_omp_oacc (stmt)
2887 != is_gimple_omp_oacc (ctx->stmt))
2889 error_at (gimple_location (stmt),
2890 "%s %qs construct inside of %s %qs region",
2891 (is_gimple_omp_oacc (stmt)
2892 ? "OpenACC" : "OpenMP"), stmt_name,
2893 (is_gimple_omp_oacc (ctx->stmt)
2894 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2895 return false;
2897 if (is_gimple_omp_offloaded (ctx->stmt))
2899 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2900 if (is_gimple_omp_oacc (ctx->stmt))
2902 error_at (gimple_location (stmt),
2903 "%qs construct inside of %qs region",
2904 stmt_name, ctx_stmt_name);
2905 return false;
2907 else
2909 warning_at (gimple_location (stmt), 0,
2910 "%qs construct inside of %qs region",
2911 stmt_name, ctx_stmt_name);
2915 break;
2916 default:
2917 break;
2919 return true;
2923 /* Helper function scan_omp.
2925 Callback for walk_tree or operators in walk_gimple_stmt used to
2926 scan for OMP directives in TP. */
2928 static tree
2929 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
2931 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2932 omp_context *ctx = (omp_context *) wi->info;
2933 tree t = *tp;
2935 switch (TREE_CODE (t))
2937 case VAR_DECL:
2938 case PARM_DECL:
2939 case LABEL_DECL:
2940 case RESULT_DECL:
2941 if (ctx)
2943 tree repl = remap_decl (t, &ctx->cb);
2944 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
2945 *tp = repl;
2947 break;
2949 default:
2950 if (ctx && TYPE_P (t))
2951 *tp = remap_type (t, &ctx->cb);
2952 else if (!DECL_P (t))
2954 *walk_subtrees = 1;
2955 if (ctx)
2957 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
2958 if (tem != TREE_TYPE (t))
2960 if (TREE_CODE (t) == INTEGER_CST)
2961 *tp = wide_int_to_tree (tem, wi::to_wide (t));
2962 else
2963 TREE_TYPE (t) = tem;
2967 break;
2970 return NULL_TREE;
2973 /* Return true if FNDECL is a setjmp or a longjmp. */
2975 static bool
2976 setjmp_or_longjmp_p (const_tree fndecl)
2978 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2979 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
2980 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
2981 return true;
2983 tree declname = DECL_NAME (fndecl);
2984 if (!declname)
2985 return false;
2986 const char *name = IDENTIFIER_POINTER (declname);
2987 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
2991 /* Helper function for scan_omp.
2993 Callback for walk_gimple_stmt used to scan for OMP directives in
2994 the current statement in GSI. */
2996 static tree
2997 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2998 struct walk_stmt_info *wi)
3000 gimple *stmt = gsi_stmt (*gsi);
3001 omp_context *ctx = (omp_context *) wi->info;
3003 if (gimple_has_location (stmt))
3004 input_location = gimple_location (stmt);
3006 /* Check the nesting restrictions. */
3007 bool remove = false;
3008 if (is_gimple_omp (stmt))
3009 remove = !check_omp_nesting_restrictions (stmt, ctx);
3010 else if (is_gimple_call (stmt))
3012 tree fndecl = gimple_call_fndecl (stmt);
3013 if (fndecl)
3015 if (setjmp_or_longjmp_p (fndecl)
3016 && ctx
3017 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3018 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3020 remove = true;
3021 error_at (gimple_location (stmt),
3022 "setjmp/longjmp inside simd construct");
3024 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3025 switch (DECL_FUNCTION_CODE (fndecl))
3027 case BUILT_IN_GOMP_BARRIER:
3028 case BUILT_IN_GOMP_CANCEL:
3029 case BUILT_IN_GOMP_CANCELLATION_POINT:
3030 case BUILT_IN_GOMP_TASKYIELD:
3031 case BUILT_IN_GOMP_TASKWAIT:
3032 case BUILT_IN_GOMP_TASKGROUP_START:
3033 case BUILT_IN_GOMP_TASKGROUP_END:
3034 remove = !check_omp_nesting_restrictions (stmt, ctx);
3035 break;
3036 default:
3037 break;
3041 if (remove)
3043 stmt = gimple_build_nop ();
3044 gsi_replace (gsi, stmt, false);
3047 *handled_ops_p = true;
3049 switch (gimple_code (stmt))
3051 case GIMPLE_OMP_PARALLEL:
3052 taskreg_nesting_level++;
3053 scan_omp_parallel (gsi, ctx);
3054 taskreg_nesting_level--;
3055 break;
3057 case GIMPLE_OMP_TASK:
3058 taskreg_nesting_level++;
3059 scan_omp_task (gsi, ctx);
3060 taskreg_nesting_level--;
3061 break;
3063 case GIMPLE_OMP_FOR:
3064 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3065 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3066 && omp_maybe_offloaded_ctx (ctx)
3067 && omp_max_simt_vf ())
3068 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3069 else
3070 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3071 break;
3073 case GIMPLE_OMP_SECTIONS:
3074 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3075 break;
3077 case GIMPLE_OMP_SINGLE:
3078 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3079 break;
3081 case GIMPLE_OMP_SECTION:
3082 case GIMPLE_OMP_MASTER:
3083 case GIMPLE_OMP_TASKGROUP:
3084 case GIMPLE_OMP_ORDERED:
3085 case GIMPLE_OMP_CRITICAL:
3086 case GIMPLE_OMP_GRID_BODY:
3087 ctx = new_omp_context (stmt, ctx);
3088 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3089 break;
3091 case GIMPLE_OMP_TARGET:
3092 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3093 break;
3095 case GIMPLE_OMP_TEAMS:
3096 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3097 break;
3099 case GIMPLE_BIND:
3101 tree var;
3103 *handled_ops_p = false;
3104 if (ctx)
3105 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3106 var ;
3107 var = DECL_CHAIN (var))
3108 insert_decl_map (&ctx->cb, var, var);
3110 break;
3111 default:
3112 *handled_ops_p = false;
3113 break;
3116 return NULL_TREE;
3120 /* Scan all the statements starting at the current statement. CTX
3121 contains context information about the OMP directives and
3122 clauses found during the scan. */
3124 static void
3125 scan_omp (gimple_seq *body_p, omp_context *ctx)
3127 location_t saved_location;
3128 struct walk_stmt_info wi;
3130 memset (&wi, 0, sizeof (wi));
3131 wi.info = ctx;
3132 wi.want_locations = true;
3134 saved_location = input_location;
3135 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3136 input_location = saved_location;
3139 /* Re-gimplification and code generation routines. */
3141 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3142 of BIND if in a method. */
3144 static void
3145 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3147 if (DECL_ARGUMENTS (current_function_decl)
3148 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3149 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3150 == POINTER_TYPE))
3152 tree vars = gimple_bind_vars (bind);
3153 for (tree *pvar = &vars; *pvar; )
3154 if (omp_member_access_dummy_var (*pvar))
3155 *pvar = DECL_CHAIN (*pvar);
3156 else
3157 pvar = &DECL_CHAIN (*pvar);
3158 gimple_bind_set_vars (bind, vars);
3162 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3163 block and its subblocks. */
3165 static void
3166 remove_member_access_dummy_vars (tree block)
3168 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3169 if (omp_member_access_dummy_var (*pvar))
3170 *pvar = DECL_CHAIN (*pvar);
3171 else
3172 pvar = &DECL_CHAIN (*pvar);
3174 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3175 remove_member_access_dummy_vars (block);
3178 /* If a context was created for STMT when it was scanned, return it. */
3180 static omp_context *
3181 maybe_lookup_ctx (gimple *stmt)
3183 splay_tree_node n;
3184 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3185 return n ? (omp_context *) n->value : NULL;
3189 /* Find the mapping for DECL in CTX or the immediately enclosing
3190 context that has a mapping for DECL.
3192 If CTX is a nested parallel directive, we may have to use the decl
3193 mappings created in CTX's parent context. Suppose that we have the
3194 following parallel nesting (variable UIDs showed for clarity):
3196 iD.1562 = 0;
3197 #omp parallel shared(iD.1562) -> outer parallel
3198 iD.1562 = iD.1562 + 1;
3200 #omp parallel shared (iD.1562) -> inner parallel
3201 iD.1562 = iD.1562 - 1;
3203 Each parallel structure will create a distinct .omp_data_s structure
3204 for copying iD.1562 in/out of the directive:
3206 outer parallel .omp_data_s.1.i -> iD.1562
3207 inner parallel .omp_data_s.2.i -> iD.1562
3209 A shared variable mapping will produce a copy-out operation before
3210 the parallel directive and a copy-in operation after it. So, in
3211 this case we would have:
3213 iD.1562 = 0;
3214 .omp_data_o.1.i = iD.1562;
3215 #omp parallel shared(iD.1562) -> outer parallel
3216 .omp_data_i.1 = &.omp_data_o.1
3217 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3219 .omp_data_o.2.i = iD.1562; -> **
3220 #omp parallel shared(iD.1562) -> inner parallel
3221 .omp_data_i.2 = &.omp_data_o.2
3222 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3225 ** This is a problem. The symbol iD.1562 cannot be referenced
3226 inside the body of the outer parallel region. But since we are
3227 emitting this copy operation while expanding the inner parallel
3228 directive, we need to access the CTX structure of the outer
3229 parallel directive to get the correct mapping:
3231 .omp_data_o.2.i = .omp_data_i.1->i
3233 Since there may be other workshare or parallel directives enclosing
3234 the parallel directive, it may be necessary to walk up the context
3235 parent chain. This is not a problem in general because nested
3236 parallelism happens only rarely. */
3238 static tree
3239 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3241 tree t;
3242 omp_context *up;
3244 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3245 t = maybe_lookup_decl (decl, up);
3247 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3249 return t ? t : decl;
3253 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3254 in outer contexts. */
3256 static tree
3257 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3259 tree t = NULL;
3260 omp_context *up;
3262 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3263 t = maybe_lookup_decl (decl, up);
3265 return t ? t : decl;
3269 /* Construct the initialization value for reduction operation OP. */
3271 tree
3272 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3274 switch (op)
3276 case PLUS_EXPR:
3277 case MINUS_EXPR:
3278 case BIT_IOR_EXPR:
3279 case BIT_XOR_EXPR:
3280 case TRUTH_OR_EXPR:
3281 case TRUTH_ORIF_EXPR:
3282 case TRUTH_XOR_EXPR:
3283 case NE_EXPR:
3284 return build_zero_cst (type);
3286 case MULT_EXPR:
3287 case TRUTH_AND_EXPR:
3288 case TRUTH_ANDIF_EXPR:
3289 case EQ_EXPR:
3290 return fold_convert_loc (loc, type, integer_one_node);
3292 case BIT_AND_EXPR:
3293 return fold_convert_loc (loc, type, integer_minus_one_node);
3295 case MAX_EXPR:
3296 if (SCALAR_FLOAT_TYPE_P (type))
3298 REAL_VALUE_TYPE max, min;
3299 if (HONOR_INFINITIES (type))
3301 real_inf (&max);
3302 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3304 else
3305 real_maxval (&min, 1, TYPE_MODE (type));
3306 return build_real (type, min);
3308 else if (POINTER_TYPE_P (type))
3310 wide_int min
3311 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3312 return wide_int_to_tree (type, min);
3314 else
3316 gcc_assert (INTEGRAL_TYPE_P (type));
3317 return TYPE_MIN_VALUE (type);
3320 case MIN_EXPR:
3321 if (SCALAR_FLOAT_TYPE_P (type))
3323 REAL_VALUE_TYPE max;
3324 if (HONOR_INFINITIES (type))
3325 real_inf (&max);
3326 else
3327 real_maxval (&max, 0, TYPE_MODE (type));
3328 return build_real (type, max);
3330 else if (POINTER_TYPE_P (type))
3332 wide_int max
3333 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3334 return wide_int_to_tree (type, max);
3336 else
3338 gcc_assert (INTEGRAL_TYPE_P (type));
3339 return TYPE_MAX_VALUE (type);
3342 default:
3343 gcc_unreachable ();
3347 /* Construct the initialization value for reduction CLAUSE. */
3349 tree
3350 omp_reduction_init (tree clause, tree type)
3352 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3353 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3356 /* Return alignment to be assumed for var in CLAUSE, which should be
3357 OMP_CLAUSE_ALIGNED. */
3359 static tree
3360 omp_clause_aligned_alignment (tree clause)
3362 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3363 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3365 /* Otherwise return implementation defined alignment. */
3366 unsigned int al = 1;
3367 opt_scalar_mode mode_iter;
3368 auto_vector_sizes sizes;
3369 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3370 poly_uint64 vs = 0;
3371 for (unsigned int i = 0; i < sizes.length (); ++i)
3372 vs = ordered_max (vs, sizes[i]);
3373 static enum mode_class classes[]
3374 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3375 for (int i = 0; i < 4; i += 2)
3376 /* The for loop above dictates that we only walk through scalar classes. */
3377 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3379 scalar_mode mode = mode_iter.require ();
3380 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3381 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3382 continue;
3383 while (maybe_ne (vs, 0U)
3384 && known_lt (GET_MODE_SIZE (vmode), vs)
3385 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3386 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3388 tree type = lang_hooks.types.type_for_mode (mode, 1);
3389 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3390 continue;
3391 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3392 GET_MODE_SIZE (mode));
3393 type = build_vector_type (type, nelts);
3394 if (TYPE_MODE (type) != vmode)
3395 continue;
3396 if (TYPE_ALIGN_UNIT (type) > al)
3397 al = TYPE_ALIGN_UNIT (type);
3399 return build_int_cst (integer_type_node, al);
3403 /* This structure is part of the interface between lower_rec_simd_input_clauses
3404 and lower_rec_input_clauses. */
3406 struct omplow_simd_context {
3407 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3408 tree idx;
3409 tree lane;
3410 vec<tree, va_heap> simt_eargs;
3411 gimple_seq simt_dlist;
3412 poly_uint64_pod max_vf;
3413 bool is_simt;
3416 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3417 privatization. */
3419 static bool
3420 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3421 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3423 if (known_eq (sctx->max_vf, 0U))
3425 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3426 if (maybe_gt (sctx->max_vf, 1U))
3428 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3429 OMP_CLAUSE_SAFELEN);
3430 if (c)
3432 poly_uint64 safe_len;
3433 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3434 || maybe_lt (safe_len, 1U))
3435 sctx->max_vf = 1;
3436 else
3437 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3440 if (maybe_gt (sctx->max_vf, 1U))
3442 sctx->idx = create_tmp_var (unsigned_type_node);
3443 sctx->lane = create_tmp_var (unsigned_type_node);
3446 if (known_eq (sctx->max_vf, 1U))
3447 return false;
3449 if (sctx->is_simt)
3451 if (is_gimple_reg (new_var))
3453 ivar = lvar = new_var;
3454 return true;
3456 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3457 ivar = lvar = create_tmp_var (type);
3458 TREE_ADDRESSABLE (ivar) = 1;
3459 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3460 NULL, DECL_ATTRIBUTES (ivar));
3461 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3462 tree clobber = build_constructor (type, NULL);
3463 TREE_THIS_VOLATILE (clobber) = 1;
3464 gimple *g = gimple_build_assign (ivar, clobber);
3465 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3467 else
3469 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3470 tree avar = create_tmp_var_raw (atype);
3471 if (TREE_ADDRESSABLE (new_var))
3472 TREE_ADDRESSABLE (avar) = 1;
3473 DECL_ATTRIBUTES (avar)
3474 = tree_cons (get_identifier ("omp simd array"), NULL,
3475 DECL_ATTRIBUTES (avar));
3476 gimple_add_tmp_var (avar);
3477 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3478 NULL_TREE, NULL_TREE);
3479 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3480 NULL_TREE, NULL_TREE);
3482 if (DECL_P (new_var))
3484 SET_DECL_VALUE_EXPR (new_var, lvar);
3485 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3487 return true;
3490 /* Helper function of lower_rec_input_clauses. For a reference
3491 in simd reduction, add an underlying variable it will reference. */
3493 static void
3494 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3496 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3497 if (TREE_CONSTANT (z))
3499 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3500 get_name (new_vard));
3501 gimple_add_tmp_var (z);
3502 TREE_ADDRESSABLE (z) = 1;
3503 z = build_fold_addr_expr_loc (loc, z);
3504 gimplify_assign (new_vard, z, ilist);
3508 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3509 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3510 private variables. Initialization statements go in ILIST, while calls
3511 to destructors go in DLIST. */
3513 static void
3514 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3515 omp_context *ctx, struct omp_for_data *fd)
3517 tree c, dtor, copyin_seq, x, ptr;
3518 bool copyin_by_ref = false;
3519 bool lastprivate_firstprivate = false;
3520 bool reduction_omp_orig_ref = false;
3521 int pass;
3522 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3523 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3524 omplow_simd_context sctx = omplow_simd_context ();
3525 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3526 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3527 gimple_seq llist[3] = { };
3529 copyin_seq = NULL;
3530 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3532 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3533 with data sharing clauses referencing variable sized vars. That
3534 is unnecessarily hard to support and very unlikely to result in
3535 vectorized code anyway. */
3536 if (is_simd)
3537 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3538 switch (OMP_CLAUSE_CODE (c))
3540 case OMP_CLAUSE_LINEAR:
3541 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3542 sctx.max_vf = 1;
3543 /* FALLTHRU */
3544 case OMP_CLAUSE_PRIVATE:
3545 case OMP_CLAUSE_FIRSTPRIVATE:
3546 case OMP_CLAUSE_LASTPRIVATE:
3547 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3548 sctx.max_vf = 1;
3549 break;
3550 case OMP_CLAUSE_REDUCTION:
3551 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3552 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3553 sctx.max_vf = 1;
3554 break;
3555 default:
3556 continue;
3559 /* Add a placeholder for simduid. */
3560 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3561 sctx.simt_eargs.safe_push (NULL_TREE);
3563 /* Do all the fixed sized types in the first pass, and the variable sized
3564 types in the second pass. This makes sure that the scalar arguments to
3565 the variable sized types are processed before we use them in the
3566 variable sized operations. */
3567 for (pass = 0; pass < 2; ++pass)
3569 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3571 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3572 tree var, new_var;
3573 bool by_ref;
3574 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3576 switch (c_kind)
3578 case OMP_CLAUSE_PRIVATE:
3579 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3580 continue;
3581 break;
3582 case OMP_CLAUSE_SHARED:
3583 /* Ignore shared directives in teams construct. */
3584 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3585 continue;
3586 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3588 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3589 || is_global_var (OMP_CLAUSE_DECL (c)));
3590 continue;
3592 case OMP_CLAUSE_FIRSTPRIVATE:
3593 case OMP_CLAUSE_COPYIN:
3594 break;
3595 case OMP_CLAUSE_LINEAR:
3596 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3597 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3598 lastprivate_firstprivate = true;
3599 break;
3600 case OMP_CLAUSE_REDUCTION:
3601 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3602 reduction_omp_orig_ref = true;
3603 break;
3604 case OMP_CLAUSE__LOOPTEMP_:
3605 /* Handle _looptemp_ clauses only on parallel/task. */
3606 if (fd)
3607 continue;
3608 break;
3609 case OMP_CLAUSE_LASTPRIVATE:
3610 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3612 lastprivate_firstprivate = true;
3613 if (pass != 0 || is_taskloop_ctx (ctx))
3614 continue;
3616 /* Even without corresponding firstprivate, if
3617 decl is Fortran allocatable, it needs outer var
3618 reference. */
3619 else if (pass == 0
3620 && lang_hooks.decls.omp_private_outer_ref
3621 (OMP_CLAUSE_DECL (c)))
3622 lastprivate_firstprivate = true;
3623 break;
3624 case OMP_CLAUSE_ALIGNED:
3625 if (pass == 0)
3626 continue;
3627 var = OMP_CLAUSE_DECL (c);
3628 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3629 && !is_global_var (var))
3631 new_var = maybe_lookup_decl (var, ctx);
3632 if (new_var == NULL_TREE)
3633 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3634 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3635 tree alarg = omp_clause_aligned_alignment (c);
3636 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3637 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3638 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3639 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3640 gimplify_and_add (x, ilist);
3642 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3643 && is_global_var (var))
3645 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3646 new_var = lookup_decl (var, ctx);
3647 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3648 t = build_fold_addr_expr_loc (clause_loc, t);
3649 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3650 tree alarg = omp_clause_aligned_alignment (c);
3651 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3652 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3653 t = fold_convert_loc (clause_loc, ptype, t);
3654 x = create_tmp_var (ptype);
3655 t = build2 (MODIFY_EXPR, ptype, x, t);
3656 gimplify_and_add (t, ilist);
3657 t = build_simple_mem_ref_loc (clause_loc, x);
3658 SET_DECL_VALUE_EXPR (new_var, t);
3659 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3661 continue;
3662 default:
3663 continue;
3666 new_var = var = OMP_CLAUSE_DECL (c);
3667 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3669 var = TREE_OPERAND (var, 0);
3670 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3671 var = TREE_OPERAND (var, 0);
3672 if (TREE_CODE (var) == INDIRECT_REF
3673 || TREE_CODE (var) == ADDR_EXPR)
3674 var = TREE_OPERAND (var, 0);
3675 if (is_variable_sized (var))
3677 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3678 var = DECL_VALUE_EXPR (var);
3679 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3680 var = TREE_OPERAND (var, 0);
3681 gcc_assert (DECL_P (var));
3683 new_var = var;
3685 if (c_kind != OMP_CLAUSE_COPYIN)
3686 new_var = lookup_decl (var, ctx);
3688 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3690 if (pass != 0)
3691 continue;
3693 /* C/C++ array section reductions. */
3694 else if (c_kind == OMP_CLAUSE_REDUCTION
3695 && var != OMP_CLAUSE_DECL (c))
3697 if (pass == 0)
3698 continue;
3700 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3701 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3702 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3704 tree b = TREE_OPERAND (orig_var, 1);
3705 b = maybe_lookup_decl (b, ctx);
3706 if (b == NULL)
3708 b = TREE_OPERAND (orig_var, 1);
3709 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3711 if (integer_zerop (bias))
3712 bias = b;
3713 else
3715 bias = fold_convert_loc (clause_loc,
3716 TREE_TYPE (b), bias);
3717 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3718 TREE_TYPE (b), b, bias);
3720 orig_var = TREE_OPERAND (orig_var, 0);
3722 if (TREE_CODE (orig_var) == INDIRECT_REF
3723 || TREE_CODE (orig_var) == ADDR_EXPR)
3724 orig_var = TREE_OPERAND (orig_var, 0);
3725 tree d = OMP_CLAUSE_DECL (c);
3726 tree type = TREE_TYPE (d);
3727 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3728 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3729 const char *name = get_name (orig_var);
3730 if (TREE_CONSTANT (v))
3732 x = create_tmp_var_raw (type, name);
3733 gimple_add_tmp_var (x);
3734 TREE_ADDRESSABLE (x) = 1;
3735 x = build_fold_addr_expr_loc (clause_loc, x);
3737 else
3739 tree atmp
3740 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3741 tree t = maybe_lookup_decl (v, ctx);
3742 if (t)
3743 v = t;
3744 else
3745 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3746 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3747 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3748 TREE_TYPE (v), v,
3749 build_int_cst (TREE_TYPE (v), 1));
3750 t = fold_build2_loc (clause_loc, MULT_EXPR,
3751 TREE_TYPE (v), t,
3752 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3753 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3754 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3757 tree ptype = build_pointer_type (TREE_TYPE (type));
3758 x = fold_convert_loc (clause_loc, ptype, x);
3759 tree y = create_tmp_var (ptype, name);
3760 gimplify_assign (y, x, ilist);
3761 x = y;
3762 tree yb = y;
3764 if (!integer_zerop (bias))
3766 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3767 bias);
3768 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3770 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3771 pointer_sized_int_node, yb, bias);
3772 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3773 yb = create_tmp_var (ptype, name);
3774 gimplify_assign (yb, x, ilist);
3775 x = yb;
3778 d = TREE_OPERAND (d, 0);
3779 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3780 d = TREE_OPERAND (d, 0);
3781 if (TREE_CODE (d) == ADDR_EXPR)
3783 if (orig_var != var)
3785 gcc_assert (is_variable_sized (orig_var));
3786 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3788 gimplify_assign (new_var, x, ilist);
3789 tree new_orig_var = lookup_decl (orig_var, ctx);
3790 tree t = build_fold_indirect_ref (new_var);
3791 DECL_IGNORED_P (new_var) = 0;
3792 TREE_THIS_NOTRAP (t);
3793 SET_DECL_VALUE_EXPR (new_orig_var, t);
3794 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3796 else
3798 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3799 build_int_cst (ptype, 0));
3800 SET_DECL_VALUE_EXPR (new_var, x);
3801 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3804 else
3806 gcc_assert (orig_var == var);
3807 if (TREE_CODE (d) == INDIRECT_REF)
3809 x = create_tmp_var (ptype, name);
3810 TREE_ADDRESSABLE (x) = 1;
3811 gimplify_assign (x, yb, ilist);
3812 x = build_fold_addr_expr_loc (clause_loc, x);
3814 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3815 gimplify_assign (new_var, x, ilist);
3817 tree y1 = create_tmp_var (ptype, NULL);
3818 gimplify_assign (y1, y, ilist);
3819 tree i2 = NULL_TREE, y2 = NULL_TREE;
3820 tree body2 = NULL_TREE, end2 = NULL_TREE;
3821 tree y3 = NULL_TREE, y4 = NULL_TREE;
3822 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3824 y2 = create_tmp_var (ptype, NULL);
3825 gimplify_assign (y2, y, ilist);
3826 tree ref = build_outer_var_ref (var, ctx);
3827 /* For ref build_outer_var_ref already performs this. */
3828 if (TREE_CODE (d) == INDIRECT_REF)
3829 gcc_assert (omp_is_reference (var));
3830 else if (TREE_CODE (d) == ADDR_EXPR)
3831 ref = build_fold_addr_expr (ref);
3832 else if (omp_is_reference (var))
3833 ref = build_fold_addr_expr (ref);
3834 ref = fold_convert_loc (clause_loc, ptype, ref);
3835 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3836 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3838 y3 = create_tmp_var (ptype, NULL);
3839 gimplify_assign (y3, unshare_expr (ref), ilist);
3841 if (is_simd)
3843 y4 = create_tmp_var (ptype, NULL);
3844 gimplify_assign (y4, ref, dlist);
3847 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3848 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3849 tree body = create_artificial_label (UNKNOWN_LOCATION);
3850 tree end = create_artificial_label (UNKNOWN_LOCATION);
3851 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3852 if (y2)
3854 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3855 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3856 body2 = create_artificial_label (UNKNOWN_LOCATION);
3857 end2 = create_artificial_label (UNKNOWN_LOCATION);
3858 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3860 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3862 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3863 tree decl_placeholder
3864 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3865 SET_DECL_VALUE_EXPR (decl_placeholder,
3866 build_simple_mem_ref (y1));
3867 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3868 SET_DECL_VALUE_EXPR (placeholder,
3869 y3 ? build_simple_mem_ref (y3)
3870 : error_mark_node);
3871 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3872 x = lang_hooks.decls.omp_clause_default_ctor
3873 (c, build_simple_mem_ref (y1),
3874 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3875 if (x)
3876 gimplify_and_add (x, ilist);
3877 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3879 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3880 lower_omp (&tseq, ctx);
3881 gimple_seq_add_seq (ilist, tseq);
3883 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3884 if (is_simd)
3886 SET_DECL_VALUE_EXPR (decl_placeholder,
3887 build_simple_mem_ref (y2));
3888 SET_DECL_VALUE_EXPR (placeholder,
3889 build_simple_mem_ref (y4));
3890 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3891 lower_omp (&tseq, ctx);
3892 gimple_seq_add_seq (dlist, tseq);
3893 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3895 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3896 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3897 x = lang_hooks.decls.omp_clause_dtor
3898 (c, build_simple_mem_ref (y2));
3899 if (x)
3901 gimple_seq tseq = NULL;
3902 dtor = x;
3903 gimplify_stmt (&dtor, &tseq);
3904 gimple_seq_add_seq (dlist, tseq);
3907 else
3909 x = omp_reduction_init (c, TREE_TYPE (type));
3910 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3912 /* reduction(-:var) sums up the partial results, so it
3913 acts identically to reduction(+:var). */
3914 if (code == MINUS_EXPR)
3915 code = PLUS_EXPR;
3917 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3918 if (is_simd)
3920 x = build2 (code, TREE_TYPE (type),
3921 build_simple_mem_ref (y4),
3922 build_simple_mem_ref (y2));
3923 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3926 gimple *g
3927 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3928 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3929 gimple_seq_add_stmt (ilist, g);
3930 if (y3)
3932 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3933 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3934 gimple_seq_add_stmt (ilist, g);
3936 g = gimple_build_assign (i, PLUS_EXPR, i,
3937 build_int_cst (TREE_TYPE (i), 1));
3938 gimple_seq_add_stmt (ilist, g);
3939 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3940 gimple_seq_add_stmt (ilist, g);
3941 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3942 if (y2)
3944 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3945 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3946 gimple_seq_add_stmt (dlist, g);
3947 if (y4)
3949 g = gimple_build_assign
3950 (y4, POINTER_PLUS_EXPR, y4,
3951 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3952 gimple_seq_add_stmt (dlist, g);
3954 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3955 build_int_cst (TREE_TYPE (i2), 1));
3956 gimple_seq_add_stmt (dlist, g);
3957 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3958 gimple_seq_add_stmt (dlist, g);
3959 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3961 continue;
3963 else if (is_variable_sized (var))
3965 /* For variable-sized types, we need to allocate the
3966 actual storage here. Call alloca and store the
3967 result in the pointer decl that we created elsewhere. */
3968 if (pass == 0)
3969 continue;
3971 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3973 gcall *stmt;
3974 tree tmp, atmp;
3976 ptr = DECL_VALUE_EXPR (new_var);
3977 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3978 ptr = TREE_OPERAND (ptr, 0);
3979 gcc_assert (DECL_P (ptr));
3980 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3982 /* void *tmp = __builtin_alloca_with_align (size, DECL_ALIGN (var)); */
3983 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3984 stmt = gimple_build_call (atmp, 2, x,
3985 size_int (DECL_ALIGN (var)));
3986 tmp = create_tmp_var_raw (ptr_type_node);
3987 gimple_add_tmp_var (tmp);
3988 gimple_call_set_lhs (stmt, tmp);
3990 gimple_seq_add_stmt (ilist, stmt);
3992 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
3993 gimplify_assign (ptr, x, ilist);
3996 else if (omp_is_reference (var))
3998 /* For references that are being privatized for Fortran,
3999 allocate new backing storage for the new pointer
4000 variable. This lets us avoid rewriting all the code
4001 that expects a pointer into code that expects a
4002 direct variable. */
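/* Roughly, the code emitted below is (tmp and x' are illustrative
   names, not emitted identifiers):

     type tmp;                // or an alloca for non-constant sizes
     x' = &tmp;               // the new pointer variable
     ... *x' ...              // existing uses keep their pointer shape  */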
4003 if (pass == 0)
4004 continue;
4006 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4007 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4009 x = build_receiver_ref (var, false, ctx);
4010 x = build_fold_addr_expr_loc (clause_loc, x);
4012 else if (TREE_CONSTANT (x))
4014 /* For reduction in a SIMD loop, defer adding the
4015 initialization of the reference, because if we decide
4016 to use a SIMD array for it, the initialization could
4017 cause an expansion ICE. */
4018 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4019 x = NULL_TREE;
4020 else
4022 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4023 get_name (var));
4024 gimple_add_tmp_var (x);
4025 TREE_ADDRESSABLE (x) = 1;
4026 x = build_fold_addr_expr_loc (clause_loc, x);
4029 else
4031 tree atmp
4032 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4033 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4034 tree al = size_int (TYPE_ALIGN (rtype));
4035 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4038 if (x)
4040 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4041 gimplify_assign (new_var, x, ilist);
4044 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4046 else if (c_kind == OMP_CLAUSE_REDUCTION
4047 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4049 if (pass == 0)
4050 continue;
4052 else if (pass != 0)
4053 continue;
4055 switch (OMP_CLAUSE_CODE (c))
4057 case OMP_CLAUSE_SHARED:
4058 /* Ignore shared directives in teams construct. */
4059 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4060 continue;
4061 /* Shared global vars are just accessed directly. */
4062 if (is_global_var (new_var))
4063 break;
4064 /* For taskloop firstprivate/lastprivate, which is represented
4065 as a firstprivate and shared clause on the task, new_var
4066 is the firstprivate var. */
4067 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4068 break;
4069 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4070 needs to be delayed until after fixup_child_record_type so
4071 that we get the correct type during the dereference. */
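/* I.e. uses of NEW_VAR are rewritten into something like
   .omp_data_i->var, or *.omp_data_i->var when passed by reference;
   the exact form is whatever build_receiver_ref returns.  */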
4072 by_ref = use_pointer_for_field (var, ctx);
4073 x = build_receiver_ref (var, by_ref, ctx);
4074 SET_DECL_VALUE_EXPR (new_var, x);
4075 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4077 /* ??? If VAR is not passed by reference, and the variable
4078 hasn't been initialized yet, then we'll get a warning for
4079 the store into the omp_data_s structure. Ideally, we'd be
4080 able to notice this and not store anything at all, but
4081 we're generating code too early. Suppress the warning. */
4082 if (!by_ref)
4083 TREE_NO_WARNING (var) = 1;
4084 break;
4086 case OMP_CLAUSE_LASTPRIVATE:
4087 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4088 break;
4089 /* FALLTHRU */
4091 case OMP_CLAUSE_PRIVATE:
4092 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4093 x = build_outer_var_ref (var, ctx);
4094 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4096 if (is_task_ctx (ctx))
4097 x = build_receiver_ref (var, false, ctx);
4098 else
4099 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4101 else
4102 x = NULL;
4103 do_private:
4104 tree nx;
4105 nx = lang_hooks.decls.omp_clause_default_ctor
4106 (c, unshare_expr (new_var), x);
4107 if (is_simd)
4109 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4110 if ((TREE_ADDRESSABLE (new_var) || nx || y
4111 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4112 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4113 ivar, lvar))
4115 if (nx)
4116 x = lang_hooks.decls.omp_clause_default_ctor
4117 (c, unshare_expr (ivar), x);
4118 if (nx && x)
4119 gimplify_and_add (x, &llist[0]);
4120 if (y)
4122 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4123 if (y)
4125 gimple_seq tseq = NULL;
4127 dtor = y;
4128 gimplify_stmt (&dtor, &tseq);
4129 gimple_seq_add_seq (&llist[1], tseq);
4132 break;
4135 if (nx)
4136 gimplify_and_add (nx, ilist);
4137 /* FALLTHRU */
4139 do_dtor:
4140 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4141 if (x)
4143 gimple_seq tseq = NULL;
4145 dtor = x;
4146 gimplify_stmt (&dtor, &tseq);
4147 gimple_seq_add_seq (dlist, tseq);
4149 break;
4151 case OMP_CLAUSE_LINEAR:
4152 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4153 goto do_firstprivate;
4154 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4155 x = NULL;
4156 else
4157 x = build_outer_var_ref (var, ctx);
4158 goto do_private;
4160 case OMP_CLAUSE_FIRSTPRIVATE:
4161 if (is_task_ctx (ctx))
4163 if (omp_is_reference (var) || is_variable_sized (var))
4164 goto do_dtor;
4165 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4166 ctx))
4167 || use_pointer_for_field (var, NULL))
4169 x = build_receiver_ref (var, false, ctx);
4170 SET_DECL_VALUE_EXPR (new_var, x);
4171 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4172 goto do_dtor;
4175 do_firstprivate:
4176 x = build_outer_var_ref (var, ctx);
4177 if (is_simd)
4179 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4180 && gimple_omp_for_combined_into_p (ctx->stmt))
4182 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4183 tree stept = TREE_TYPE (t);
4184 tree ct = omp_find_clause (clauses,
4185 OMP_CLAUSE__LOOPTEMP_);
4186 gcc_assert (ct);
4187 tree l = OMP_CLAUSE_DECL (ct);
4188 tree n1 = fd->loop.n1;
4189 tree step = fd->loop.step;
4190 tree itype = TREE_TYPE (l);
4191 if (POINTER_TYPE_P (itype))
4192 itype = signed_type_for (itype);
4193 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4194 if (TYPE_UNSIGNED (itype)
4195 && fd->loop.cond_code == GT_EXPR)
4196 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4197 fold_build1 (NEGATE_EXPR, itype, l),
4198 fold_build1 (NEGATE_EXPR,
4199 itype, step));
4200 else
4201 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4202 t = fold_build2 (MULT_EXPR, stept,
4203 fold_convert (stept, l), t);
4205 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4207 x = lang_hooks.decls.omp_clause_linear_ctor
4208 (c, new_var, x, t);
4209 gimplify_and_add (x, ilist);
4210 goto do_dtor;
4213 if (POINTER_TYPE_P (TREE_TYPE (x)))
4214 x = fold_build2 (POINTER_PLUS_EXPR,
4215 TREE_TYPE (x), x, t);
4216 else
4217 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4220 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4221 || TREE_ADDRESSABLE (new_var))
4222 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4223 ivar, lvar))
4225 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4227 tree iv = create_tmp_var (TREE_TYPE (new_var));
4228 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4229 gimplify_and_add (x, ilist);
4230 gimple_stmt_iterator gsi
4231 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4232 gassign *g
4233 = gimple_build_assign (unshare_expr (lvar), iv);
4234 gsi_insert_before_without_update (&gsi, g,
4235 GSI_SAME_STMT);
4236 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4237 enum tree_code code = PLUS_EXPR;
4238 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4239 code = POINTER_PLUS_EXPR;
4240 g = gimple_build_assign (iv, code, iv, t);
4241 gsi_insert_before_without_update (&gsi, g,
4242 GSI_SAME_STMT);
4243 break;
4245 x = lang_hooks.decls.omp_clause_copy_ctor
4246 (c, unshare_expr (ivar), x);
4247 gimplify_and_add (x, &llist[0]);
4248 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4249 if (x)
4251 gimple_seq tseq = NULL;
4253 dtor = x;
4254 gimplify_stmt (&dtor, &tseq);
4255 gimple_seq_add_seq (&llist[1], tseq);
4257 break;
4260 x = lang_hooks.decls.omp_clause_copy_ctor
4261 (c, unshare_expr (new_var), x);
4262 gimplify_and_add (x, ilist);
4263 goto do_dtor;
4265 case OMP_CLAUSE__LOOPTEMP_:
4266 gcc_assert (is_taskreg_ctx (ctx));
4267 x = build_outer_var_ref (var, ctx);
4268 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4269 gimplify_and_add (x, ilist);
4270 break;
4272 case OMP_CLAUSE_COPYIN:
4273 by_ref = use_pointer_for_field (var, NULL);
4274 x = build_receiver_ref (var, by_ref, ctx);
4275 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4276 append_to_statement_list (x, &copyin_seq);
4277 copyin_by_ref |= by_ref;
4278 break;
4280 case OMP_CLAUSE_REDUCTION:
4281 /* OpenACC reductions are initialized using the
4282 GOACC_REDUCTION internal function. */
4283 if (is_gimple_omp_oacc (ctx->stmt))
4284 break;
4285 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4287 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4288 gimple *tseq;
4289 x = build_outer_var_ref (var, ctx);
4291 if (omp_is_reference (var)
4292 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4293 TREE_TYPE (x)))
4294 x = build_fold_addr_expr_loc (clause_loc, x);
4295 SET_DECL_VALUE_EXPR (placeholder, x);
4296 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4297 tree new_vard = new_var;
4298 if (omp_is_reference (var))
4300 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4301 new_vard = TREE_OPERAND (new_var, 0);
4302 gcc_assert (DECL_P (new_vard));
4304 if (is_simd
4305 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4306 ivar, lvar))
4308 if (new_vard == new_var)
4310 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4311 SET_DECL_VALUE_EXPR (new_var, ivar);
4313 else
4315 SET_DECL_VALUE_EXPR (new_vard,
4316 build_fold_addr_expr (ivar));
4317 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4319 x = lang_hooks.decls.omp_clause_default_ctor
4320 (c, unshare_expr (ivar),
4321 build_outer_var_ref (var, ctx));
4322 if (x)
4323 gimplify_and_add (x, &llist[0]);
4324 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4326 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4327 lower_omp (&tseq, ctx);
4328 gimple_seq_add_seq (&llist[0], tseq);
4330 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4331 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4332 lower_omp (&tseq, ctx);
4333 gimple_seq_add_seq (&llist[1], tseq);
4334 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4335 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4336 if (new_vard == new_var)
4337 SET_DECL_VALUE_EXPR (new_var, lvar);
4338 else
4339 SET_DECL_VALUE_EXPR (new_vard,
4340 build_fold_addr_expr (lvar));
4341 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4342 if (x)
4344 tseq = NULL;
4345 dtor = x;
4346 gimplify_stmt (&dtor, &tseq);
4347 gimple_seq_add_seq (&llist[1], tseq);
4349 break;
4351 /* If this is a reference to a constant-size reduction var
4352 with a placeholder, we haven't emitted the initializer
4353 for it because that is undesirable if SIMD arrays are used.
4354 But if they aren't used, we need to emit the deferred
4355 initialization now. */
4356 else if (omp_is_reference (var) && is_simd)
4357 handle_simd_reference (clause_loc, new_vard, ilist);
4358 x = lang_hooks.decls.omp_clause_default_ctor
4359 (c, unshare_expr (new_var),
4360 build_outer_var_ref (var, ctx));
4361 if (x)
4362 gimplify_and_add (x, ilist);
4363 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4365 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4366 lower_omp (&tseq, ctx);
4367 gimple_seq_add_seq (ilist, tseq);
4369 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4370 if (is_simd)
4372 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4373 lower_omp (&tseq, ctx);
4374 gimple_seq_add_seq (dlist, tseq);
4375 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4377 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4378 goto do_dtor;
4380 else
4382 x = omp_reduction_init (c, TREE_TYPE (new_var));
4383 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4384 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4386 /* reduction(-:var) sums up the partial results, so it
4387 acts identically to reduction(+:var). */
4388 if (code == MINUS_EXPR)
4389 code = PLUS_EXPR;
4391 tree new_vard = new_var;
4392 if (is_simd && omp_is_reference (var))
4394 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4395 new_vard = TREE_OPERAND (new_var, 0);
4396 gcc_assert (DECL_P (new_vard));
4398 if (is_simd
4399 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4400 ivar, lvar))
4402 tree ref = build_outer_var_ref (var, ctx);
4404 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4406 if (sctx.is_simt)
4408 if (!simt_lane)
4409 simt_lane = create_tmp_var (unsigned_type_node);
4410 x = build_call_expr_internal_loc
4411 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4412 TREE_TYPE (ivar), 2, ivar, simt_lane);
4413 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4414 gimplify_assign (ivar, x, &llist[2]);
4416 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4417 ref = build_outer_var_ref (var, ctx);
4418 gimplify_assign (ref, x, &llist[1]);
4420 if (new_vard != new_var)
4422 SET_DECL_VALUE_EXPR (new_vard,
4423 build_fold_addr_expr (lvar));
4424 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4427 else
4429 if (omp_is_reference (var) && is_simd)
4430 handle_simd_reference (clause_loc, new_vard, ilist);
4431 gimplify_assign (new_var, x, ilist);
4432 if (is_simd)
4434 tree ref = build_outer_var_ref (var, ctx);
4436 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4437 ref = build_outer_var_ref (var, ctx);
4438 gimplify_assign (ref, x, dlist);
4442 break;
4444 default:
4445 gcc_unreachable ();
4450 if (known_eq (sctx.max_vf, 1U))
4451 sctx.is_simt = false;
4453 if (sctx.lane || sctx.is_simt)
4455 uid = create_tmp_var (ptr_type_node, "simduid");
4456 /* We don't want uninit warnings on simduid; it is always uninitialized,
4457 but we use it not for its value, only for its DECL_UID. */
4458 TREE_NO_WARNING (uid) = 1;
4459 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4460 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4461 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4462 gimple_omp_for_set_clauses (ctx->stmt, c);
4464 /* Emit calls denoting privatized variables and initializing a pointer to the
4465 structure that holds private variables as fields after the ompdevlow pass. */
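/* Roughly (illustrative):

     simduid.N = GOMP_SIMT_ENTER (simduid.N, <sizes of privatized vars>);
     .omp_simt = GOMP_SIMT_ENTER_ALLOC (simduid.N);

   where the second call yields the per-lane record holding the
   privatized variables as fields.  */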
4466 if (sctx.is_simt)
4468 sctx.simt_eargs[0] = uid;
4469 gimple *g
4470 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4471 gimple_call_set_lhs (g, uid);
4472 gimple_seq_add_stmt (ilist, g);
4473 sctx.simt_eargs.release ();
4475 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4476 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4477 gimple_call_set_lhs (g, simtrec);
4478 gimple_seq_add_stmt (ilist, g);
4480 if (sctx.lane)
4482 gimple *g
4483 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4484 gimple_call_set_lhs (g, sctx.lane);
4485 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4486 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4487 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4488 build_int_cst (unsigned_type_node, 0));
4489 gimple_seq_add_stmt (ilist, g);
4490 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
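/* Conceptually the loop built below is:

     simt_lane = 1;
     while (simt_lane < simt_vf)
       {
         <llist[2]: x = x OP GOMP_SIMT_XCHG_BFLY (x, simt_lane);>
         simt_lane <<= 1;
       }  */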
4491 if (llist[2])
4493 tree simt_vf = create_tmp_var (unsigned_type_node);
4494 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4495 gimple_call_set_lhs (g, simt_vf);
4496 gimple_seq_add_stmt (dlist, g);
4498 tree t = build_int_cst (unsigned_type_node, 1);
4499 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4500 gimple_seq_add_stmt (dlist, g);
4502 t = build_int_cst (unsigned_type_node, 0);
4503 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4504 gimple_seq_add_stmt (dlist, g);
4506 tree body = create_artificial_label (UNKNOWN_LOCATION);
4507 tree header = create_artificial_label (UNKNOWN_LOCATION);
4508 tree end = create_artificial_label (UNKNOWN_LOCATION);
4509 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4510 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4512 gimple_seq_add_seq (dlist, llist[2]);
4514 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4515 gimple_seq_add_stmt (dlist, g);
4517 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4518 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4519 gimple_seq_add_stmt (dlist, g);
4521 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4523 for (int i = 0; i < 2; i++)
4524 if (llist[i])
4526 tree vf = create_tmp_var (unsigned_type_node);
4527 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4528 gimple_call_set_lhs (g, vf);
4529 gimple_seq *seq = i == 0 ? ilist : dlist;
4530 gimple_seq_add_stmt (seq, g);
4531 tree t = build_int_cst (unsigned_type_node, 0);
4532 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4533 gimple_seq_add_stmt (seq, g);
4534 tree body = create_artificial_label (UNKNOWN_LOCATION);
4535 tree header = create_artificial_label (UNKNOWN_LOCATION);
4536 tree end = create_artificial_label (UNKNOWN_LOCATION);
4537 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4538 gimple_seq_add_stmt (seq, gimple_build_label (body));
4539 gimple_seq_add_seq (seq, llist[i]);
4540 t = build_int_cst (unsigned_type_node, 1);
4541 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4542 gimple_seq_add_stmt (seq, g);
4543 gimple_seq_add_stmt (seq, gimple_build_label (header));
4544 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4545 gimple_seq_add_stmt (seq, g);
4546 gimple_seq_add_stmt (seq, gimple_build_label (end));
4549 if (sctx.is_simt)
4551 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4552 gimple *g
4553 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4554 gimple_seq_add_stmt (dlist, g);
4557 /* The copyin sequence is not to be executed by the main thread, since
4558 that would result in self-copies. A self-copy may be harmless for
4559 scalars, but it certainly is not for a C++ operator=. */
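/* I.e. the sequence is guarded roughly as:

     if (omp_get_thread_num () != 0)
       <copyin_seq>;  */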
4560 if (copyin_seq)
4562 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4564 x = build2 (NE_EXPR, boolean_type_node, x,
4565 build_int_cst (TREE_TYPE (x), 0));
4566 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4567 gimplify_and_add (x, ilist);
4570 /* If any copyin variable is passed by reference, we must ensure the
4571 master thread doesn't modify it before it is copied over in all
4572 threads. Similarly for variables in both firstprivate and
4573 lastprivate clauses we need to ensure the lastprivate copying
4574 happens after firstprivate copying in all threads. And similarly
4575 for UDRs if initializer expression refers to omp_orig. */
4576 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4578 /* Don't add any barrier for #pragma omp simd or
4579 #pragma omp distribute. */
4580 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4581 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4582 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4585 /* If max_vf is non-zero, then we can use only a vectorization factor
4586 up to the max_vf we chose. So stick it into the safelen clause. */
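/* E.g. with max_vf == 16, a missing safelen or a safelen (32) clause
   is overridden by a new safelen (16); an existing safelen (8) is
   left alone.  */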
4587 if (maybe_ne (sctx.max_vf, 0U))
4589 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4590 OMP_CLAUSE_SAFELEN);
4591 poly_uint64 safe_len;
4592 if (c == NULL_TREE
4593 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4594 && maybe_gt (safe_len, sctx.max_vf)))
4596 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4597 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4598 sctx.max_vf);
4599 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4600 gimple_omp_for_set_clauses (ctx->stmt, c);
4606 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4607 both parallel and workshare constructs. PREDICATE may be NULL if it's
4608 always true. */
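/* The emitted code has the general shape (PREDICATE selects the
   thread/lane that ran the sequentially last iteration):

     if (PREDICATE)
       {
         orig_var1 = private_var1;    // one copy-out per clause
         ...
       }  */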
4610 static void
4611 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4612 omp_context *ctx)
4614 tree x, c, label = NULL, orig_clauses = clauses;
4615 bool par_clauses = false;
4616 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4618 /* Early exit if there are no lastprivate or linear clauses. */
4619 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4620 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4621 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4622 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4623 break;
4624 if (clauses == NULL)
4626 /* If this was a workshare clause, see if it had been combined
4627 with its parallel. In that case, look for the clauses on the
4628 parallel statement itself. */
4629 if (is_parallel_ctx (ctx))
4630 return;
4632 ctx = ctx->outer;
4633 if (ctx == NULL || !is_parallel_ctx (ctx))
4634 return;
4636 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4637 OMP_CLAUSE_LASTPRIVATE);
4638 if (clauses == NULL)
4639 return;
4640 par_clauses = true;
4643 bool maybe_simt = false;
4644 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4645 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4647 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4648 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4649 if (simduid)
4650 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4653 if (predicate)
4655 gcond *stmt;
4656 tree label_true, arm1, arm2;
4657 enum tree_code pred_code = TREE_CODE (predicate);
4659 label = create_artificial_label (UNKNOWN_LOCATION);
4660 label_true = create_artificial_label (UNKNOWN_LOCATION);
4661 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4663 arm1 = TREE_OPERAND (predicate, 0);
4664 arm2 = TREE_OPERAND (predicate, 1);
4665 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4666 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4668 else
4670 arm1 = predicate;
4671 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4672 arm2 = boolean_false_node;
4673 pred_code = NE_EXPR;
4675 if (maybe_simt)
4677 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4678 c = fold_convert (integer_type_node, c);
4679 simtcond = create_tmp_var (integer_type_node);
4680 gimplify_assign (simtcond, c, stmt_list);
4681 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4682 1, simtcond);
4683 c = create_tmp_var (integer_type_node);
4684 gimple_call_set_lhs (g, c);
4685 gimple_seq_add_stmt (stmt_list, g);
4686 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4687 label_true, label);
4689 else
4690 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4691 gimple_seq_add_stmt (stmt_list, stmt);
4692 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4695 for (c = clauses; c ;)
4697 tree var, new_var;
4698 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4700 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4701 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4702 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4704 var = OMP_CLAUSE_DECL (c);
4705 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4706 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4707 && is_taskloop_ctx (ctx))
4709 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4710 new_var = lookup_decl (var, ctx->outer);
4712 else
4714 new_var = lookup_decl (var, ctx);
4715 /* Avoid uninitialized warnings for lastprivate and
4716 for linear iterators. */
4717 if (predicate
4718 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4719 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4720 TREE_NO_WARNING (new_var) = 1;
4723 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4725 tree val = DECL_VALUE_EXPR (new_var);
4726 if (TREE_CODE (val) == ARRAY_REF
4727 && VAR_P (TREE_OPERAND (val, 0))
4728 && lookup_attribute ("omp simd array",
4729 DECL_ATTRIBUTES (TREE_OPERAND (val,
4730 0))))
4732 if (lastlane == NULL)
4734 lastlane = create_tmp_var (unsigned_type_node);
4735 gcall *g
4736 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4737 2, simduid,
4738 TREE_OPERAND (val, 1));
4739 gimple_call_set_lhs (g, lastlane);
4740 gimple_seq_add_stmt (stmt_list, g);
4742 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4743 TREE_OPERAND (val, 0), lastlane,
4744 NULL_TREE, NULL_TREE);
4747 else if (maybe_simt)
4749 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4750 ? DECL_VALUE_EXPR (new_var)
4751 : new_var);
4752 if (simtlast == NULL)
4754 simtlast = create_tmp_var (unsigned_type_node);
4755 gcall *g = gimple_build_call_internal
4756 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4757 gimple_call_set_lhs (g, simtlast);
4758 gimple_seq_add_stmt (stmt_list, g);
4760 x = build_call_expr_internal_loc
4761 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4762 TREE_TYPE (val), 2, val, simtlast);
4763 new_var = unshare_expr (new_var);
4764 gimplify_assign (new_var, x, stmt_list);
4765 new_var = unshare_expr (new_var);
4768 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4769 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4771 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4772 gimple_seq_add_seq (stmt_list,
4773 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4774 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4776 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4777 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4779 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4780 gimple_seq_add_seq (stmt_list,
4781 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4782 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4785 x = NULL_TREE;
4786 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4787 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4789 gcc_checking_assert (is_taskloop_ctx (ctx));
4790 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4791 ctx->outer->outer);
4792 if (is_global_var (ovar))
4793 x = ovar;
4795 if (!x)
4796 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4797 if (omp_is_reference (var))
4798 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4799 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4800 gimplify_and_add (x, stmt_list);
4802 c = OMP_CLAUSE_CHAIN (c);
4803 if (c == NULL && !par_clauses)
4805 /* If this was a workshare clause, see if it had been combined
4806 with its parallel. In that case, continue looking for the
4807 clauses also on the parallel statement itself. */
4808 if (is_parallel_ctx (ctx))
4809 break;
4811 ctx = ctx->outer;
4812 if (ctx == NULL || !is_parallel_ctx (ctx))
4813 break;
4815 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4816 OMP_CLAUSE_LASTPRIVATE);
4817 par_clauses = true;
4821 if (label)
4822 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4825 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4826 (which might be a placeholder). INNER is true if this is an inner
4827 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4828 join markers. Generate the before-loop forking sequence in
4829 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4830 general form of these sequences is
4832 GOACC_REDUCTION_SETUP
4833 GOACC_FORK
4834 GOACC_REDUCTION_INIT
4836 GOACC_REDUCTION_FINI
4837 GOACC_JOIN
4838 GOACC_REDUCTION_TEARDOWN. */
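/* Each of the calls above has the shape (roughly)

     v = GOACC_REDUCTION (code, ref_to_res, var, level, op, offset);

   with a distinct temporary V per stage, as built further below.  */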
4840 static void
4841 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4842 gcall *fork, gcall *join, gimple_seq *fork_seq,
4843 gimple_seq *join_seq, omp_context *ctx)
4845 gimple_seq before_fork = NULL;
4846 gimple_seq after_fork = NULL;
4847 gimple_seq before_join = NULL;
4848 gimple_seq after_join = NULL;
4849 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4850 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4851 unsigned offset = 0;
4853 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4854 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4856 tree orig = OMP_CLAUSE_DECL (c);
4857 tree var = maybe_lookup_decl (orig, ctx);
4858 tree ref_to_res = NULL_TREE;
4859 tree incoming, outgoing, v1, v2, v3;
4860 bool is_private = false;
4862 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4863 if (rcode == MINUS_EXPR)
4864 rcode = PLUS_EXPR;
4865 else if (rcode == TRUTH_ANDIF_EXPR)
4866 rcode = BIT_AND_EXPR;
4867 else if (rcode == TRUTH_ORIF_EXPR)
4868 rcode = BIT_IOR_EXPR;
4869 tree op = build_int_cst (unsigned_type_node, rcode);
4871 if (!var)
4872 var = orig;
4874 incoming = outgoing = var;
4876 if (!inner)
4878 /* See if an outer construct also reduces this variable. */
4879 omp_context *outer = ctx;
4881 while (omp_context *probe = outer->outer)
4883 enum gimple_code type = gimple_code (probe->stmt);
4884 tree cls;
4886 switch (type)
4888 case GIMPLE_OMP_FOR:
4889 cls = gimple_omp_for_clauses (probe->stmt);
4890 break;
4892 case GIMPLE_OMP_TARGET:
4893 if (gimple_omp_target_kind (probe->stmt)
4894 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4895 goto do_lookup;
4897 cls = gimple_omp_target_clauses (probe->stmt);
4898 break;
4900 default:
4901 goto do_lookup;
4904 outer = probe;
4905 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4906 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4907 && orig == OMP_CLAUSE_DECL (cls))
4909 incoming = outgoing = lookup_decl (orig, probe);
4910 goto has_outer_reduction;
4912 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4913 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4914 && orig == OMP_CLAUSE_DECL (cls))
4916 is_private = true;
4917 goto do_lookup;
4921 do_lookup:
4922 /* This is the outermost construct with this reduction,
4923 see if there's a mapping for it. */
4924 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4925 && maybe_lookup_field (orig, outer) && !is_private)
4927 ref_to_res = build_receiver_ref (orig, false, outer);
4928 if (omp_is_reference (orig))
4929 ref_to_res = build_simple_mem_ref (ref_to_res);
4931 tree type = TREE_TYPE (var);
4932 if (POINTER_TYPE_P (type))
4933 type = TREE_TYPE (type);
4935 outgoing = var;
4936 incoming = omp_reduction_init_op (loc, rcode, type);
4938 else
4940 /* Try to look at enclosing contexts for reduction var,
4941 use original if no mapping found. */
4942 tree t = NULL_TREE;
4943 omp_context *c = ctx->outer;
4944 while (c && !t)
4946 t = maybe_lookup_decl (orig, c);
4947 c = c->outer;
4949 incoming = outgoing = (t ? t : orig);
4952 has_outer_reduction:;
4955 if (!ref_to_res)
4956 ref_to_res = integer_zero_node;
4958 if (omp_is_reference (orig))
4960 tree type = TREE_TYPE (var);
4961 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4963 if (!inner)
4965 tree x = create_tmp_var (TREE_TYPE (type), id);
4966 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4969 v1 = create_tmp_var (type, id);
4970 v2 = create_tmp_var (type, id);
4971 v3 = create_tmp_var (type, id);
4973 gimplify_assign (v1, var, fork_seq);
4974 gimplify_assign (v2, var, fork_seq);
4975 gimplify_assign (v3, var, fork_seq);
4977 var = build_simple_mem_ref (var);
4978 v1 = build_simple_mem_ref (v1);
4979 v2 = build_simple_mem_ref (v2);
4980 v3 = build_simple_mem_ref (v3);
4981 outgoing = build_simple_mem_ref (outgoing);
4983 if (!TREE_CONSTANT (incoming))
4984 incoming = build_simple_mem_ref (incoming);
4986 else
4987 v1 = v2 = v3 = var;
4989 /* Determine the position in the reduction buffer, which may be used
4990 by the target. The parser has ensured that this is not a
4991 variable-sized type. */
4992 fixed_size_mode mode
4993 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
4994 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
4995 offset = (offset + align - 1) & ~(align - 1);
4996 tree off = build_int_cst (sizetype, offset);
4997 offset += GET_MODE_SIZE (mode);
4999 if (!init_code)
5001 init_code = build_int_cst (integer_type_node,
5002 IFN_GOACC_REDUCTION_INIT);
5003 fini_code = build_int_cst (integer_type_node,
5004 IFN_GOACC_REDUCTION_FINI);
5005 setup_code = build_int_cst (integer_type_node,
5006 IFN_GOACC_REDUCTION_SETUP);
5007 teardown_code = build_int_cst (integer_type_node,
5008 IFN_GOACC_REDUCTION_TEARDOWN);
5011 tree setup_call
5012 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5013 TREE_TYPE (var), 6, setup_code,
5014 unshare_expr (ref_to_res),
5015 incoming, level, op, off);
5016 tree init_call
5017 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5018 TREE_TYPE (var), 6, init_code,
5019 unshare_expr (ref_to_res),
5020 v1, level, op, off);
5021 tree fini_call
5022 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5023 TREE_TYPE (var), 6, fini_code,
5024 unshare_expr (ref_to_res),
5025 v2, level, op, off);
5026 tree teardown_call
5027 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5028 TREE_TYPE (var), 6, teardown_code,
5029 ref_to_res, v3, level, op, off);
5031 gimplify_assign (v1, setup_call, &before_fork);
5032 gimplify_assign (v2, init_call, &after_fork);
5033 gimplify_assign (v3, fini_call, &before_join);
5034 gimplify_assign (outgoing, teardown_call, &after_join);
5037 /* Now stitch things together. */
5038 gimple_seq_add_seq (fork_seq, before_fork);
5039 if (fork)
5040 gimple_seq_add_stmt (fork_seq, fork);
5041 gimple_seq_add_seq (fork_seq, after_fork);
5043 gimple_seq_add_seq (join_seq, before_join);
5044 if (join)
5045 gimple_seq_add_stmt (join_seq, join);
5046 gimple_seq_add_seq (join_seq, after_join);
5049 /* Generate code to implement the REDUCTION clauses. */
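/* For a single scalar, non-UDR reduction this emits one atomic update,
   roughly:

     #pragma omp atomic
       *addr = *addr OP private;

   Otherwise all merges are bracketed by GOMP_atomic_start () and
   GOMP_atomic_end () calls.  */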
5051 static void
5052 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5054 gimple_seq sub_seq = NULL;
5055 gimple *stmt;
5056 tree x, c;
5057 int count = 0;
5059 /* OpenACC loop reductions are handled elsewhere. */
5060 if (is_gimple_omp_oacc (ctx->stmt))
5061 return;
5063 /* SIMD reductions are handled in lower_rec_input_clauses. */
5064 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5065 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5066 return;
5068 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5069 update in that case; otherwise use a lock. */
5070 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5071 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5073 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5074 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5076 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5077 count = -1;
5078 break;
5080 count++;
5083 if (count == 0)
5084 return;
5086 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5088 tree var, ref, new_var, orig_var;
5089 enum tree_code code;
5090 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5092 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5093 continue;
5095 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5096 orig_var = var = OMP_CLAUSE_DECL (c);
5097 if (TREE_CODE (var) == MEM_REF)
5099 var = TREE_OPERAND (var, 0);
5100 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5101 var = TREE_OPERAND (var, 0);
5102 if (TREE_CODE (var) == ADDR_EXPR)
5103 var = TREE_OPERAND (var, 0);
5104 else
5106 /* If this is a pointer- or reference-based array
5107 section, the var could be private in the outer
5108 context, e.g. on an orphaned loop construct. Pretend
5109 this is the private variable's outer reference. */
5110 ccode = OMP_CLAUSE_PRIVATE;
5111 if (TREE_CODE (var) == INDIRECT_REF)
5112 var = TREE_OPERAND (var, 0);
5114 orig_var = var;
5115 if (is_variable_sized (var))
5117 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5118 var = DECL_VALUE_EXPR (var);
5119 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5120 var = TREE_OPERAND (var, 0);
5121 gcc_assert (DECL_P (var));
5124 new_var = lookup_decl (var, ctx);
5125 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5126 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5127 ref = build_outer_var_ref (var, ctx, ccode);
5128 code = OMP_CLAUSE_REDUCTION_CODE (c);
5130 /* reduction(-:var) sums up the partial results, so it acts
5131 identically to reduction(+:var). */
5132 if (code == MINUS_EXPR)
5133 code = PLUS_EXPR;
5135 if (count == 1)
5137 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5139 addr = save_expr (addr);
5140 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5141 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5142 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5143 gimplify_and_add (x, stmt_seqp);
5144 return;
5146 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5148 tree d = OMP_CLAUSE_DECL (c);
5149 tree type = TREE_TYPE (d);
5150 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5151 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5152 tree ptype = build_pointer_type (TREE_TYPE (type));
5153 tree bias = TREE_OPERAND (d, 1);
5154 d = TREE_OPERAND (d, 0);
5155 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5157 tree b = TREE_OPERAND (d, 1);
5158 b = maybe_lookup_decl (b, ctx);
5159 if (b == NULL)
5161 b = TREE_OPERAND (d, 1);
5162 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5164 if (integer_zerop (bias))
5165 bias = b;
5166 else
5168 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5169 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5170 TREE_TYPE (b), b, bias);
5172 d = TREE_OPERAND (d, 0);
5174 /* For a reference, build_outer_var_ref already performs this, so
5175 only new_var needs a dereference. */
5176 if (TREE_CODE (d) == INDIRECT_REF)
5178 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5179 gcc_assert (omp_is_reference (var) && var == orig_var);
5181 else if (TREE_CODE (d) == ADDR_EXPR)
5183 if (orig_var == var)
5185 new_var = build_fold_addr_expr (new_var);
5186 ref = build_fold_addr_expr (ref);
5189 else
5191 gcc_assert (orig_var == var);
5192 if (omp_is_reference (var))
5193 ref = build_fold_addr_expr (ref);
5195 if (DECL_P (v))
5197 tree t = maybe_lookup_decl (v, ctx);
5198 if (t)
5199 v = t;
5200 else
5201 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5202 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5204 if (!integer_zerop (bias))
5206 bias = fold_convert_loc (clause_loc, sizetype, bias);
5207 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5208 TREE_TYPE (new_var), new_var,
5209 unshare_expr (bias));
5210 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5211 TREE_TYPE (ref), ref, bias);
5213 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5214 ref = fold_convert_loc (clause_loc, ptype, ref);
5215 tree m = create_tmp_var (ptype, NULL);
5216 gimplify_assign (m, new_var, stmt_seqp);
5217 new_var = m;
5218 m = create_tmp_var (ptype, NULL);
5219 gimplify_assign (m, ref, stmt_seqp);
5220 ref = m;
5221 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5222 tree body = create_artificial_label (UNKNOWN_LOCATION);
5223 tree end = create_artificial_label (UNKNOWN_LOCATION);
5224 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5225 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5226 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5227 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5229 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5230 tree decl_placeholder
5231 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5232 SET_DECL_VALUE_EXPR (placeholder, out);
5233 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5234 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5235 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5236 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5237 gimple_seq_add_seq (&sub_seq,
5238 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5239 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5240 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5241 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5243 else
5245 x = build2 (code, TREE_TYPE (out), out, priv);
5246 out = unshare_expr (out);
5247 gimplify_assign (out, x, &sub_seq);
5249 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5250 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5251 gimple_seq_add_stmt (&sub_seq, g);
5252 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5253 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5254 gimple_seq_add_stmt (&sub_seq, g);
5255 g = gimple_build_assign (i, PLUS_EXPR, i,
5256 build_int_cst (TREE_TYPE (i), 1));
5257 gimple_seq_add_stmt (&sub_seq, g);
5258 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5259 gimple_seq_add_stmt (&sub_seq, g);
5260 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5262 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5264 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5266 if (omp_is_reference (var)
5267 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5268 TREE_TYPE (ref)))
5269 ref = build_fold_addr_expr_loc (clause_loc, ref);
5270 SET_DECL_VALUE_EXPR (placeholder, ref);
5271 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5272 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5273 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5274 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5275 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5277 else
5279 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5280 ref = build_outer_var_ref (var, ctx);
5281 gimplify_assign (ref, x, &sub_seq);
5285 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5287 gimple_seq_add_stmt (stmt_seqp, stmt);
5289 gimple_seq_add_seq (stmt_seqp, sub_seq);
5291 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5293 gimple_seq_add_stmt (stmt_seqp, stmt);
5297 /* Generate code to implement the COPYPRIVATE clauses. */
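/* The broadcasting thread stores each variable (or its address, when
   passed by reference) into the copyprivate block via SLIST, and the
   other threads copy it back out via RLIST, roughly:

     .block.var = &x;       // sender
     x' = *.block.var;      // receivers

   (.block is an illustrative name for the sender/receiver record.)  */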
5299 static void
5300 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5301 omp_context *ctx)
5303 tree c;
5305 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5307 tree var, new_var, ref, x;
5308 bool by_ref;
5309 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5311 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5312 continue;
5314 var = OMP_CLAUSE_DECL (c);
5315 by_ref = use_pointer_for_field (var, NULL);
5317 ref = build_sender_ref (var, ctx);
5318 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5319 if (by_ref)
5321 x = build_fold_addr_expr_loc (clause_loc, new_var);
5322 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5324 gimplify_assign (ref, x, slist);
5326 ref = build_receiver_ref (var, false, ctx);
5327 if (by_ref)
5329 ref = fold_convert_loc (clause_loc,
5330 build_pointer_type (TREE_TYPE (new_var)),
5331 ref);
5332 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5334 if (omp_is_reference (var))
5336 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5337 ref = build_simple_mem_ref_loc (clause_loc, ref);
5338 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5340 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5341 gimplify_and_add (x, rlist);
5346 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5347 and REDUCTION from the sender (aka parent) side. */
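/* Conceptually this fills the outgoing record before the region and
   drains it afterwards, e.g. for a firstprivate/lastprivate VAR:

     .omp_data_o.var = var;    // ILIST, "do_in"
     <region>
     var = .omp_data_o.var;    // OLIST, "do_out"  */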
5349 static void
5350 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5351 omp_context *ctx)
5353 tree c, t;
5354 int ignored_looptemp = 0;
5355 bool is_taskloop = false;
5357 /* For taskloop, ignore the first two _looptemp_ clauses; those are
5358 initialized by GOMP_taskloop. */
5359 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5361 ignored_looptemp = 2;
5362 is_taskloop = true;
5365 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5367 tree val, ref, x, var;
5368 bool by_ref, do_in = false, do_out = false;
5369 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5371 switch (OMP_CLAUSE_CODE (c))
5373 case OMP_CLAUSE_PRIVATE:
5374 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5375 break;
5376 continue;
5377 case OMP_CLAUSE_FIRSTPRIVATE:
5378 case OMP_CLAUSE_COPYIN:
5379 case OMP_CLAUSE_LASTPRIVATE:
5380 case OMP_CLAUSE_REDUCTION:
5381 break;
5382 case OMP_CLAUSE_SHARED:
5383 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5384 break;
5385 continue;
5386 case OMP_CLAUSE__LOOPTEMP_:
5387 if (ignored_looptemp)
5389 ignored_looptemp--;
5390 continue;
5392 break;
5393 default:
5394 continue;
5397 val = OMP_CLAUSE_DECL (c);
5398 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5399 && TREE_CODE (val) == MEM_REF)
5401 val = TREE_OPERAND (val, 0);
5402 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5403 val = TREE_OPERAND (val, 0);
5404 if (TREE_CODE (val) == INDIRECT_REF
5405 || TREE_CODE (val) == ADDR_EXPR)
5406 val = TREE_OPERAND (val, 0);
5407 if (is_variable_sized (val))
5408 continue;
5411 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5412 outer taskloop region. */
5413 omp_context *ctx_for_o = ctx;
5414 if (is_taskloop
5415 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5416 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5417 ctx_for_o = ctx->outer;
5419 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5421 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5422 && is_global_var (var))
5423 continue;
5425 t = omp_member_access_dummy_var (var);
5426 if (t)
5428 var = DECL_VALUE_EXPR (var);
5429 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5430 if (o != t)
5431 var = unshare_and_remap (var, t, o);
5432 else
5433 var = unshare_expr (var);
5436 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5438 /* Handle taskloop firstprivate/lastprivate, where the
5439 lastprivate on GIMPLE_OMP_TASK is represented as
5440 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5441 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5442 x = omp_build_component_ref (ctx->sender_decl, f);
5443 if (use_pointer_for_field (val, ctx))
5444 var = build_fold_addr_expr (var);
5445 gimplify_assign (x, var, ilist);
5446 DECL_ABSTRACT_ORIGIN (f) = NULL;
5447 continue;
5450 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5451 || val == OMP_CLAUSE_DECL (c))
5452 && is_variable_sized (val))
5453 continue;
5454 by_ref = use_pointer_for_field (val, NULL);
5456 switch (OMP_CLAUSE_CODE (c))
5458 case OMP_CLAUSE_FIRSTPRIVATE:
5459 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5460 && !by_ref
5461 && is_task_ctx (ctx))
5462 TREE_NO_WARNING (var) = 1;
5463 do_in = true;
5464 break;
5466 case OMP_CLAUSE_PRIVATE:
5467 case OMP_CLAUSE_COPYIN:
5468 case OMP_CLAUSE__LOOPTEMP_:
5469 do_in = true;
5470 break;
5472 case OMP_CLAUSE_LASTPRIVATE:
5473 if (by_ref || omp_is_reference (val))
5475 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5476 continue;
5477 do_in = true;
5479 else
5481 do_out = true;
5482 if (lang_hooks.decls.omp_private_outer_ref (val))
5483 do_in = true;
5485 break;
5487 case OMP_CLAUSE_REDUCTION:
5488 do_in = true;
5489 if (val == OMP_CLAUSE_DECL (c))
5490 do_out = !(by_ref || omp_is_reference (val));
5491 else
5492 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5493 break;
5495 default:
5496 gcc_unreachable ();
5499 if (do_in)
5501 ref = build_sender_ref (val, ctx);
5502 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5503 gimplify_assign (ref, x, ilist);
5504 if (is_task_ctx (ctx))
5505 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5508 if (do_out)
5510 ref = build_sender_ref (val, ctx);
5511 gimplify_assign (var, ref, olist);
5516 /* Generate code to implement SHARED from the sender (aka parent)
5517 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5518 list things that got automatically shared. */
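/* The loop below therefore walks the record fields directly; each
   field's DECL_ABSTRACT_ORIGIN points back at the shared variable, so
   implicitly shared variables get the same store/load treatment as
   explicitly listed ones.  */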
5520 static void
5521 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5523 tree var, ovar, nvar, t, f, x, record_type;
5525 if (ctx->record_type == NULL)
5526 return;
5528 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5529 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5531 ovar = DECL_ABSTRACT_ORIGIN (f);
5532 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5533 continue;
5535 nvar = maybe_lookup_decl (ovar, ctx);
5536 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5537 continue;
5539 /* If CTX is a nested parallel directive, find the immediately
5540 enclosing parallel or workshare construct that contains a
5541 mapping for OVAR. */
5542 var = lookup_decl_in_outer_ctx (ovar, ctx);
5544 t = omp_member_access_dummy_var (var);
5545 if (t)
5547 var = DECL_VALUE_EXPR (var);
5548 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5549 if (o != t)
5550 var = unshare_and_remap (var, t, o);
5551 else
5552 var = unshare_expr (var);
5555 if (use_pointer_for_field (ovar, ctx))
5557 x = build_sender_ref (ovar, ctx);
5558 var = build_fold_addr_expr (var);
5559 gimplify_assign (x, var, ilist);
5561 else
5563 x = build_sender_ref (ovar, ctx);
5564 gimplify_assign (x, var, ilist);
5566 if (!TREE_READONLY (var)
5567 /* We don't need to receive a new reference to a result
5568 or parm decl. In fact we may not store to it, as we would
5569 invalidate any pending return-slot optimization (RSO) and
5570 generate wrong gimple during inlining. */
5571 && !((TREE_CODE (var) == RESULT_DECL
5572 || TREE_CODE (var) == PARM_DECL)
5573 && DECL_BY_REFERENCE (var)))
5575 x = build_sender_ref (ovar, ctx);
5576 gimplify_assign (var, x, olist);
5582 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5583 other information that must be processed by the target compiler.
5584 Return the maximum number of dimensions the associated loop might
5585 be partitioned over. */
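/* The emitted marker has the shape (GANG_STATIC is present only when a
   static argument was given):

     ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);  */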
5587 static unsigned
5588 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5589 gimple_seq *seq, omp_context *ctx)
5591 unsigned levels = 0;
5592 unsigned tag = 0;
5593 tree gang_static = NULL_TREE;
5594 auto_vec<tree, 5> args;
5596 args.quick_push (build_int_cst
5597 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5598 args.quick_push (ddvar);
5599 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5601 switch (OMP_CLAUSE_CODE (c))
5603 case OMP_CLAUSE_GANG:
5604 tag |= OLF_DIM_GANG;
5605 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5606 /* static:* is represented by -1, and we can ignore it, as
5607 scheduling is always static. */
5608 if (gang_static && integer_minus_onep (gang_static))
5609 gang_static = NULL_TREE;
5610 levels++;
5611 break;
5613 case OMP_CLAUSE_WORKER:
5614 tag |= OLF_DIM_WORKER;
5615 levels++;
5616 break;
5618 case OMP_CLAUSE_VECTOR:
5619 tag |= OLF_DIM_VECTOR;
5620 levels++;
5621 break;
5623 case OMP_CLAUSE_SEQ:
5624 tag |= OLF_SEQ;
5625 break;
5627 case OMP_CLAUSE_AUTO:
5628 tag |= OLF_AUTO;
5629 break;
5631 case OMP_CLAUSE_INDEPENDENT:
5632 tag |= OLF_INDEPENDENT;
5633 break;
5635 case OMP_CLAUSE_TILE:
5636 tag |= OLF_TILE;
5637 break;
5639 default:
5640 continue;
5644 if (gang_static)
5646 if (DECL_P (gang_static))
5647 gang_static = build_outer_var_ref (gang_static, ctx);
5648 tag |= OLF_GANG_STATIC;
5651 /* In a parallel region, loops are implicitly INDEPENDENT. */
5652 omp_context *tgt = enclosing_target_ctx (ctx);
5653 if (!tgt || is_oacc_parallel (tgt))
5654 tag |= OLF_INDEPENDENT;
5656 if (tag & OLF_TILE)
5657 /* Tiling could use all 3 levels. */
5658 levels = 3;
5659 else
5661 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5662 Ensure at least one level, or 2 for possible auto
5663 partitioning. */
5664 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5665 << OLF_DIM_BASE) | OLF_SEQ));
5667 if (levels < 1u + maybe_auto)
5668 levels = 1u + maybe_auto;
5671 args.quick_push (build_int_cst (integer_type_node, levels));
5672 args.quick_push (build_int_cst (integer_type_node, tag));
5673 if (gang_static)
5674 args.quick_push (gang_static);
5676 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5677 gimple_set_location (call, loc);
5678 gimple_set_lhs (call, ddvar);
5679 gimple_seq_add_stmt (seq, call);
5681 return levels;
5684 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
5685 non-null, gives the partitioning level of the enclosed region. */
5687 static void
5688 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5689 tree tofollow, gimple_seq *seq)
5691 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5692 : IFN_UNIQUE_OACC_TAIL_MARK);
5693 tree marker = build_int_cst (integer_type_node, marker_kind);
5694 int nargs = 2 + (tofollow != NULL_TREE);
5695 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5696 marker, ddvar, tofollow);
5697 gimple_set_location (call, loc);
5698 gimple_set_lhs (call, ddvar);
5699 gimple_seq_add_stmt (seq, call);
5702 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5703 the loop clauses, from which we extract reductions. Initialize
5704 HEAD and TAIL. */
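/* Forks are appended to HEAD outermost-first, while joins are
   prepended to TAIL so they unwind in reverse order; each level's
   reduction setup/init surrounds its fork, and fini/teardown its join
   (see lower_oacc_reductions).  */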
5706 static void
5707 lower_oacc_head_tail (location_t loc, tree clauses,
5708 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5710 bool inner = false;
5711 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5712 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5714 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5715 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5716 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5718 gcc_assert (count);
5719 for (unsigned done = 1; count; count--, done++)
5721 gimple_seq fork_seq = NULL;
5722 gimple_seq join_seq = NULL;
5724 tree place = build_int_cst (integer_type_node, -1);
5725 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5726 fork_kind, ddvar, place);
5727 gimple_set_location (fork, loc);
5728 gimple_set_lhs (fork, ddvar);
5730 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5731 join_kind, ddvar, place);
5732 gimple_set_location (join, loc);
5733 gimple_set_lhs (join, ddvar);
5735 /* Mark the beginning of this level sequence. */
5736 if (inner)
5737 lower_oacc_loop_marker (loc, ddvar, true,
5738 build_int_cst (integer_type_node, count),
5739 &fork_seq);
5740 lower_oacc_loop_marker (loc, ddvar, false,
5741 build_int_cst (integer_type_node, done),
5742 &join_seq);
5744 lower_oacc_reductions (loc, clauses, place, inner,
5745 fork, join, &fork_seq, &join_seq, ctx);
5747 /* Append this level to head. */
5748 gimple_seq_add_seq (head, fork_seq);
5749 /* Prepend it to tail. */
5750 gimple_seq_add_seq (&join_seq, *tail);
5751 *tail = join_seq;
5753 inner = true;
5756 /* Mark the end of the sequence. */
5757 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5758 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
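/* Illustrative sketch of the bracketing built above for COUNT == 2
   (approximate ordering; reduction setup/teardown emitted by
   lower_oacc_reductions is interleaved with each fork/join):

     HEAD:				TAIL:
       OACC_HEAD_MARK (levels, tag)	  tail marker (2 done)
       fork (outer level)		  join (inner level)
       head marker (1 to follow)	  tail marker (1 done)
       fork (inner level)		  join (outer level)
       head marker (end)		  tail marker (end)  */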
5761 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5762 catch handler and return it. This prevents programs from violating the
5763 structured block semantics with throws. */
5765 static gimple_seq
5766 maybe_catch_exception (gimple_seq body)
5768 gimple *g;
5769 tree decl;
5771 if (!flag_exceptions)
5772 return body;
5774 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5775 decl = lang_hooks.eh_protect_cleanup_actions ();
5776 else
5777 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5779 g = gimple_build_eh_must_not_throw (decl);
5780 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5781 GIMPLE_TRY_CATCH);
5783 return gimple_seq_alloc_with_stmt (g);
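/* Conceptually, with -fexceptions the transformation above is

     BODY  ==>  try { BODY } catch { <MUST_NOT_THROW> }

   where the handler invokes the language's EH cleanup action if it
   provides one, and __builtin_trap otherwise, so an exception escaping
   the structured block terminates instead of propagating.  */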
5787 /* Routines to lower OMP directives into OMP-GIMPLE. */
5789 /* If CTX is a worksharing context inside a cancellable parallel
5790 region and it isn't nowait, add an LHS to its GIMPLE_OMP_RETURN
5791 and a conditional branch to the parallel's cancel_label to handle
5792 cancellation in the implicit barrier. */
5794 static void
5795 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5797 gimple *omp_return = gimple_seq_last_stmt (*body);
5798 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5799 if (gimple_omp_return_nowait_p (omp_return))
5800 return;
5801 if (ctx->outer
5802 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5803 && ctx->outer->cancellable)
5805 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5806 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5807 tree lhs = create_tmp_var (c_bool_type);
5808 gimple_omp_return_set_lhs (omp_return, lhs);
5809 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5810 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5811 fold_convert (c_bool_type,
5812 boolean_false_node),
5813 ctx->outer->cancel_label, fallthru_label);
5814 gimple_seq_add_stmt (body, g);
5815 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
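/* Illustrative sketch of what this appends (approximate): the
   GIMPLE_OMP_RETURN receives an artificial boolean LHS, which
   pass_expand_omp later sets from the cancellable barrier, followed by

     if (lhs != false) goto <parallel cancel_label>; else goto <fallthru>;
     <fallthru>:

   so a cancelled implicit barrier branches to the enclosing parallel's
   cancellation label.  */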
5819 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5820 CTX is the enclosing OMP context for the current statement. */
5822 static void
5823 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5825 tree block, control;
5826 gimple_stmt_iterator tgsi;
5827 gomp_sections *stmt;
5828 gimple *t;
5829 gbind *new_stmt, *bind;
5830 gimple_seq ilist, dlist, olist, new_body;
5832 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5834 push_gimplify_context ();
5836 dlist = NULL;
5837 ilist = NULL;
5838 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5839 &ilist, &dlist, ctx, NULL);
5841 new_body = gimple_omp_body (stmt);
5842 gimple_omp_set_body (stmt, NULL);
5843 tgsi = gsi_start (new_body);
5844 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5846 omp_context *sctx;
5847 gimple *sec_start;
5849 sec_start = gsi_stmt (tgsi);
5850 sctx = maybe_lookup_ctx (sec_start);
5851 gcc_assert (sctx);
5853 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5854 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5855 GSI_CONTINUE_LINKING);
5856 gimple_omp_set_body (sec_start, NULL);
5858 if (gsi_one_before_end_p (tgsi))
5860 gimple_seq l = NULL;
5861 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5862 &l, ctx);
5863 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5864 gimple_omp_section_set_last (sec_start);
5867 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5868 GSI_CONTINUE_LINKING);
5871 block = make_node (BLOCK);
5872 bind = gimple_build_bind (NULL, new_body, block);
5874 olist = NULL;
5875 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5877 block = make_node (BLOCK);
5878 new_stmt = gimple_build_bind (NULL, NULL, block);
5879 gsi_replace (gsi_p, new_stmt, true);
5881 pop_gimplify_context (new_stmt);
5882 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5883 BLOCK_VARS (block) = gimple_bind_vars (bind);
5884 if (BLOCK_VARS (block))
5885 TREE_USED (block) = 1;
5887 new_body = NULL;
5888 gimple_seq_add_seq (&new_body, ilist);
5889 gimple_seq_add_stmt (&new_body, stmt);
5890 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5891 gimple_seq_add_stmt (&new_body, bind);
5893 control = create_tmp_var (unsigned_type_node, ".section");
5894 t = gimple_build_omp_continue (control, control);
5895 gimple_omp_sections_set_control (stmt, control);
5896 gimple_seq_add_stmt (&new_body, t);
5898 gimple_seq_add_seq (&new_body, olist);
5899 if (ctx->cancellable)
5900 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5901 gimple_seq_add_seq (&new_body, dlist);
5903 new_body = maybe_catch_exception (new_body);
5905 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5906 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5907 t = gimple_build_omp_return (nowait);
5908 gimple_seq_add_stmt (&new_body, t);
5909 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5911 gimple_bind_set_body (new_stmt, new_body);
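/* Illustrative sketch of the sequence assembled above (approximate):

     <ilist: input clause setup>
     GIMPLE_OMP_SECTIONS <clauses, control var .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { section bodies, the last preceded by lastprivate code,
	    each terminated by a GIMPLE_OMP_RETURN }
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reductions>  [cancel_label:]  <dlist>
     GIMPLE_OMP_RETURN (nowait from clauses)  */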
5915 /* A subroutine of lower_omp_single. Expand the simple form of
5916 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5918 if (GOMP_single_start ())
5919 BODY;
5920 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5922 FIXME. It may be better to delay expanding the logic of this until
5923 pass_expand_omp. The expanded logic may make the job of a
5924 synchronization analysis pass more difficult. */
5926 static void
5927 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5929 location_t loc = gimple_location (single_stmt);
5930 tree tlabel = create_artificial_label (loc);
5931 tree flabel = create_artificial_label (loc);
5932 gimple *call, *cond;
5933 tree lhs, decl;
5935 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5936 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5937 call = gimple_build_call (decl, 0);
5938 gimple_call_set_lhs (call, lhs);
5939 gimple_seq_add_stmt (pre_p, call);
5941 cond = gimple_build_cond (EQ_EXPR, lhs,
5942 fold_convert_loc (loc, TREE_TYPE (lhs),
5943 boolean_true_node),
5944 tlabel, flabel);
5945 gimple_seq_add_stmt (pre_p, cond);
5946 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5947 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5948 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5952 /* A subroutine of lower_omp_single. Expand the simple form of
5953 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5955 #pragma omp single copyprivate (a, b, c)
5957 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5960 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5962 BODY;
5963 copyout.a = a;
5964 copyout.b = b;
5965 copyout.c = c;
5966 GOMP_single_copy_end (&copyout);
5968 else
5970 a = copyout_p->a;
5971 b = copyout_p->b;
5972 c = copyout_p->c;
5974 GOMP_barrier ();
5977 FIXME. It may be better to delay expanding the logic of this until
5978 pass_expand_omp. The expanded logic may make the job of a
5979 synchronization analysis pass more difficult. */
5981 static void
5982 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5983 omp_context *ctx)
5985 tree ptr_type, t, l0, l1, l2, bfn_decl;
5986 gimple_seq copyin_seq;
5987 location_t loc = gimple_location (single_stmt);
5989 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
5991 ptr_type = build_pointer_type (ctx->record_type);
5992 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
5994 l0 = create_artificial_label (loc);
5995 l1 = create_artificial_label (loc);
5996 l2 = create_artificial_label (loc);
5998 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
5999 t = build_call_expr_loc (loc, bfn_decl, 0);
6000 t = fold_convert_loc (loc, ptr_type, t);
6001 gimplify_assign (ctx->receiver_decl, t, pre_p);
6003 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6004 build_int_cst (ptr_type, 0));
6005 t = build3 (COND_EXPR, void_type_node, t,
6006 build_and_jump (&l0), build_and_jump (&l1));
6007 gimplify_and_add (t, pre_p);
6009 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6011 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6013 copyin_seq = NULL;
6014 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6015 &copyin_seq, ctx);
6017 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6018 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6019 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6020 gimplify_and_add (t, pre_p);
6022 t = build_and_jump (&l2);
6023 gimplify_and_add (t, pre_p);
6025 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6027 gimple_seq_add_seq (pre_p, copyin_seq);
6029 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6033 /* Expand code for an OpenMP single directive. */
6035 static void
6036 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6038 tree block;
6039 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6040 gbind *bind;
6041 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6043 push_gimplify_context ();
6045 block = make_node (BLOCK);
6046 bind = gimple_build_bind (NULL, NULL, block);
6047 gsi_replace (gsi_p, bind, true);
6048 bind_body = NULL;
6049 dlist = NULL;
6050 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6051 &bind_body, &dlist, ctx, NULL);
6052 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6054 gimple_seq_add_stmt (&bind_body, single_stmt);
6056 if (ctx->record_type)
6057 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6058 else
6059 lower_omp_single_simple (single_stmt, &bind_body);
6061 gimple_omp_set_body (single_stmt, NULL);
6063 gimple_seq_add_seq (&bind_body, dlist);
6065 bind_body = maybe_catch_exception (bind_body);
6067 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6068 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6069 gimple *g = gimple_build_omp_return (nowait);
6070 gimple_seq_add_stmt (&bind_body_tail, g);
6071 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6072 if (ctx->record_type)
6074 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6075 tree clobber = build_constructor (ctx->record_type, NULL);
6076 TREE_THIS_VOLATILE (clobber) = 1;
6077 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6078 clobber), GSI_SAME_STMT);
6080 gimple_seq_add_seq (&bind_body, bind_body_tail);
6081 gimple_bind_set_body (bind, bind_body);
6083 pop_gimplify_context (bind);
6085 gimple_bind_append_vars (bind, ctx->block_vars);
6086 BLOCK_VARS (block) = ctx->block_vars;
6087 if (BLOCK_VARS (block))
6088 TREE_USED (block) = 1;
6092 /* Expand code for an OpenMP master directive. */
6094 static void
6095 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6097 tree block, lab = NULL, x, bfn_decl;
6098 gimple *stmt = gsi_stmt (*gsi_p);
6099 gbind *bind;
6100 location_t loc = gimple_location (stmt);
6101 gimple_seq tseq;
6103 push_gimplify_context ();
6105 block = make_node (BLOCK);
6106 bind = gimple_build_bind (NULL, NULL, block);
6107 gsi_replace (gsi_p, bind, true);
6108 gimple_bind_add_stmt (bind, stmt);
6110 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6111 x = build_call_expr_loc (loc, bfn_decl, 0);
6112 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6113 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6114 tseq = NULL;
6115 gimplify_and_add (x, &tseq);
6116 gimple_bind_add_seq (bind, tseq);
6118 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6119 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6120 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6121 gimple_omp_set_body (stmt, NULL);
6123 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6125 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6127 pop_gimplify_context (bind);
6129 gimple_bind_append_vars (bind, ctx->block_vars);
6130 BLOCK_VARS (block) = ctx->block_vars;
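/* Illustrative sketch of the lowered form (approximate):

     if (omp_get_thread_num () != 0) goto <lab>;
     BODY;				/* wrapped by maybe_catch_exception */
     <lab>:
     GIMPLE_OMP_RETURN (nowait);	/* master implies no barrier */  */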
6134 /* Expand code for an OpenMP taskgroup directive. */
6136 static void
6137 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6139 gimple *stmt = gsi_stmt (*gsi_p);
6140 gcall *x;
6141 gbind *bind;
6142 tree block = make_node (BLOCK);
6144 bind = gimple_build_bind (NULL, NULL, block);
6145 gsi_replace (gsi_p, bind, true);
6146 gimple_bind_add_stmt (bind, stmt);
6148 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6149 0);
6150 gimple_bind_add_stmt (bind, x);
6152 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6153 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6154 gimple_omp_set_body (stmt, NULL);
6156 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6158 gimple_bind_append_vars (bind, ctx->block_vars);
6159 BLOCK_VARS (block) = ctx->block_vars;
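/* Illustrative sketch (approximate):

     GOMP_taskgroup_start ();
     BODY;
     GIMPLE_OMP_RETURN (nowait);

   The matching GOMP_taskgroup_end call is emitted later, when the
   region's GIMPLE_OMP_RETURN is expanded.  */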
6163 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if possible. */
6165 static void
6166 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6167 omp_context *ctx)
6169 struct omp_for_data fd;
6170 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6171 return;
6173 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6174 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6175 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6176 if (!fd.ordered)
6177 return;
6179 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6180 tree c = gimple_omp_ordered_clauses (ord_stmt);
6181 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6182 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6184 /* Merge depend clauses from multiple adjacent
6185 #pragma omp ordered depend(sink:...) constructs
6186 into one #pragma omp ordered depend(sink:...), so that
6187 we can optimize them together. */
6188 gimple_stmt_iterator gsi = *gsi_p;
6189 gsi_next (&gsi);
6190 while (!gsi_end_p (gsi))
6192 gimple *stmt = gsi_stmt (gsi);
6193 if (is_gimple_debug (stmt)
6194 || gimple_code (stmt) == GIMPLE_NOP)
6196 gsi_next (&gsi);
6197 continue;
6199 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6200 break;
6201 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6202 c = gimple_omp_ordered_clauses (ord_stmt2);
6203 if (c == NULL_TREE
6204 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6205 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6206 break;
6207 while (*list_p)
6208 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6209 *list_p = c;
6210 gsi_remove (&gsi, true);
6214 /* Canonicalize sink dependence clauses into one folded clause if
6215 possible.
6217 The basic algorithm is to create a sink vector whose first
6218 element is the GCD of all the first elements, and whose remaining
6219 elements are the minimum of the subsequent columns.
6221 We ignore dependence vectors whose first element is zero because
6222 such dependencies are known to be executed by the same thread.
6224 We take into account the direction of the loop, so a minimum
6225 becomes a maximum if the loop is iterating forwards. We also
6226 ignore sink clauses where the loop direction is unknown, or where
6227 the offsets are clearly invalid because they are not a multiple
6228 of the loop increment.
6230 For example:
6232 #pragma omp for ordered(2)
6233 for (i=0; i < N; ++i)
6234 for (j=0; j < M; ++j)
6236 #pragma omp ordered \
6237 depend(sink:i-8,j-2) \
6238 depend(sink:i,j-1) \ // Completely ignored because i+0.
6239 depend(sink:i-4,j-3) \
6240 depend(sink:i-6,j-4)
6241 #pragma omp ordered depend(source)
6244 Folded clause is:
6246 depend(sink:-gcd(8,4,6),-min(2,3,4))
6247 -or-
6248 depend(sink:-2,-2)
6251 /* FIXME: Computing GCDs where the first element is zero is
6252 non-trivial in the presence of collapsed loops. Do this later. */
6253 if (fd.collapse > 1)
6254 return;
6256 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6258 /* wide_int is not a POD so it must be default-constructed. */
6259 for (unsigned i = 0; i != 2 * len - 1; ++i)
6260 new (static_cast<void*>(folded_deps + i)) wide_int ();
6262 tree folded_dep = NULL_TREE;
6263 /* TRUE if the first dimension's offset is negative. */
6264 bool neg_offset_p = false;
6266 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6267 unsigned int i;
6268 while ((c = *list_p) != NULL)
6270 bool remove = false;
6272 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6273 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6274 goto next_ordered_clause;
6276 tree vec;
6277 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6278 vec && TREE_CODE (vec) == TREE_LIST;
6279 vec = TREE_CHAIN (vec), ++i)
6281 gcc_assert (i < len);
6283 /* omp_extract_for_data has canonicalized the condition. */
6284 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6285 || fd.loops[i].cond_code == GT_EXPR);
6286 bool forward = fd.loops[i].cond_code == LT_EXPR;
6287 bool maybe_lexically_later = true;
6289 /* While the committee makes up its mind, bail if we have any
6290 non-constant steps. */
6291 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6292 goto lower_omp_ordered_ret;
6294 tree itype = TREE_TYPE (TREE_VALUE (vec));
6295 if (POINTER_TYPE_P (itype))
6296 itype = sizetype;
6297 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6298 TYPE_PRECISION (itype),
6299 TYPE_SIGN (itype));
6301 /* Ignore invalid offsets that are not multiples of the step. */
6302 if (!wi::multiple_of_p (wi::abs (offset),
6303 wi::abs (wi::to_wide (fd.loops[i].step)),
6304 UNSIGNED))
6306 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6307 "ignoring sink clause with offset that is not "
6308 "a multiple of the loop step");
6309 remove = true;
6310 goto next_ordered_clause;
6313 /* Calculate the first dimension. The first dimension of
6314 the folded dependency vector is the GCD of the first
6315 elements, while ignoring any first elements whose offset
6316 is 0. */
6317 if (i == 0)
6319 /* Ignore dependence vectors whose first dimension is 0. */
6320 if (offset == 0)
6322 remove = true;
6323 goto next_ordered_clause;
6325 else
6327 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6329 error_at (OMP_CLAUSE_LOCATION (c),
6330 "first offset must be in opposite direction "
6331 "of loop iterations");
6332 goto lower_omp_ordered_ret;
6334 if (forward)
6335 offset = -offset;
6336 neg_offset_p = forward;
6337 /* Initialize the first time around. */
6338 if (folded_dep == NULL_TREE)
6340 folded_dep = c;
6341 folded_deps[0] = offset;
6343 else
6344 folded_deps[0] = wi::gcd (folded_deps[0],
6345 offset, UNSIGNED);
6348 /* Calculate minimum for the remaining dimensions. */
6349 else
6351 folded_deps[len + i - 1] = offset;
6352 if (folded_dep == c)
6353 folded_deps[i] = offset;
6354 else if (maybe_lexically_later
6355 && !wi::eq_p (folded_deps[i], offset))
6357 if (forward ^ wi::gts_p (folded_deps[i], offset))
6359 unsigned int j;
6360 folded_dep = c;
6361 for (j = 1; j <= i; j++)
6362 folded_deps[j] = folded_deps[len + j - 1];
6364 else
6365 maybe_lexically_later = false;
6369 gcc_assert (i == len);
6371 remove = true;
6373 next_ordered_clause:
6374 if (remove)
6375 *list_p = OMP_CLAUSE_CHAIN (c);
6376 else
6377 list_p = &OMP_CLAUSE_CHAIN (c);
6380 if (folded_dep)
6382 if (neg_offset_p)
6383 folded_deps[0] = -folded_deps[0];
6385 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6386 if (POINTER_TYPE_P (itype))
6387 itype = sizetype;
6389 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6390 = wide_int_to_tree (itype, folded_deps[0]);
6391 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6392 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6395 lower_omp_ordered_ret:
6397 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
6398 while we want a nop instead if we remove all clauses. */
6399 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6400 gsi_replace (gsi_p, gimple_build_nop (), true);
6404 /* Expand code for an OpenMP ordered directive. */
6406 static void
6407 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6409 tree block;
6410 gimple *stmt = gsi_stmt (*gsi_p), *g;
6411 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6412 gcall *x;
6413 gbind *bind;
6414 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6415 OMP_CLAUSE_SIMD);
6416 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6417 loop. */
6418 bool maybe_simt
6419 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6420 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6421 OMP_CLAUSE_THREADS);
6423 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6424 OMP_CLAUSE_DEPEND))
6426 /* FIXME: This needs to be moved to the expansion to verify various
6427 conditions only testable on a cfg with dominators computed, and also
6428 all the depend clauses to be merged still might need to be available
6429 for the runtime checks. */
6430 if (0)
6431 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6432 return;
6435 push_gimplify_context ();
6437 block = make_node (BLOCK);
6438 bind = gimple_build_bind (NULL, NULL, block);
6439 gsi_replace (gsi_p, bind, true);
6440 gimple_bind_add_stmt (bind, stmt);
6442 if (simd)
6444 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6445 build_int_cst (NULL_TREE, threads));
6446 cfun->has_simduid_loops = true;
6448 else
6449 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6450 0);
6451 gimple_bind_add_stmt (bind, x);
6453 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6454 if (maybe_simt)
6456 counter = create_tmp_var (integer_type_node);
6457 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6458 gimple_call_set_lhs (g, counter);
6459 gimple_bind_add_stmt (bind, g);
6461 body = create_artificial_label (UNKNOWN_LOCATION);
6462 test = create_artificial_label (UNKNOWN_LOCATION);
6463 gimple_bind_add_stmt (bind, gimple_build_label (body));
6465 tree simt_pred = create_tmp_var (integer_type_node);
6466 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6467 gimple_call_set_lhs (g, simt_pred);
6468 gimple_bind_add_stmt (bind, g);
6470 tree t = create_artificial_label (UNKNOWN_LOCATION);
6471 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6472 gimple_bind_add_stmt (bind, g);
6474 gimple_bind_add_stmt (bind, gimple_build_label (t));
6476 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6477 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6478 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6479 gimple_omp_set_body (stmt, NULL);
6481 if (maybe_simt)
6483 gimple_bind_add_stmt (bind, gimple_build_label (test));
6484 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6485 gimple_bind_add_stmt (bind, g);
6487 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6488 tree nonneg = create_tmp_var (integer_type_node);
6489 gimple_seq tseq = NULL;
6490 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6491 gimple_bind_add_seq (bind, tseq);
6493 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6494 gimple_call_set_lhs (g, nonneg);
6495 gimple_bind_add_stmt (bind, g);
6497 tree end = create_artificial_label (UNKNOWN_LOCATION);
6498 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6499 gimple_bind_add_stmt (bind, g);
6501 gimple_bind_add_stmt (bind, gimple_build_label (end));
6503 if (simd)
6504 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6505 build_int_cst (NULL_TREE, threads));
6506 else
6507 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6508 0);
6509 gimple_bind_add_stmt (bind, x);
6511 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6513 pop_gimplify_context (bind);
6515 gimple_bind_append_vars (bind, ctx->block_vars);
6516 BLOCK_VARS (block) = gimple_bind_vars (bind);
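/* Illustrative sketch of the maybe_simt shape built above (approximate),
   serializing SIMT lanes through the ordered body:

     counter = IFN_GOMP_SIMT_LANE ();
   body:
     if (IFN_GOMP_SIMT_ORDERED_PRED (counter) == 0)
       BODY;	/* only the lane whose turn it is executes */
   test:
     counter = counter - 1;
     if (IFN_GOMP_SIMT_VOTE_ANY (counter >= 0)) goto body;
   end:  */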
6520 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6521 substitution of a couple of function calls. But in the NAMED case, it
6522 requires that languages coordinate a symbol name. It is therefore
6523 best put here in common code. */
6525 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6527 static void
6528 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6530 tree block;
6531 tree name, lock, unlock;
6532 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6533 gbind *bind;
6534 location_t loc = gimple_location (stmt);
6535 gimple_seq tbody;
6537 name = gimple_omp_critical_name (stmt);
6538 if (name)
6540 tree decl;
6542 if (!critical_name_mutexes)
6543 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6545 tree *n = critical_name_mutexes->get (name);
6546 if (n == NULL)
6548 char *new_str;
6550 decl = create_tmp_var_raw (ptr_type_node);
6552 new_str = ACONCAT ((".gomp_critical_user_",
6553 IDENTIFIER_POINTER (name), NULL));
6554 DECL_NAME (decl) = get_identifier (new_str);
6555 TREE_PUBLIC (decl) = 1;
6556 TREE_STATIC (decl) = 1;
6557 DECL_COMMON (decl) = 1;
6558 DECL_ARTIFICIAL (decl) = 1;
6559 DECL_IGNORED_P (decl) = 1;
6561 varpool_node::finalize_decl (decl);
6563 critical_name_mutexes->put (name, decl);
6565 else
6566 decl = *n;
6568 /* If '#pragma omp critical' is inside an offloaded region or
6569 inside a function marked as offloadable, the symbol must be
6570 marked as offloadable too. */
6571 omp_context *octx;
6572 if (cgraph_node::get (current_function_decl)->offloadable)
6573 varpool_node::get_create (decl)->offloadable = 1;
6574 else
6575 for (octx = ctx->outer; octx; octx = octx->outer)
6576 if (is_gimple_omp_offloaded (octx->stmt))
6578 varpool_node::get_create (decl)->offloadable = 1;
6579 break;
6582 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6583 lock = build_call_expr_loc (loc, lock, 1,
6584 build_fold_addr_expr_loc (loc, decl));
6586 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6587 unlock = build_call_expr_loc (loc, unlock, 1,
6588 build_fold_addr_expr_loc (loc, decl));
6590 else
6592 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6593 lock = build_call_expr_loc (loc, lock, 0);
6595 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6596 unlock = build_call_expr_loc (loc, unlock, 0);
6599 push_gimplify_context ();
6601 block = make_node (BLOCK);
6602 bind = gimple_build_bind (NULL, NULL, block);
6603 gsi_replace (gsi_p, bind, true);
6604 gimple_bind_add_stmt (bind, stmt);
6606 tbody = gimple_bind_body (bind);
6607 gimplify_and_add (lock, &tbody);
6608 gimple_bind_set_body (bind, tbody);
6610 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6611 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6612 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6613 gimple_omp_set_body (stmt, NULL);
6615 tbody = gimple_bind_body (bind);
6616 gimplify_and_add (unlock, &tbody);
6617 gimple_bind_set_body (bind, tbody);
6619 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6621 pop_gimplify_context (bind);
6622 gimple_bind_append_vars (bind, ctx->block_vars);
6623 BLOCK_VARS (block) = gimple_bind_vars (bind);
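/* Illustrative sketch for "#pragma omp critical (foo)" (approximate):

     static void *.gomp_critical_user_foo;  /* TREE_PUBLIC, DECL_COMMON */
     ...
     GOMP_critical_name_start (&.gomp_critical_user_foo);
     BODY;				/* wrapped by maybe_catch_exception */
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   The unnamed form calls GOMP_critical_start/GOMP_critical_end with no
   arguments.  */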
6626 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6627 for a lastprivate clause. Given a loop control predicate of (V
6628 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6629 is appended to *DLIST, iterator initialization is appended to
6630 *BODY_P. */
6632 static void
6633 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6634 gimple_seq *dlist, struct omp_context *ctx)
6636 tree clauses, cond, vinit;
6637 enum tree_code cond_code;
6638 gimple_seq stmts;
6640 cond_code = fd->loop.cond_code;
6641 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6643 /* When possible, use a strict equality expression. This can let VRP
6644 type optimizations deduce the value and remove a copy. */
6645 if (tree_fits_shwi_p (fd->loop.step))
6647 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6648 if (step == 1 || step == -1)
6649 cond_code = EQ_EXPR;
6652 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6653 || gimple_omp_for_grid_phony (fd->for_stmt))
6654 cond = omp_grid_lastprivate_predicate (fd);
6655 else
6657 tree n2 = fd->loop.n2;
6658 if (fd->collapse > 1
6659 && TREE_CODE (n2) != INTEGER_CST
6660 && gimple_omp_for_combined_into_p (fd->for_stmt))
6662 struct omp_context *taskreg_ctx = NULL;
6663 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6665 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6666 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6667 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6669 if (gimple_omp_for_combined_into_p (gfor))
6671 gcc_assert (ctx->outer->outer
6672 && is_parallel_ctx (ctx->outer->outer));
6673 taskreg_ctx = ctx->outer->outer;
6675 else
6677 struct omp_for_data outer_fd;
6678 omp_extract_for_data (gfor, &outer_fd, NULL);
6679 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6682 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6683 taskreg_ctx = ctx->outer->outer;
6685 else if (is_taskreg_ctx (ctx->outer))
6686 taskreg_ctx = ctx->outer;
6687 if (taskreg_ctx)
6689 int i;
6690 tree taskreg_clauses
6691 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6692 tree innerc = omp_find_clause (taskreg_clauses,
6693 OMP_CLAUSE__LOOPTEMP_);
6694 gcc_assert (innerc);
6695 for (i = 0; i < fd->collapse; i++)
6697 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6698 OMP_CLAUSE__LOOPTEMP_);
6699 gcc_assert (innerc);
6701 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6702 OMP_CLAUSE__LOOPTEMP_);
6703 if (innerc)
6704 n2 = fold_convert (TREE_TYPE (n2),
6705 lookup_decl (OMP_CLAUSE_DECL (innerc),
6706 taskreg_ctx));
6709 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6712 clauses = gimple_omp_for_clauses (fd->for_stmt);
6713 stmts = NULL;
6714 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6715 if (!gimple_seq_empty_p (stmts))
6717 gimple_seq_add_seq (&stmts, *dlist);
6718 *dlist = stmts;
6720 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6721 vinit = fd->loop.n1;
6722 if (cond_code == EQ_EXPR
6723 && tree_fits_shwi_p (fd->loop.n2)
6724 && ! integer_zerop (fd->loop.n2))
6725 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6726 else
6727 vinit = unshare_expr (vinit);
6729 /* Initialize the iterator variable, so that threads that don't execute
6730 any iterations don't execute the lastprivate clauses by accident. */
6731 gimplify_assign (fd->loop.v, vinit, body_p);
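/* Illustrative sketch for a loop "for (V = N1; V < N2; V += STEP)"
   (approximate):

     V = N1;				/* vinit, appended to *BODY_P */
     ... loop ...
     if (V >= N2)			/* or V == N2 when |STEP| == 1 */
       <lastprivate copy-out>;		/* appended to *DLIST */  */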
6736 /* Lower code for an OMP loop directive. */
6738 static void
6739 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6741 tree *rhs_p, block;
6742 struct omp_for_data fd, *fdp = NULL;
6743 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6744 gbind *new_stmt;
6745 gimple_seq omp_for_body, body, dlist;
6746 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6747 size_t i;
6749 push_gimplify_context ();
6751 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6753 block = make_node (BLOCK);
6754 new_stmt = gimple_build_bind (NULL, NULL, block);
6755 /* Replace at gsi right away, so that 'stmt' is no longer a member
6756 of a sequence, as we're going to add to a different
6757 one below. */
6758 gsi_replace (gsi_p, new_stmt, true);
6760 /* Move declaration of temporaries in the loop body before we make
6761 it go away. */
6762 omp_for_body = gimple_omp_body (stmt);
6763 if (!gimple_seq_empty_p (omp_for_body)
6764 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6766 gbind *inner_bind
6767 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6768 tree vars = gimple_bind_vars (inner_bind);
6769 gimple_bind_append_vars (new_stmt, vars);
6770 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6771 keep them on the inner_bind and its block. */
6772 gimple_bind_set_vars (inner_bind, NULL_TREE);
6773 if (gimple_bind_block (inner_bind))
6774 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6777 if (gimple_omp_for_combined_into_p (stmt))
6779 omp_extract_for_data (stmt, &fd, NULL);
6780 fdp = &fd;
6782 /* We need two temporaries with fd.loop.v type (istart/iend)
6783 and then (fd.collapse - 1) temporaries with the same
6784 type for count2 ... countN-1 vars if not constant. */
6785 size_t count = 2;
6786 tree type = fd.iter_type;
6787 if (fd.collapse > 1
6788 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6789 count += fd.collapse - 1;
6790 bool taskreg_for
6791 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6792 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6793 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6794 tree simtc = NULL;
6795 tree clauses = *pc;
6796 if (taskreg_for)
6797 outerc
6798 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6799 OMP_CLAUSE__LOOPTEMP_);
6800 if (ctx->simt_stmt)
6801 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6802 OMP_CLAUSE__LOOPTEMP_);
6803 for (i = 0; i < count; i++)
6805 tree temp;
6806 if (taskreg_for)
6808 gcc_assert (outerc);
6809 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6810 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6811 OMP_CLAUSE__LOOPTEMP_);
6813 else
6815 /* If there are 2 adjacent SIMD stmts, one with _simt_
6816 clause, another without, make sure they have the same
6817 decls in _looptemp_ clauses, because the outer stmt
6818 they are combined into will look up just one inner_stmt. */
6819 if (ctx->simt_stmt)
6820 temp = OMP_CLAUSE_DECL (simtc);
6821 else
6822 temp = create_tmp_var (type);
6823 insert_decl_map (&ctx->outer->cb, temp, temp);
6825 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6826 OMP_CLAUSE_DECL (*pc) = temp;
6827 pc = &OMP_CLAUSE_CHAIN (*pc);
6828 if (ctx->simt_stmt)
6829 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6830 OMP_CLAUSE__LOOPTEMP_);
6832 *pc = clauses;
6835 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6836 dlist = NULL;
6837 body = NULL;
6838 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6839 fdp);
6840 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6842 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6844 /* Lower the header expressions. At this point, we can assume that
6845 the header is of the form:
6847 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6849 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6850 using the .omp_data_s mapping, if needed. */
6851 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6853 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6854 if (!is_gimple_min_invariant (*rhs_p))
6855 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6856 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6857 recompute_tree_invariant_for_addr_expr (*rhs_p);
6859 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6860 if (!is_gimple_min_invariant (*rhs_p))
6861 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6862 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6863 recompute_tree_invariant_for_addr_expr (*rhs_p);
6865 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6866 if (!is_gimple_min_invariant (*rhs_p))
6867 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6870 /* Once lowered, extract the bounds and clauses. */
6871 omp_extract_for_data (stmt, &fd, NULL);
6873 if (is_gimple_omp_oacc (ctx->stmt)
6874 && !ctx_in_oacc_kernels_region (ctx))
6875 lower_oacc_head_tail (gimple_location (stmt),
6876 gimple_omp_for_clauses (stmt),
6877 &oacc_head, &oacc_tail, ctx);
6879 /* Add OpenACC partitioning and reduction markers just before the loop. */
6880 if (oacc_head)
6881 gimple_seq_add_seq (&body, oacc_head);
6883 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6885 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6886 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6887 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6888 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6890 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6891 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6892 OMP_CLAUSE_LINEAR_STEP (c)
6893 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6894 ctx);
6897 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6898 && gimple_omp_for_grid_phony (stmt));
6899 if (!phony_loop)
6900 gimple_seq_add_stmt (&body, stmt);
6901 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6903 if (!phony_loop)
6904 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6905 fd.loop.v));
6907 /* After the loop, add exit clauses. */
6908 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6910 if (ctx->cancellable)
6911 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6913 gimple_seq_add_seq (&body, dlist);
6915 body = maybe_catch_exception (body);
6917 if (!phony_loop)
6919 /* Region exit marker goes at the end of the loop body. */
6920 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6921 maybe_add_implicit_barrier_cancel (ctx, &body);
6924 /* Add OpenACC joining and reduction markers just after the loop. */
6925 if (oacc_tail)
6926 gimple_seq_add_seq (&body, oacc_tail);
6928 pop_gimplify_context (new_stmt);
6930 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6931 maybe_remove_omp_member_access_dummy_vars (new_stmt);
6932 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6933 if (BLOCK_VARS (block))
6934 TREE_USED (block) = 1;
6936 gimple_bind_set_body (new_stmt, body);
6937 gimple_omp_set_body (stmt, NULL);
6938 gimple_omp_for_set_pre_body (stmt, NULL);
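/* Illustrative sketch of the ordering assembled above (approximate;
   OpenACC head/tail marker sequences bracket the loop when present):

     <input clause setup>  <pre-body>  <lowered bound temporaries>
     V = vinit;
     GIMPLE_OMP_FOR <clauses, header>
     <loop BODY>
     GIMPLE_OMP_CONTINUE (V, V)
     <reductions>  [cancel_label:]  <lastprivate/dtor list>
     GIMPLE_OMP_RETURN (nowait from clauses)  */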
6941 /* Callback for walk_stmts. Check if the current statement only contains
6942 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6944 static tree
6945 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6946 bool *handled_ops_p,
6947 struct walk_stmt_info *wi)
6949 int *info = (int *) wi->info;
6950 gimple *stmt = gsi_stmt (*gsi_p);
6952 *handled_ops_p = true;
6953 switch (gimple_code (stmt))
6955 WALK_SUBSTMTS;
6957 case GIMPLE_DEBUG:
6958 break;
6959 case GIMPLE_OMP_FOR:
6960 case GIMPLE_OMP_SECTIONS:
6961 *info = *info == 0 ? 1 : -1;
6962 break;
6963 default:
6964 *info = -1;
6965 break;
6967 return NULL;
6970 struct omp_taskcopy_context
6972 /* This field must be at the beginning, as we do "inheritance": Some
6973 callback functions for tree-inline.c (e.g., omp_copy_decl)
6974 receive a copy_body_data pointer that is up-casted to an
6975 omp_context pointer. */
6976 copy_body_data cb;
6977 omp_context *ctx;
6980 static tree
6981 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6983 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6985 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6986 return create_tmp_var (TREE_TYPE (var));
6988 return var;
6991 static tree
6992 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6994 tree name, new_fields = NULL, type, f;
6996 type = lang_hooks.types.make_type (RECORD_TYPE);
6997 name = DECL_NAME (TYPE_NAME (orig_type));
6998 name = build_decl (gimple_location (tcctx->ctx->stmt),
6999 TYPE_DECL, name, type);
7000 TYPE_NAME (type) = name;
7002 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7004 tree new_f = copy_node (f);
7005 DECL_CONTEXT (new_f) = type;
7006 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7007 TREE_CHAIN (new_f) = new_fields;
7008 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7009 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7010 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7011 &tcctx->cb, NULL);
7012 new_fields = new_f;
7013 tcctx->cb.decl_map->put (f, new_f);
7015 TYPE_FIELDS (type) = nreverse (new_fields);
7016 layout_type (type);
7017 return type;
7020 /* Create task copyfn. */
7022 static void
7023 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7025 struct function *child_cfun;
7026 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7027 tree record_type, srecord_type, bind, list;
7028 bool record_needs_remap = false, srecord_needs_remap = false;
7029 splay_tree_node n;
7030 struct omp_taskcopy_context tcctx;
7031 location_t loc = gimple_location (task_stmt);
7032 size_t looptempno = 0;
7034 child_fn = gimple_omp_task_copy_fn (task_stmt);
7035 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7036 gcc_assert (child_cfun->cfg == NULL);
7037 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7039 /* Reset DECL_CONTEXT on function arguments. */
7040 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7041 DECL_CONTEXT (t) = child_fn;
7043 /* Populate the function. */
7044 push_gimplify_context ();
7045 push_cfun (child_cfun);
7047 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7048 TREE_SIDE_EFFECTS (bind) = 1;
7049 list = NULL;
7050 DECL_SAVED_TREE (child_fn) = bind;
7051 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7053 /* Remap src and dst argument types if needed. */
7054 record_type = ctx->record_type;
7055 srecord_type = ctx->srecord_type;
7056 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7057 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7059 record_needs_remap = true;
7060 break;
7062 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7063 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7065 srecord_needs_remap = true;
7066 break;
7069 if (record_needs_remap || srecord_needs_remap)
7071 memset (&tcctx, '\0', sizeof (tcctx));
7072 tcctx.cb.src_fn = ctx->cb.src_fn;
7073 tcctx.cb.dst_fn = child_fn;
7074 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7075 gcc_checking_assert (tcctx.cb.src_node);
7076 tcctx.cb.dst_node = tcctx.cb.src_node;
7077 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7078 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7079 tcctx.cb.eh_lp_nr = 0;
7080 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7081 tcctx.cb.decl_map = new hash_map<tree, tree>;
7082 tcctx.ctx = ctx;
7084 if (record_needs_remap)
7085 record_type = task_copyfn_remap_type (&tcctx, record_type);
7086 if (srecord_needs_remap)
7087 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7089 else
7090 tcctx.cb.decl_map = NULL;
7092 arg = DECL_ARGUMENTS (child_fn);
7093 TREE_TYPE (arg) = build_pointer_type (record_type);
7094 sarg = DECL_CHAIN (arg);
7095 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7097 /* First pass: initialize temporaries used in record_type and srecord_type
7098 sizes and field offsets. */
7099 if (tcctx.cb.decl_map)
7100 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7101 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7103 tree *p;
7105 decl = OMP_CLAUSE_DECL (c);
7106 p = tcctx.cb.decl_map->get (decl);
7107 if (p == NULL)
7108 continue;
7109 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7110 sf = (tree) n->value;
7111 sf = *tcctx.cb.decl_map->get (sf);
7112 src = build_simple_mem_ref_loc (loc, sarg);
7113 src = omp_build_component_ref (src, sf);
7114 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7115 append_to_statement_list (t, &list);
7118 /* Second pass: copy shared var pointers and copy construct non-VLA
7119 firstprivate vars. */
7120 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7121 switch (OMP_CLAUSE_CODE (c))
7123 splay_tree_key key;
7124 case OMP_CLAUSE_SHARED:
7125 decl = OMP_CLAUSE_DECL (c);
7126 key = (splay_tree_key) decl;
7127 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7128 key = (splay_tree_key) &DECL_UID (decl);
7129 n = splay_tree_lookup (ctx->field_map, key);
7130 if (n == NULL)
7131 break;
7132 f = (tree) n->value;
7133 if (tcctx.cb.decl_map)
7134 f = *tcctx.cb.decl_map->get (f);
7135 n = splay_tree_lookup (ctx->sfield_map, key);
7136 sf = (tree) n->value;
7137 if (tcctx.cb.decl_map)
7138 sf = *tcctx.cb.decl_map->get (sf);
7139 src = build_simple_mem_ref_loc (loc, sarg);
7140 src = omp_build_component_ref (src, sf);
7141 dst = build_simple_mem_ref_loc (loc, arg);
7142 dst = omp_build_component_ref (dst, f);
7143 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7144 append_to_statement_list (t, &list);
7145 break;
7146 case OMP_CLAUSE__LOOPTEMP_:
7147 /* Fields for first two _looptemp_ clauses are initialized by
7148 GOMP_taskloop*, the rest are handled like firstprivate. */
7149 if (looptempno < 2)
7151 looptempno++;
7152 break;
7154 /* FALLTHRU */
7155 case OMP_CLAUSE_FIRSTPRIVATE:
7156 decl = OMP_CLAUSE_DECL (c);
7157 if (is_variable_sized (decl))
7158 break;
7159 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7160 if (n == NULL)
7161 break;
7162 f = (tree) n->value;
7163 if (tcctx.cb.decl_map)
7164 f = *tcctx.cb.decl_map->get (f);
7165 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7166 if (n != NULL)
7168 sf = (tree) n->value;
7169 if (tcctx.cb.decl_map)
7170 sf = *tcctx.cb.decl_map->get (sf);
7171 src = build_simple_mem_ref_loc (loc, sarg);
7172 src = omp_build_component_ref (src, sf);
7173 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7174 src = build_simple_mem_ref_loc (loc, src);
7176 else
7177 src = decl;
7178 dst = build_simple_mem_ref_loc (loc, arg);
7179 dst = omp_build_component_ref (dst, f);
7180 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__LOOPTEMP_)
7181 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7182 else
7183 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7184 append_to_statement_list (t, &list);
7185 break;
7186 case OMP_CLAUSE_PRIVATE:
7187 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7188 break;
7189 decl = OMP_CLAUSE_DECL (c);
7190 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7191 f = (tree) n->value;
7192 if (tcctx.cb.decl_map)
7193 f = *tcctx.cb.decl_map->get (f);
7194 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7195 if (n != NULL)
7197 sf = (tree) n->value;
7198 if (tcctx.cb.decl_map)
7199 sf = *tcctx.cb.decl_map->get (sf);
7200 src = build_simple_mem_ref_loc (loc, sarg);
7201 src = omp_build_component_ref (src, sf);
7202 if (use_pointer_for_field (decl, NULL))
7203 src = build_simple_mem_ref_loc (loc, src);
7205 else
7206 src = decl;
7207 dst = build_simple_mem_ref_loc (loc, arg);
7208 dst = omp_build_component_ref (dst, f);
7209 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7210 append_to_statement_list (t, &list);
7211 break;
7212 default:
7213 break;
7216 /* Last pass: handle VLA firstprivates. */
7217 if (tcctx.cb.decl_map)
7218 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7219 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7221 tree ind, ptr, df;
7223 decl = OMP_CLAUSE_DECL (c);
7224 if (!is_variable_sized (decl))
7225 continue;
7226 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7227 if (n == NULL)
7228 continue;
7229 f = (tree) n->value;
7230 f = *tcctx.cb.decl_map->get (f);
7231 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7232 ind = DECL_VALUE_EXPR (decl);
7233 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7234 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7235 n = splay_tree_lookup (ctx->sfield_map,
7236 (splay_tree_key) TREE_OPERAND (ind, 0));
7237 sf = (tree) n->value;
7238 sf = *tcctx.cb.decl_map->get (sf);
7239 src = build_simple_mem_ref_loc (loc, sarg);
7240 src = omp_build_component_ref (src, sf);
7241 src = build_simple_mem_ref_loc (loc, src);
7242 dst = build_simple_mem_ref_loc (loc, arg);
7243 dst = omp_build_component_ref (dst, f);
7244 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7245 append_to_statement_list (t, &list);
7246 n = splay_tree_lookup (ctx->field_map,
7247 (splay_tree_key) TREE_OPERAND (ind, 0));
7248 df = (tree) n->value;
7249 df = *tcctx.cb.decl_map->get (df);
7250 ptr = build_simple_mem_ref_loc (loc, arg);
7251 ptr = omp_build_component_ref (ptr, df);
7252 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7253 build_fold_addr_expr_loc (loc, dst));
7254 append_to_statement_list (t, &list);
7257 t = build1 (RETURN_EXPR, void_type_node, NULL);
7258 append_to_statement_list (t, &list);
7260 if (tcctx.cb.decl_map)
7261 delete tcctx.cb.decl_map;
7262 pop_gimplify_context (NULL);
7263 BIND_EXPR_BODY (bind) = list;
7264 pop_cfun ();
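/* Illustrative sketch of the generated copy function (approximate;
   struct and field names are placeholders):

     void .omp_task_copyfn (struct .omp_data_t *arg,	/* task's copy */
			    struct .omp_data_s *sarg)	/* sender data */
     {
       arg->shared_p = sarg->shared_p;	/* shared: copy the pointer */
       arg->fp = sarg->fp;		/* firstprivate: copy the value,
					   or invoke the copy ctor */
       ...
     }  */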
7267 static void
7268 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7270 tree c, clauses;
7271 gimple *g;
7272 size_t n_in = 0, n_out = 0, idx = 2, i;
7274 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7275 gcc_assert (clauses);
7276 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7277 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7278 switch (OMP_CLAUSE_DEPEND_KIND (c))
7280 case OMP_CLAUSE_DEPEND_IN:
7281 n_in++;
7282 break;
7283 case OMP_CLAUSE_DEPEND_OUT:
7284 case OMP_CLAUSE_DEPEND_INOUT:
7285 n_out++;
7286 break;
7287 case OMP_CLAUSE_DEPEND_SOURCE:
7288 case OMP_CLAUSE_DEPEND_SINK:
7289 /* FALLTHRU */
7290 default:
7291 gcc_unreachable ();
7293 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7294 tree array = create_tmp_var (type);
7295 TREE_ADDRESSABLE (array) = 1;
7296 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7297 NULL_TREE);
7298 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7299 gimple_seq_add_stmt (iseq, g);
7300 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7301 NULL_TREE);
7302 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7303 gimple_seq_add_stmt (iseq, g);
7304 for (i = 0; i < 2; i++)
7306 if ((i ? n_in : n_out) == 0)
7307 continue;
7308 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7309 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7310 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7312 tree t = OMP_CLAUSE_DECL (c);
7313 t = fold_convert (ptr_type_node, t);
7314 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7315 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7316 NULL_TREE, NULL_TREE);
7317 g = gimple_build_assign (r, t);
7318 gimple_seq_add_stmt (iseq, g);
7321 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7322 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7323 OMP_CLAUSE_CHAIN (c) = *pclauses;
7324 *pclauses = c;
7325 tree clobber = build_constructor (type, NULL);
7326 TREE_THIS_VOLATILE (clobber) = 1;
7327 g = gimple_build_assign (array, clobber);
7328 gimple_seq_add_stmt (oseq, g);
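/* Illustrative layout of the depend array built above, e.g. for
   depend(out: a) depend(in: b, c) (approximate):

     void *array[5] = { (void *) 3,	/* total number of addresses */
			(void *) 1,	/* number of out/inout entries */
			&a,		/* out/inout addresses first */
			&b, &c };	/* in addresses last */

   Its address is prepended to *PCLAUSES as a new DEPEND clause, and the
   array is clobbered in *OSEQ once the task has been spawned.  */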
7331 /* Lower the OpenMP parallel or task directive in the current statement
7332 in GSI_P. CTX holds context information for the directive. */
7334 static void
7335 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7337 tree clauses;
7338 tree child_fn, t;
7339 gimple *stmt = gsi_stmt (*gsi_p);
7340 gbind *par_bind, *bind, *dep_bind = NULL;
7341 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7342 location_t loc = gimple_location (stmt);
7344 clauses = gimple_omp_taskreg_clauses (stmt);
7345 par_bind
7346 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7347 par_body = gimple_bind_body (par_bind);
7348 child_fn = ctx->cb.dst_fn;
7349 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7350 && !gimple_omp_parallel_combined_p (stmt))
7352 struct walk_stmt_info wi;
7353 int ws_num = 0;
7355 memset (&wi, 0, sizeof (wi));
7356 wi.info = &ws_num;
7357 wi.val_only = true;
7358 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7359 if (ws_num == 1)
7360 gimple_omp_parallel_set_combined_p (stmt, true);
7362 gimple_seq dep_ilist = NULL;
7363 gimple_seq dep_olist = NULL;
7364 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7365 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7367 push_gimplify_context ();
7368 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7369 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7370 &dep_ilist, &dep_olist);
7373 if (ctx->srecord_type)
7374 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7376 push_gimplify_context ();
7378 par_olist = NULL;
7379 par_ilist = NULL;
7380 par_rlist = NULL;
7381 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7382 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7383 if (phony_construct && ctx->record_type)
7385 gcc_checking_assert (!ctx->receiver_decl);
7386 ctx->receiver_decl = create_tmp_var
7387 (build_reference_type (ctx->record_type), ".omp_rec");
7389 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7390 lower_omp (&par_body, ctx);
7391 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7392 lower_reduction_clauses (clauses, &par_rlist, ctx);
7394 /* Declare all the variables created by mapping and the variables
7395 declared in the scope of the parallel body. */
7396 record_vars_into (ctx->block_vars, child_fn);
7397 maybe_remove_omp_member_access_dummy_vars (par_bind);
7398 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7400 if (ctx->record_type)
7402 ctx->sender_decl
7403 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7404 : ctx->record_type, ".omp_data_o");
7405 DECL_NAMELESS (ctx->sender_decl) = 1;
7406 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7407 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7410 olist = NULL;
7411 ilist = NULL;
7412 lower_send_clauses (clauses, &ilist, &olist, ctx);
7413 lower_send_shared_vars (&ilist, &olist, ctx);
7415 if (ctx->record_type)
7417 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7418 TREE_THIS_VOLATILE (clobber) = 1;
7419 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7420 clobber));
7423 /* Once all the expansions are done, sequence all the different
7424 fragments inside gimple_omp_body. */
7426 new_body = NULL;
7428 if (ctx->record_type)
7430 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7431 /* fixup_child_record_type might have changed receiver_decl's type. */
7432 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7433 gimple_seq_add_stmt (&new_body,
7434 gimple_build_assign (ctx->receiver_decl, t));
7437 gimple_seq_add_seq (&new_body, par_ilist);
7438 gimple_seq_add_seq (&new_body, par_body);
7439 gimple_seq_add_seq (&new_body, par_rlist);
7440 if (ctx->cancellable)
7441 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7442 gimple_seq_add_seq (&new_body, par_olist);
7443 new_body = maybe_catch_exception (new_body);
7444 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7445 gimple_seq_add_stmt (&new_body,
7446 gimple_build_omp_continue (integer_zero_node,
7447 integer_zero_node));
7448 if (!phony_construct)
7450 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7451 gimple_omp_set_body (stmt, new_body);
7454 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7455 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7456 gimple_bind_add_seq (bind, ilist);
7457 if (!phony_construct)
7458 gimple_bind_add_stmt (bind, stmt);
7459 else
7460 gimple_bind_add_seq (bind, new_body);
7461 gimple_bind_add_seq (bind, olist);
7463 pop_gimplify_context (NULL);
7465 if (dep_bind)
7467 gimple_bind_add_seq (dep_bind, dep_ilist);
7468 gimple_bind_add_stmt (dep_bind, bind);
7469 gimple_bind_add_seq (dep_bind, dep_olist);
7470 pop_gimplify_context (dep_bind);
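/* Illustrative sketch of the final arrangement (approximate; the
   dep_bind wrapper is present only with depend clauses):

     <dep_ilist: build depend array>
     <ilist: send clauses/shared vars into .omp_data_o>
     GIMPLE_OMP_PARALLEL/TASK
       { .omp_data_i = &.omp_data_o;	/* receiver setup */
	 <par_ilist>  BODY  <reductions>
	 [cancel_label:]  <par_olist>
	 GIMPLE_OMP_RETURN }
     <olist: copy-back; clobber .omp_data_o>
     <dep_olist: clobber depend array>  */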
7474 /* Lower the GIMPLE_OMP_TARGET in the current statement
7475 in GSI_P. CTX holds context information for the directive. */
7477 static void
7478 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7480 tree clauses;
7481 tree child_fn, t, c;
7482 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7483 gbind *tgt_bind, *bind, *dep_bind = NULL;
7484 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7485 location_t loc = gimple_location (stmt);
7486 bool offloaded, data_region;
7487 unsigned int map_cnt = 0;
7489 offloaded = is_gimple_omp_offloaded (stmt);
7490 switch (gimple_omp_target_kind (stmt))
7492 case GF_OMP_TARGET_KIND_REGION:
7493 case GF_OMP_TARGET_KIND_UPDATE:
7494 case GF_OMP_TARGET_KIND_ENTER_DATA:
7495 case GF_OMP_TARGET_KIND_EXIT_DATA:
7496 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7497 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7498 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7499 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7500 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7501 data_region = false;
7502 break;
7503 case GF_OMP_TARGET_KIND_DATA:
7504 case GF_OMP_TARGET_KIND_OACC_DATA:
7505 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7506 data_region = true;
7507 break;
7508 default:
7509 gcc_unreachable ();
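/* E.g. "#pragma omp target" and OpenACC parallel/kernels are offloaded
   regions; "#pragma omp target data" and the OpenACC data constructs are
   data regions; "target update" and enter/exit data are standalone
   directives and therefore neither.  */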
7512 clauses = gimple_omp_target_clauses (stmt);
7514 gimple_seq dep_ilist = NULL;
7515 gimple_seq dep_olist = NULL;
7516 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7518 push_gimplify_context ();
7519 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7520 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7521 &dep_ilist, &dep_olist);
7524 tgt_bind = NULL;
7525 tgt_body = NULL;
7526 if (offloaded)
7528 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7529 tgt_body = gimple_bind_body (tgt_bind);
7531 else if (data_region)
7532 tgt_body = gimple_omp_body (stmt);
7533 child_fn = ctx->cb.dst_fn;
7535 push_gimplify_context ();
7536 fplist = NULL;
7538 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7539 switch (OMP_CLAUSE_CODE (c))
7541 tree var, x;
7543 default:
7544 break;
7545 case OMP_CLAUSE_MAP:
7546 #if CHECKING_P
7547 /* First check what we're prepared to handle in the following. */
7548 switch (OMP_CLAUSE_MAP_KIND (c))
7550 case GOMP_MAP_ALLOC:
7551 case GOMP_MAP_TO:
7552 case GOMP_MAP_FROM:
7553 case GOMP_MAP_TOFROM:
7554 case GOMP_MAP_POINTER:
7555 case GOMP_MAP_TO_PSET:
7556 case GOMP_MAP_DELETE:
7557 case GOMP_MAP_RELEASE:
7558 case GOMP_MAP_ALWAYS_TO:
7559 case GOMP_MAP_ALWAYS_FROM:
7560 case GOMP_MAP_ALWAYS_TOFROM:
7561 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7562 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7563 case GOMP_MAP_STRUCT:
7564 case GOMP_MAP_ALWAYS_POINTER:
7565 break;
7566 case GOMP_MAP_FORCE_ALLOC:
7567 case GOMP_MAP_FORCE_TO:
7568 case GOMP_MAP_FORCE_FROM:
7569 case GOMP_MAP_FORCE_TOFROM:
7570 case GOMP_MAP_FORCE_PRESENT:
7571 case GOMP_MAP_FORCE_DEVICEPTR:
7572 case GOMP_MAP_DEVICE_RESIDENT:
7573 case GOMP_MAP_LINK:
7574 gcc_assert (is_gimple_omp_oacc (stmt));
7575 break;
7576 default:
7577 gcc_unreachable ();
7579 #endif
7580 /* FALLTHRU */
7581 case OMP_CLAUSE_TO:
7582 case OMP_CLAUSE_FROM:
7583 oacc_firstprivate:
7584 var = OMP_CLAUSE_DECL (c);
7585 if (!DECL_P (var))
7587 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7588 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7589 && (OMP_CLAUSE_MAP_KIND (c)
7590 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7591 map_cnt++;
7592 continue;
7595 if (DECL_SIZE (var)
7596 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7598 tree var2 = DECL_VALUE_EXPR (var);
7599 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7600 var2 = TREE_OPERAND (var2, 0);
7601 gcc_assert (DECL_P (var2));
7602 var = var2;
7605 if (offloaded
7606 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7607 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7608 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7610 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7612 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7613 && varpool_node::get_create (var)->offloadable)
7614 continue;
7616 tree type = build_pointer_type (TREE_TYPE (var));
7617 tree new_var = lookup_decl (var, ctx);
7618 x = create_tmp_var_raw (type, get_name (new_var));
7619 gimple_add_tmp_var (x);
7620 x = build_simple_mem_ref (x);
7621 SET_DECL_VALUE_EXPR (new_var, x);
7622 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7624 continue;
7627 if (!maybe_lookup_field (var, ctx))
7628 continue;
7630 /* Don't remap oacc parallel reduction variables, because the
7631 intermediate result must be local to each gang. */
7632 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7633 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7635 x = build_receiver_ref (var, true, ctx);
7636 tree new_var = lookup_decl (var, ctx);
7638 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7639 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7640 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7641 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7642 x = build_simple_mem_ref (x);
7643 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7645 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7646 if (omp_is_reference (new_var)
7647 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
7649 /* Create a local object to hold the instance
7650 value. */
7651 tree type = TREE_TYPE (TREE_TYPE (new_var));
7652 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7653 tree inst = create_tmp_var (type, id);
7654 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7655 x = build_fold_addr_expr (inst);
7657 gimplify_assign (new_var, x, &fplist);
7659 else if (DECL_P (new_var))
7661 SET_DECL_VALUE_EXPR (new_var, x);
7662 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7664 else
7665 gcc_unreachable ();
7667 map_cnt++;
7668 break;
7670 case OMP_CLAUSE_FIRSTPRIVATE:
7671 if (is_oacc_parallel (ctx))
7672 goto oacc_firstprivate;
7673 map_cnt++;
7674 var = OMP_CLAUSE_DECL (c);
7675 if (!omp_is_reference (var)
7676 && !is_gimple_reg_type (TREE_TYPE (var)))
7678 tree new_var = lookup_decl (var, ctx);
7679 if (is_variable_sized (var))
7681 tree pvar = DECL_VALUE_EXPR (var);
7682 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7683 pvar = TREE_OPERAND (pvar, 0);
7684 gcc_assert (DECL_P (pvar));
7685 tree new_pvar = lookup_decl (pvar, ctx);
7686 x = build_fold_indirect_ref (new_pvar);
7687 TREE_THIS_NOTRAP (x) = 1;
7689 else
7690 x = build_receiver_ref (var, true, ctx);
7691 SET_DECL_VALUE_EXPR (new_var, x);
7692 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7694 break;
7696 case OMP_CLAUSE_PRIVATE:
7697 if (is_gimple_omp_oacc (ctx->stmt))
7698 break;
7699 var = OMP_CLAUSE_DECL (c);
7700 if (is_variable_sized (var))
7702 tree new_var = lookup_decl (var, ctx);
7703 tree pvar = DECL_VALUE_EXPR (var);
7704 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7705 pvar = TREE_OPERAND (pvar, 0);
7706 gcc_assert (DECL_P (pvar));
7707 tree new_pvar = lookup_decl (pvar, ctx);
7708 x = build_fold_indirect_ref (new_pvar);
7709 TREE_THIS_NOTRAP (x) = 1;
7710 SET_DECL_VALUE_EXPR (new_var, x);
7711 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7713 break;
7715 case OMP_CLAUSE_USE_DEVICE_PTR:
7716 case OMP_CLAUSE_IS_DEVICE_PTR:
7717 var = OMP_CLAUSE_DECL (c);
7718 map_cnt++;
7719 if (is_variable_sized (var))
7721 tree new_var = lookup_decl (var, ctx);
7722 tree pvar = DECL_VALUE_EXPR (var);
7723 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7724 pvar = TREE_OPERAND (pvar, 0);
7725 gcc_assert (DECL_P (pvar));
7726 tree new_pvar = lookup_decl (pvar, ctx);
7727 x = build_fold_indirect_ref (new_pvar);
7728 TREE_THIS_NOTRAP (x) = 1;
7729 SET_DECL_VALUE_EXPR (new_var, x);
7730 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7732 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7734 tree new_var = lookup_decl (var, ctx);
7735 tree type = build_pointer_type (TREE_TYPE (var));
7736 x = create_tmp_var_raw (type, get_name (new_var));
7737 gimple_add_tmp_var (x);
7738 x = build_simple_mem_ref (x);
7739 SET_DECL_VALUE_EXPR (new_var, x);
7740 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7742 else
7744 tree new_var = lookup_decl (var, ctx);
7745 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7746 gimple_add_tmp_var (x);
7747 SET_DECL_VALUE_EXPR (new_var, x);
7748 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7750 break;
7753 if (offloaded)
7755 target_nesting_level++;
7756 lower_omp (&tgt_body, ctx);
7757 target_nesting_level--;
7759 else if (data_region)
7760 lower_omp (&tgt_body, ctx);
7762 if (offloaded)
7764 /* Declare all the variables created by mapping and the variables
7765 declared in the scope of the target body. */
7766 record_vars_into (ctx->block_vars, child_fn);
7767 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
7768 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7771 olist = NULL;
7772 ilist = NULL;
7773 if (ctx->record_type)
7775 ctx->sender_decl
7776 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7777 DECL_NAMELESS (ctx->sender_decl) = 1;
7778 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7779 t = make_tree_vec (3);
7780 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7781 TREE_VEC_ELT (t, 1)
7782 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7783 ".omp_data_sizes");
7784 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7785 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7786 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7787 tree tkind_type = short_unsigned_type_node;
7788 int talign_shift = 8;
7789 TREE_VEC_ELT (t, 2)
7790 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7791 ".omp_data_kinds");
7792 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7793 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7794 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7795 gimple_omp_target_set_data_arg (stmt, t);
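/* The data_arg TREE_VEC parallels the runtime interface: element 0
   (.omp_data_arr) carries the host addresses/values, element 1 the byte
   sizes and element 2 the map kinds, one slot per mapped entity.  */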
7797 vec<constructor_elt, va_gc> *vsize;
7798 vec<constructor_elt, va_gc> *vkind;
7799 vec_alloc (vsize, map_cnt);
7800 vec_alloc (vkind, map_cnt);
7801 unsigned int map_idx = 0;
7803 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7804 switch (OMP_CLAUSE_CODE (c))
7806 tree ovar, nc, s, purpose, var, x, type;
7807 unsigned int talign;
7809 default:
7810 break;
7812 case OMP_CLAUSE_MAP:
7813 case OMP_CLAUSE_TO:
7814 case OMP_CLAUSE_FROM:
7815 oacc_firstprivate_map:
7816 nc = c;
7817 ovar = OMP_CLAUSE_DECL (c);
7818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7819 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7820 || (OMP_CLAUSE_MAP_KIND (c)
7821 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7822 break;
7823 if (!DECL_P (ovar))
7825 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7826 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7828 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7829 == get_base_address (ovar));
7830 nc = OMP_CLAUSE_CHAIN (c);
7831 ovar = OMP_CLAUSE_DECL (nc);
7833 else
7835 tree x = build_sender_ref (ovar, ctx);
7836 tree v
7837 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7838 gimplify_assign (x, v, &ilist);
7839 nc = NULL_TREE;
7842 else
7844 if (DECL_SIZE (ovar)
7845 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7847 tree ovar2 = DECL_VALUE_EXPR (ovar);
7848 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7849 ovar2 = TREE_OPERAND (ovar2, 0);
7850 gcc_assert (DECL_P (ovar2));
7851 ovar = ovar2;
7853 if (!maybe_lookup_field (ovar, ctx))
7854 continue;
7857 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7858 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7859 talign = DECL_ALIGN_UNIT (ovar);
7860 if (nc)
7862 var = lookup_decl_in_outer_ctx (ovar, ctx);
7863 x = build_sender_ref (ovar, ctx);
7865 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7866 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7867 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7868 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7870 gcc_assert (offloaded);
7871 tree avar
7872 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7873 mark_addressable (avar);
7874 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7875 talign = DECL_ALIGN_UNIT (avar);
7876 avar = build_fold_addr_expr (avar);
7877 gimplify_assign (x, avar, &ilist);
7879 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7881 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7882 if (!omp_is_reference (var))
7884 if (is_gimple_reg (var)
7885 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7886 TREE_NO_WARNING (var) = 1;
7887 var = build_fold_addr_expr (var);
7889 else
7890 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7891 gimplify_assign (x, var, &ilist);
7893 else if (is_gimple_reg (var))
7895 gcc_assert (offloaded);
7896 tree avar = create_tmp_var (TREE_TYPE (var));
7897 mark_addressable (avar);
7898 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7899 if (GOMP_MAP_COPY_TO_P (map_kind)
7900 || map_kind == GOMP_MAP_POINTER
7901 || map_kind == GOMP_MAP_TO_PSET
7902 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7904 /* If we need to initialize a temporary
7905 with VAR because it is not addressable, and
7906 the variable hasn't been initialized yet, then
7907 we'll get a warning for the store to avar.
7908 Don't warn in that case; the mapping might
7909 be implicit. */
7910 TREE_NO_WARNING (var) = 1;
7911 gimplify_assign (avar, var, &ilist);
7913 avar = build_fold_addr_expr (avar);
7914 gimplify_assign (x, avar, &ilist);
7915 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7916 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7917 && !TYPE_READONLY (TREE_TYPE (var)))
7919 x = unshare_expr (x);
7920 x = build_simple_mem_ref (x);
7921 gimplify_assign (var, x, &olist);
7924 else
7926 var = build_fold_addr_expr (var);
7927 gimplify_assign (x, var, &ilist);
7930 s = NULL_TREE;
7931 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7933 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7934 s = TREE_TYPE (ovar);
7935 if (TREE_CODE (s) == REFERENCE_TYPE)
7936 s = TREE_TYPE (s);
7937 s = TYPE_SIZE_UNIT (s);
7939 else
7940 s = OMP_CLAUSE_SIZE (c);
7941 if (s == NULL_TREE)
7942 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7943 s = fold_convert (size_type_node, s);
7944 purpose = size_int (map_idx++);
7945 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7946 if (TREE_CODE (s) != INTEGER_CST)
7947 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7949 unsigned HOST_WIDE_INT tkind, tkind_zero;
7950 switch (OMP_CLAUSE_CODE (c))
7952 case OMP_CLAUSE_MAP:
7953 tkind = OMP_CLAUSE_MAP_KIND (c);
7954 tkind_zero = tkind;
7955 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7956 switch (tkind)
7958 case GOMP_MAP_ALLOC:
7959 case GOMP_MAP_TO:
7960 case GOMP_MAP_FROM:
7961 case GOMP_MAP_TOFROM:
7962 case GOMP_MAP_ALWAYS_TO:
7963 case GOMP_MAP_ALWAYS_FROM:
7964 case GOMP_MAP_ALWAYS_TOFROM:
7965 case GOMP_MAP_RELEASE:
7966 case GOMP_MAP_FORCE_TO:
7967 case GOMP_MAP_FORCE_FROM:
7968 case GOMP_MAP_FORCE_TOFROM:
7969 case GOMP_MAP_FORCE_PRESENT:
7970 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7971 break;
7972 case GOMP_MAP_DELETE:
7973 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7974 default:
7975 break;
7977 if (tkind_zero != tkind)
7979 if (integer_zerop (s))
7980 tkind = tkind_zero;
7981 else if (integer_nonzerop (s))
7982 tkind_zero = tkind;
7984 break;
7985 case OMP_CLAUSE_FIRSTPRIVATE:
7986 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7987 tkind = GOMP_MAP_TO;
7988 tkind_zero = tkind;
7989 break;
7990 case OMP_CLAUSE_TO:
7991 tkind = GOMP_MAP_TO;
7992 tkind_zero = tkind;
7993 break;
7994 case OMP_CLAUSE_FROM:
7995 tkind = GOMP_MAP_FROM;
7996 tkind_zero = tkind;
7997 break;
7998 default:
7999 gcc_unreachable ();
8001 gcc_checking_assert (tkind
8002 < (HOST_WIDE_INT_C (1U) << talign_shift));
8003 gcc_checking_assert (tkind_zero
8004 < (HOST_WIDE_INT_C (1U) << talign_shift));
8005 talign = ceil_log2 (talign);
8006 tkind |= talign << talign_shift;
8007 tkind_zero |= talign << talign_shift;
8008 gcc_checking_assert (tkind
8009 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8010 gcc_checking_assert (tkind_zero
8011 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
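/* Informally, the encoding is: map kind in the low TALIGN_SHIFT bits,
   ceil_log2 of the alignment above them; e.g. an 8-byte-aligned
   GOMP_MAP_TO entry becomes (3 << 8) | GOMP_MAP_TO.  */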
8012 if (tkind == tkind_zero)
8013 x = build_int_cstu (tkind_type, tkind);
8014 else
8016 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8017 x = build3 (COND_EXPR, tkind_type,
8018 fold_build2 (EQ_EXPR, boolean_type_node,
8019 unshare_expr (s), size_zero_node),
8020 build_int_cstu (tkind_type, tkind_zero),
8021 build_int_cstu (tkind_type, tkind));
8023 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8024 if (nc && nc != c)
8025 c = nc;
8026 break;
8028 case OMP_CLAUSE_FIRSTPRIVATE:
8029 if (is_oacc_parallel (ctx))
8030 goto oacc_firstprivate_map;
8031 ovar = OMP_CLAUSE_DECL (c);
8032 if (omp_is_reference (ovar))
8033 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8034 else
8035 talign = DECL_ALIGN_UNIT (ovar);
8036 var = lookup_decl_in_outer_ctx (ovar, ctx);
8037 x = build_sender_ref (ovar, ctx);
8038 tkind = GOMP_MAP_FIRSTPRIVATE;
8039 type = TREE_TYPE (ovar);
8040 if (omp_is_reference (ovar))
8041 type = TREE_TYPE (type);
8042 if ((INTEGRAL_TYPE_P (type)
8043 && TYPE_PRECISION (type) <= POINTER_SIZE)
8044 || TREE_CODE (type) == POINTER_TYPE)
8046 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
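/* Small scalars travel by value: the value itself, widened to a
   pointer-sized integer, is stored in the address slot, so no
   dereference is needed on the device side.  */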
8047 tree t = var;
8048 if (omp_is_reference (var))
8049 t = build_simple_mem_ref (var);
8050 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8051 TREE_NO_WARNING (var) = 1;
8052 if (TREE_CODE (type) != POINTER_TYPE)
8053 t = fold_convert (pointer_sized_int_node, t);
8054 t = fold_convert (TREE_TYPE (x), t);
8055 gimplify_assign (x, t, &ilist);
8057 else if (omp_is_reference (var))
8058 gimplify_assign (x, var, &ilist);
8059 else if (is_gimple_reg (var))
8061 tree avar = create_tmp_var (TREE_TYPE (var));
8062 mark_addressable (avar);
8063 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8064 TREE_NO_WARNING (var) = 1;
8065 gimplify_assign (avar, var, &ilist);
8066 avar = build_fold_addr_expr (avar);
8067 gimplify_assign (x, avar, &ilist);
8069 else
8071 var = build_fold_addr_expr (var);
8072 gimplify_assign (x, var, &ilist);
8074 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8075 s = size_int (0);
8076 else if (omp_is_reference (ovar))
8077 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8078 else
8079 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8080 s = fold_convert (size_type_node, s);
8081 purpose = size_int (map_idx++);
8082 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8083 if (TREE_CODE (s) != INTEGER_CST)
8084 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8086 gcc_checking_assert (tkind
8087 < (HOST_WIDE_INT_C (1U) << talign_shift));
8088 talign = ceil_log2 (talign);
8089 tkind |= talign << talign_shift;
8090 gcc_checking_assert (tkind
8091 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8092 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8093 build_int_cstu (tkind_type, tkind));
8094 break;
8096 case OMP_CLAUSE_USE_DEVICE_PTR:
8097 case OMP_CLAUSE_IS_DEVICE_PTR:
8098 ovar = OMP_CLAUSE_DECL (c);
8099 var = lookup_decl_in_outer_ctx (ovar, ctx);
8100 x = build_sender_ref (ovar, ctx);
8101 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8102 tkind = GOMP_MAP_USE_DEVICE_PTR;
8103 else
8104 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8105 type = TREE_TYPE (ovar);
8106 if (TREE_CODE (type) == ARRAY_TYPE)
8107 var = build_fold_addr_expr (var);
8108 else
8110 if (omp_is_reference (ovar))
8112 type = TREE_TYPE (type);
8113 if (TREE_CODE (type) != ARRAY_TYPE)
8114 var = build_simple_mem_ref (var);
8115 var = fold_convert (TREE_TYPE (x), var);
8118 gimplify_assign (x, var, &ilist);
8119 s = size_int (0);
8120 purpose = size_int (map_idx++);
8121 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8122 gcc_checking_assert (tkind
8123 < (HOST_WIDE_INT_C (1U) << talign_shift));
8124 gcc_checking_assert (tkind
8125 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8126 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8127 build_int_cstu (tkind_type, tkind));
8128 break;
8131 gcc_assert (map_idx == map_cnt);
8133 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8134 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8135 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8136 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8137 for (int i = 1; i <= 2; i++)
8138 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8140 gimple_seq initlist = NULL;
8141 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8142 TREE_VEC_ELT (t, i)),
8143 &initlist, true, NULL_TREE);
8144 gimple_seq_add_seq (&ilist, initlist);
8146 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8147 NULL);
8148 TREE_THIS_VOLATILE (clobber) = 1;
8149 gimple_seq_add_stmt (&olist,
8150 gimple_build_assign (TREE_VEC_ELT (t, i),
8151 clobber));
8154 tree clobber = build_constructor (ctx->record_type, NULL);
8155 TREE_THIS_VOLATILE (clobber) = 1;
8156 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8157 clobber));
8160 /* Once all the expansions are done, sequence all the different
8161 fragments inside gimple_omp_body. */
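/* (Roughly: receiver setup first, then firstprivate initializations,
   the OpenACC fork, the region body, the join and finally the OMP
   return.)  */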
8163 new_body = NULL;
8165 if (offloaded
8166 && ctx->record_type)
8168 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8169 /* fixup_child_record_type might have changed receiver_decl's type. */
8170 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8171 gimple_seq_add_stmt (&new_body,
8172 gimple_build_assign (ctx->receiver_decl, t));
8174 gimple_seq_add_seq (&new_body, fplist);
8176 if (offloaded || data_region)
8178 tree prev = NULL_TREE;
8179 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8180 switch (OMP_CLAUSE_CODE (c))
8182 tree var, x;
8183 default:
8184 break;
8185 case OMP_CLAUSE_FIRSTPRIVATE:
8186 if (is_gimple_omp_oacc (ctx->stmt))
8187 break;
8188 var = OMP_CLAUSE_DECL (c);
8189 if (omp_is_reference (var)
8190 || is_gimple_reg_type (TREE_TYPE (var)))
8192 tree new_var = lookup_decl (var, ctx);
8193 tree type;
8194 type = TREE_TYPE (var);
8195 if (omp_is_reference (var))
8196 type = TREE_TYPE (type);
8197 if ((INTEGRAL_TYPE_P (type)
8198 && TYPE_PRECISION (type) <= POINTER_SIZE)
8199 || TREE_CODE (type) == POINTER_TYPE)
8201 x = build_receiver_ref (var, false, ctx);
8202 if (TREE_CODE (type) != POINTER_TYPE)
8203 x = fold_convert (pointer_sized_int_node, x);
8204 x = fold_convert (type, x);
8205 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8206 fb_rvalue);
8207 if (omp_is_reference (var))
8209 tree v = create_tmp_var_raw (type, get_name (var));
8210 gimple_add_tmp_var (v);
8211 TREE_ADDRESSABLE (v) = 1;
8212 gimple_seq_add_stmt (&new_body,
8213 gimple_build_assign (v, x));
8214 x = build_fold_addr_expr (v);
8216 gimple_seq_add_stmt (&new_body,
8217 gimple_build_assign (new_var, x));
8219 else
8221 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8222 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8223 fb_rvalue);
8224 gimple_seq_add_stmt (&new_body,
8225 gimple_build_assign (new_var, x));
8228 else if (is_variable_sized (var))
8230 tree pvar = DECL_VALUE_EXPR (var);
8231 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8232 pvar = TREE_OPERAND (pvar, 0);
8233 gcc_assert (DECL_P (pvar));
8234 tree new_var = lookup_decl (pvar, ctx);
8235 x = build_receiver_ref (var, false, ctx);
8236 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8237 gimple_seq_add_stmt (&new_body,
8238 gimple_build_assign (new_var, x));
8240 break;
8241 case OMP_CLAUSE_PRIVATE:
8242 if (is_gimple_omp_oacc (ctx->stmt))
8243 break;
8244 var = OMP_CLAUSE_DECL (c);
8245 if (omp_is_reference (var))
8247 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8248 tree new_var = lookup_decl (var, ctx);
8249 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8250 if (TREE_CONSTANT (x))
8252 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8253 get_name (var));
8254 gimple_add_tmp_var (x);
8255 TREE_ADDRESSABLE (x) = 1;
8256 x = build_fold_addr_expr_loc (clause_loc, x);
8258 else
8259 break;
8261 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8262 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8263 gimple_seq_add_stmt (&new_body,
8264 gimple_build_assign (new_var, x));
8266 break;
8267 case OMP_CLAUSE_USE_DEVICE_PTR:
8268 case OMP_CLAUSE_IS_DEVICE_PTR:
8269 var = OMP_CLAUSE_DECL (c);
8270 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8271 x = build_sender_ref (var, ctx);
8272 else
8273 x = build_receiver_ref (var, false, ctx);
8274 if (is_variable_sized (var))
8276 tree pvar = DECL_VALUE_EXPR (var);
8277 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8278 pvar = TREE_OPERAND (pvar, 0);
8279 gcc_assert (DECL_P (pvar));
8280 tree new_var = lookup_decl (pvar, ctx);
8281 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8282 gimple_seq_add_stmt (&new_body,
8283 gimple_build_assign (new_var, x));
8285 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8287 tree new_var = lookup_decl (var, ctx);
8288 new_var = DECL_VALUE_EXPR (new_var);
8289 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8290 new_var = TREE_OPERAND (new_var, 0);
8291 gcc_assert (DECL_P (new_var));
8292 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8293 gimple_seq_add_stmt (&new_body,
8294 gimple_build_assign (new_var, x));
8296 else
8298 tree type = TREE_TYPE (var);
8299 tree new_var = lookup_decl (var, ctx);
8300 if (omp_is_reference (var))
8302 type = TREE_TYPE (type);
8303 if (TREE_CODE (type) != ARRAY_TYPE)
8305 tree v = create_tmp_var_raw (type, get_name (var));
8306 gimple_add_tmp_var (v);
8307 TREE_ADDRESSABLE (v) = 1;
8308 x = fold_convert (type, x);
8309 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8310 fb_rvalue);
8311 gimple_seq_add_stmt (&new_body,
8312 gimple_build_assign (v, x));
8313 x = build_fold_addr_expr (v);
8316 new_var = DECL_VALUE_EXPR (new_var);
8317 x = fold_convert (TREE_TYPE (new_var), x);
8318 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8319 gimple_seq_add_stmt (&new_body,
8320 gimple_build_assign (new_var, x));
8322 break;
8324 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8325 so that any firstprivate vars holding their OMP_CLAUSE_SIZE have
8326 already been handled; similarly OMP_CLAUSE_PRIVATE for VLAs
8327 or references to VLAs. */
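/* (Roughly: for "map(tofrom: p[0:n])" with pointer P, the earlier phases
   emit a GOMP_MAP_FIRSTPRIVATE_POINTER clause for P right after the
   clause mapping the array section; PREV below tracks that preceding
   clause.)  */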
8328 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8329 switch (OMP_CLAUSE_CODE (c))
8331 tree var;
8332 default:
8333 break;
8334 case OMP_CLAUSE_MAP:
8335 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8336 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8338 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8339 poly_int64 offset = 0;
8340 gcc_assert (prev);
8341 var = OMP_CLAUSE_DECL (c);
8342 if (DECL_P (var)
8343 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8344 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8345 ctx))
8346 && varpool_node::get_create (var)->offloadable)
8347 break;
8348 if (TREE_CODE (var) == INDIRECT_REF
8349 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8350 var = TREE_OPERAND (var, 0);
8351 if (TREE_CODE (var) == COMPONENT_REF)
8353 var = get_addr_base_and_unit_offset (var, &offset);
8354 gcc_assert (var != NULL_TREE && DECL_P (var));
8356 else if (DECL_SIZE (var)
8357 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8359 tree var2 = DECL_VALUE_EXPR (var);
8360 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8361 var2 = TREE_OPERAND (var2, 0);
8362 gcc_assert (DECL_P (var2));
8363 var = var2;
8365 tree new_var = lookup_decl (var, ctx), x;
8366 tree type = TREE_TYPE (new_var);
8367 bool is_ref;
8368 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8369 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8370 == COMPONENT_REF))
8372 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8373 is_ref = true;
8374 new_var = build2 (MEM_REF, type,
8375 build_fold_addr_expr (new_var),
8376 build_int_cst (build_pointer_type (type),
8377 offset));
8379 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8381 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8382 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8383 new_var = build2 (MEM_REF, type,
8384 build_fold_addr_expr (new_var),
8385 build_int_cst (build_pointer_type (type),
8386 offset));
8388 else
8389 is_ref = omp_is_reference (var);
8390 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8391 is_ref = false;
8392 bool ref_to_array = false;
8393 if (is_ref)
8395 type = TREE_TYPE (type);
8396 if (TREE_CODE (type) == ARRAY_TYPE)
8398 type = build_pointer_type (type);
8399 ref_to_array = true;
8402 else if (TREE_CODE (type) == ARRAY_TYPE)
8404 tree decl2 = DECL_VALUE_EXPR (new_var);
8405 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8406 decl2 = TREE_OPERAND (decl2, 0);
8407 gcc_assert (DECL_P (decl2));
8408 new_var = decl2;
8409 type = TREE_TYPE (new_var);
8411 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8412 x = fold_convert_loc (clause_loc, type, x);
8413 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8415 tree bias = OMP_CLAUSE_SIZE (c);
8416 if (DECL_P (bias))
8417 bias = lookup_decl (bias, ctx);
8418 bias = fold_convert_loc (clause_loc, sizetype, bias);
8419 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8420 bias);
8421 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8422 TREE_TYPE (x), x, bias);
8424 if (ref_to_array)
8425 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8426 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8427 if (is_ref && !ref_to_array)
8429 tree t = create_tmp_var_raw (type, get_name (var));
8430 gimple_add_tmp_var (t);
8431 TREE_ADDRESSABLE (t) = 1;
8432 gimple_seq_add_stmt (&new_body,
8433 gimple_build_assign (t, x));
8434 x = build_fold_addr_expr_loc (clause_loc, t);
8436 gimple_seq_add_stmt (&new_body,
8437 gimple_build_assign (new_var, x));
8438 prev = NULL_TREE;
8440 else if (OMP_CLAUSE_CHAIN (c)
8441 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8442 == OMP_CLAUSE_MAP
8443 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8444 == GOMP_MAP_FIRSTPRIVATE_POINTER
8445 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8446 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8447 prev = c;
8448 break;
8449 case OMP_CLAUSE_PRIVATE:
8450 var = OMP_CLAUSE_DECL (c);
8451 if (is_variable_sized (var))
8453 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8454 tree new_var = lookup_decl (var, ctx);
8455 tree pvar = DECL_VALUE_EXPR (var);
8456 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8457 pvar = TREE_OPERAND (pvar, 0);
8458 gcc_assert (DECL_P (pvar));
8459 tree new_pvar = lookup_decl (pvar, ctx);
8460 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8461 tree al = size_int (DECL_ALIGN (var));
8462 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8463 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8464 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8465 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8466 gimple_seq_add_stmt (&new_body,
8467 gimple_build_assign (new_pvar, x));
8469 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8471 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8472 tree new_var = lookup_decl (var, ctx);
8473 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8474 if (TREE_CONSTANT (x))
8475 break;
8476 else
8478 tree atmp
8479 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8480 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8481 tree al = size_int (TYPE_ALIGN (rtype));
8482 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8485 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8486 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8487 gimple_seq_add_stmt (&new_body,
8488 gimple_build_assign (new_var, x));
8490 break;
8493 gimple_seq fork_seq = NULL;
8494 gimple_seq join_seq = NULL;
8496 if (is_oacc_parallel (ctx))
8498 /* If there are reductions on the offloaded region itself, treat
8499 them as a dummy GANG loop. */
8500 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8502 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8503 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8506 gimple_seq_add_seq (&new_body, fork_seq);
8507 gimple_seq_add_seq (&new_body, tgt_body);
8508 gimple_seq_add_seq (&new_body, join_seq);
8510 if (offloaded)
8511 new_body = maybe_catch_exception (new_body);
8513 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8514 gimple_omp_set_body (stmt, new_body);
8517 bind = gimple_build_bind (NULL, NULL,
8518 tgt_bind ? gimple_bind_block (tgt_bind)
8519 : NULL_TREE);
8520 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8521 gimple_bind_add_seq (bind, ilist);
8522 gimple_bind_add_stmt (bind, stmt);
8523 gimple_bind_add_seq (bind, olist);
8525 pop_gimplify_context (NULL);
8527 if (dep_bind)
8529 gimple_bind_add_seq (dep_bind, dep_ilist);
8530 gimple_bind_add_stmt (dep_bind, bind);
8531 gimple_bind_add_seq (dep_bind, dep_olist);
8532 pop_gimplify_context (dep_bind);
8536 /* Expand code for an OpenMP teams directive. */
8538 static void
8539 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8541 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8542 push_gimplify_context ();
8544 tree block = make_node (BLOCK);
8545 gbind *bind = gimple_build_bind (NULL, NULL, block);
8546 gsi_replace (gsi_p, bind, true);
8547 gimple_seq bind_body = NULL;
8548 gimple_seq dlist = NULL;
8549 gimple_seq olist = NULL;
8551 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8552 OMP_CLAUSE_NUM_TEAMS);
8553 if (num_teams == NULL_TREE)
8554 num_teams = build_int_cst (unsigned_type_node, 0);
8555 else
8557 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8558 num_teams = fold_convert (unsigned_type_node, num_teams);
8559 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8561 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8562 OMP_CLAUSE_THREAD_LIMIT);
8563 if (thread_limit == NULL_TREE)
8564 thread_limit = build_int_cst (unsigned_type_node, 0);
8565 else
8567 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8568 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8569 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8570 fb_rvalue);
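/* A value of zero in either case lets the runtime pick the value itself;
   both are passed unchanged to the GOMP_TEAMS call built below.  */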
8573 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8574 &bind_body, &dlist, ctx, NULL);
8575 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8576 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8577 if (!gimple_omp_teams_grid_phony (teams_stmt))
8579 gimple_seq_add_stmt (&bind_body, teams_stmt);
8580 location_t loc = gimple_location (teams_stmt);
8581 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8582 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8583 gimple_set_location (call, loc);
8584 gimple_seq_add_stmt (&bind_body, call);
8587 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8588 gimple_omp_set_body (teams_stmt, NULL);
8589 gimple_seq_add_seq (&bind_body, olist);
8590 gimple_seq_add_seq (&bind_body, dlist);
8591 if (!gimple_omp_teams_grid_phony (teams_stmt))
8592 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8593 gimple_bind_set_body (bind, bind_body);
8595 pop_gimplify_context (bind);
8597 gimple_bind_append_vars (bind, ctx->block_vars);
8598 BLOCK_VARS (block) = ctx->block_vars;
8599 if (BLOCK_VARS (block))
8600 TREE_USED (block) = 1;
8603 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8605 static void
8606 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8608 gimple *stmt = gsi_stmt (*gsi_p);
8609 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8610 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8611 gimple_build_omp_return (false));
8615 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8616 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8617 of an OMP context, but with task_shared_vars set. */
8619 static tree
8620 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8621 void *data)
8623 tree t = *tp;
8625 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
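/* (After privatization such a variable may stand for something like
   "*.omp_data_i->x", which is not a valid operand where the variable
   itself appeared; the field name here is illustrative.)  */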
8626 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8627 return t;
8629 if (task_shared_vars
8630 && DECL_P (t)
8631 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8632 return t;
8634 /* If a global variable has been privatized, TREE_CONSTANT on
8635 ADDR_EXPR might be wrong. */
8636 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8637 recompute_tree_invariant_for_addr_expr (t);
8639 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8640 return NULL_TREE;
8643 /* Data to be communicated between lower_omp_regimplify_operands and
8644 lower_omp_regimplify_operands_p. */
8646 struct lower_omp_regimplify_operands_data
8648 omp_context *ctx;
8649 vec<tree> *decls;
8652 /* Helper function for lower_omp_regimplify_operands. Find
8653 omp_member_access_dummy_var vars and adjust temporarily their
8654 DECL_VALUE_EXPRs if needed. */
8656 static tree
8657 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8658 void *data)
8660 tree t = omp_member_access_dummy_var (*tp);
8661 if (t)
8663 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8664 lower_omp_regimplify_operands_data *ldata
8665 = (lower_omp_regimplify_operands_data *) wi->info;
8666 tree o = maybe_lookup_decl (t, ldata->ctx);
8667 if (o != t)
8669 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8670 ldata->decls->safe_push (*tp);
8671 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8672 SET_DECL_VALUE_EXPR (*tp, v);
8675 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8676 return NULL_TREE;
8679 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8680 of omp_member_access_dummy_var vars during regimplification. */
8682 static void
8683 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8684 gimple_stmt_iterator *gsi_p)
8686 auto_vec<tree, 10> decls;
8687 if (ctx)
8689 struct walk_stmt_info wi;
8690 memset (&wi, '\0', sizeof (wi));
8691 struct lower_omp_regimplify_operands_data data;
8692 data.ctx = ctx;
8693 data.decls = &decls;
8694 wi.info = &data;
8695 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8697 gimple_regimplify_operands (stmt, gsi_p);
8698 while (!decls.is_empty ())
8700 tree t = decls.pop ();
8701 tree v = decls.pop ();
8702 SET_DECL_VALUE_EXPR (t, v);
8706 static void
8707 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8709 gimple *stmt = gsi_stmt (*gsi_p);
8710 struct walk_stmt_info wi;
8711 gcall *call_stmt;
8713 if (gimple_has_location (stmt))
8714 input_location = gimple_location (stmt);
8716 if (task_shared_vars)
8717 memset (&wi, '\0', sizeof (wi));
8719 /* If we have issued syntax errors, avoid doing any heavy lifting.
8720 Just replace the OMP directives with a NOP to avoid
8721 confusing RTL expansion. */
8722 if (seen_error () && is_gimple_omp (stmt))
8724 gsi_replace (gsi_p, gimple_build_nop (), true);
8725 return;
8728 switch (gimple_code (stmt))
8730 case GIMPLE_COND:
8732 gcond *cond_stmt = as_a <gcond *> (stmt);
8733 if ((ctx || task_shared_vars)
8734 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8735 lower_omp_regimplify_p,
8736 ctx ? NULL : &wi, NULL)
8737 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8738 lower_omp_regimplify_p,
8739 ctx ? NULL : &wi, NULL)))
8740 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8742 break;
8743 case GIMPLE_CATCH:
8744 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8745 break;
8746 case GIMPLE_EH_FILTER:
8747 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8748 break;
8749 case GIMPLE_TRY:
8750 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8751 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8752 break;
8753 case GIMPLE_TRANSACTION:
8754 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8755 ctx);
8756 break;
8757 case GIMPLE_BIND:
8758 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8759 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
8760 break;
8761 case GIMPLE_OMP_PARALLEL:
8762 case GIMPLE_OMP_TASK:
8763 ctx = maybe_lookup_ctx (stmt);
8764 gcc_assert (ctx);
8765 if (ctx->cancellable)
8766 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8767 lower_omp_taskreg (gsi_p, ctx);
8768 break;
8769 case GIMPLE_OMP_FOR:
8770 ctx = maybe_lookup_ctx (stmt);
8771 gcc_assert (ctx);
8772 if (ctx->cancellable)
8773 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8774 lower_omp_for (gsi_p, ctx);
8775 break;
8776 case GIMPLE_OMP_SECTIONS:
8777 ctx = maybe_lookup_ctx (stmt);
8778 gcc_assert (ctx);
8779 if (ctx->cancellable)
8780 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8781 lower_omp_sections (gsi_p, ctx);
8782 break;
8783 case GIMPLE_OMP_SINGLE:
8784 ctx = maybe_lookup_ctx (stmt);
8785 gcc_assert (ctx);
8786 lower_omp_single (gsi_p, ctx);
8787 break;
8788 case GIMPLE_OMP_MASTER:
8789 ctx = maybe_lookup_ctx (stmt);
8790 gcc_assert (ctx);
8791 lower_omp_master (gsi_p, ctx);
8792 break;
8793 case GIMPLE_OMP_TASKGROUP:
8794 ctx = maybe_lookup_ctx (stmt);
8795 gcc_assert (ctx);
8796 lower_omp_taskgroup (gsi_p, ctx);
8797 break;
8798 case GIMPLE_OMP_ORDERED:
8799 ctx = maybe_lookup_ctx (stmt);
8800 gcc_assert (ctx);
8801 lower_omp_ordered (gsi_p, ctx);
8802 break;
8803 case GIMPLE_OMP_CRITICAL:
8804 ctx = maybe_lookup_ctx (stmt);
8805 gcc_assert (ctx);
8806 lower_omp_critical (gsi_p, ctx);
8807 break;
8808 case GIMPLE_OMP_ATOMIC_LOAD:
8809 if ((ctx || task_shared_vars)
8810 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8811 as_a <gomp_atomic_load *> (stmt)),
8812 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8813 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8814 break;
8815 case GIMPLE_OMP_TARGET:
8816 ctx = maybe_lookup_ctx (stmt);
8817 gcc_assert (ctx);
8818 lower_omp_target (gsi_p, ctx);
8819 break;
8820 case GIMPLE_OMP_TEAMS:
8821 ctx = maybe_lookup_ctx (stmt);
8822 gcc_assert (ctx);
8823 lower_omp_teams (gsi_p, ctx);
8824 break;
8825 case GIMPLE_OMP_GRID_BODY:
8826 ctx = maybe_lookup_ctx (stmt);
8827 gcc_assert (ctx);
8828 lower_omp_grid_body (gsi_p, ctx);
8829 break;
8830 case GIMPLE_CALL:
8831 tree fndecl;
8832 call_stmt = as_a <gcall *> (stmt);
8833 fndecl = gimple_call_fndecl (call_stmt);
8834 if (fndecl
8835 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8836 switch (DECL_FUNCTION_CODE (fndecl))
8838 case BUILT_IN_GOMP_BARRIER:
8839 if (ctx == NULL)
8840 break;
8841 /* FALLTHRU */
8842 case BUILT_IN_GOMP_CANCEL:
8843 case BUILT_IN_GOMP_CANCELLATION_POINT:
8844 omp_context *cctx;
8845 cctx = ctx;
8846 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8847 cctx = cctx->outer;
8848 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8849 if (!cctx->cancellable)
8851 if (DECL_FUNCTION_CODE (fndecl)
8852 == BUILT_IN_GOMP_CANCELLATION_POINT)
8854 stmt = gimple_build_nop ();
8855 gsi_replace (gsi_p, stmt, false);
8857 break;
8859 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8861 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8862 gimple_call_set_fndecl (call_stmt, fndecl);
8863 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8865 tree lhs;
8866 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8867 gimple_call_set_lhs (call_stmt, lhs);
8868 tree fallthru_label;
8869 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8870 gimple *g;
8871 g = gimple_build_label (fallthru_label);
8872 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8873 g = gimple_build_cond (NE_EXPR, lhs,
8874 fold_convert (TREE_TYPE (lhs),
8875 boolean_false_node),
8876 cctx->cancel_label, fallthru_label);
8877 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
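/* In effect the emitted GIMPLE is (a sketch; the temporaries and labels
   are created by this pass):
     <tmp> = GOMP_barrier_cancel ();  (or GOMP_cancel, etc.)
     if (<tmp> != 0) goto <cancel_label>; else goto <fallthru_label>;
     <fallthru_label>:  */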
8878 break;
8879 default:
8880 break;
8882 /* FALLTHRU */
8883 default:
8884 if ((ctx || task_shared_vars)
8885 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8886 ctx ? NULL : &wi))
8888 /* Just remove clobbers; this should happen only if we have
8889 "privatized" local addressable variables in SIMD regions.
8890 The clobber isn't needed in that case, and gimplifying the address
8891 of the ARRAY_REF into a pointer and creating a MEM_REF based
8892 clobber would create worse code than we get with the clobber
8893 dropped. */
8894 if (gimple_clobber_p (stmt))
8896 gsi_replace (gsi_p, gimple_build_nop (), true);
8897 break;
8899 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8901 break;
8905 static void
8906 lower_omp (gimple_seq *body, omp_context *ctx)
8908 location_t saved_location = input_location;
8909 gimple_stmt_iterator gsi;
8910 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8911 lower_omp_1 (&gsi, ctx);
8912 /* During gimplification, we haven't folded statements inside offloading
8913 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8914 if (target_nesting_level || taskreg_nesting_level)
8915 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8916 fold_stmt (&gsi);
8917 input_location = saved_location;
8920 /* Main entry point. */
8922 static unsigned int
8923 execute_lower_omp (void)
8925 gimple_seq body;
8926 int i;
8927 omp_context *ctx;
8929 /* This pass always runs, to provide PROP_gimple_lomp.
8930 But often, there is nothing to do. */
8931 if (flag_openacc == 0 && flag_openmp == 0
8932 && flag_openmp_simd == 0)
8933 return 0;
8935 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8936 delete_omp_context);
8938 body = gimple_body (current_function_decl);
8940 if (hsa_gen_requested_p ())
8941 omp_grid_gridify_all_targets (&body);
8943 scan_omp (&body, NULL);
8944 gcc_assert (taskreg_nesting_level == 0);
8945 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8946 finish_taskreg_scan (ctx);
8947 taskreg_contexts.release ();
8949 if (all_contexts->root)
8951 if (task_shared_vars)
8952 push_gimplify_context ();
8953 lower_omp (&body, NULL);
8954 if (task_shared_vars)
8955 pop_gimplify_context (NULL);
8958 if (all_contexts)
8960 splay_tree_delete (all_contexts);
8961 all_contexts = NULL;
8963 BITMAP_FREE (task_shared_vars);
8965 /* If the current function is a method, remove the artificial dummy VAR_DECLs
8966 created for non-static data member privatization; they aren't needed for
8967 debuginfo or anything else, have already been replaced everywhere in the
8968 IL, and cause problems with LTO. */
8969 if (DECL_ARGUMENTS (current_function_decl)
8970 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
8971 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
8972 == POINTER_TYPE))
8973 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
8974 return 0;
8977 namespace {
8979 const pass_data pass_data_lower_omp =
8981 GIMPLE_PASS, /* type */
8982 "omplower", /* name */
8983 OPTGROUP_OMP, /* optinfo_flags */
8984 TV_NONE, /* tv_id */
8985 PROP_gimple_any, /* properties_required */
8986 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8987 0, /* properties_destroyed */
8988 0, /* todo_flags_start */
8989 0, /* todo_flags_finish */
8992 class pass_lower_omp : public gimple_opt_pass
8994 public:
8995 pass_lower_omp (gcc::context *ctxt)
8996 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8999 /* opt_pass methods: */
9000 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9002 }; // class pass_lower_omp
9004 } // anon namespace
9006 gimple_opt_pass *
9007 make_pass_lower_omp (gcc::context *ctxt)
9009 return new pass_lower_omp (ctxt);
9012 /* The following is a utility to diagnose structured block violations.
9013 It is not part of the "omplower" pass, as that's invoked too late. It
9014 should be invoked by the respective front ends after gimplification. */
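/* For example, a jump into the middle of a structured block, such as

     goto l;
   #pragma omp parallel
     { l:; }

   is diagnosed here, for OpenMP and OpenACC alike.  */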
9016 static splay_tree all_labels;
9018 /* Check for mismatched contexts and generate an error if needed. Return
9019 true if an error is detected. */
9021 static bool
9022 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9023 gimple *branch_ctx, gimple *label_ctx)
9025 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9026 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9028 if (label_ctx == branch_ctx)
9029 return false;
9031 const char* kind = NULL;
9033 if (flag_openacc)
9035 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9036 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9038 gcc_checking_assert (kind == NULL);
9039 kind = "OpenACC";
9042 if (kind == NULL)
9044 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9045 kind = "OpenMP";
9048 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9049 so we could traverse it and issue a correct "exit" or "enter" error
9050 message upon a structured block violation.
9052 We built the context by chaining a list together with tree_cons'ing, but there is
9053 no easy counterpart in gimple tuples. It seems like far too much work
9054 for issuing exit/enter error messages. If someone really misses the
9055 distinct error message... patches welcome. */
9057 #if 0
9058 /* Try to avoid confusing the user by producing an error message
9059 with correct "exit" or "enter" verbiage. We prefer "exit"
9060 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9061 if (branch_ctx == NULL)
9062 exit_p = false;
9063 else
9065 while (label_ctx)
9067 if (TREE_VALUE (label_ctx) == branch_ctx)
9069 exit_p = false;
9070 break;
9072 label_ctx = TREE_CHAIN (label_ctx);
9076 if (exit_p)
9077 error ("invalid exit from %s structured block", kind);
9078 else
9079 error ("invalid entry to %s structured block", kind);
9080 #endif
9082 /* If it's obvious we have an invalid entry, be specific about the error. */
9083 if (branch_ctx == NULL)
9084 error ("invalid entry to %s structured block", kind);
9085 else
9087 /* Otherwise, be vague and lazy, but efficient. */
9088 error ("invalid branch to/from %s structured block", kind);
9091 gsi_replace (gsi_p, gimple_build_nop (), false);
9092 return true;
9095 /* Pass 1: Create a minimal tree of structured blocks, and record
9096 where each label is found. */
9098 static tree
9099 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9100 struct walk_stmt_info *wi)
9102 gimple *context = (gimple *) wi->info;
9103 gimple *inner_context;
9104 gimple *stmt = gsi_stmt (*gsi_p);
9106 *handled_ops_p = true;
9108 switch (gimple_code (stmt))
9110 WALK_SUBSTMTS;
9112 case GIMPLE_OMP_PARALLEL:
9113 case GIMPLE_OMP_TASK:
9114 case GIMPLE_OMP_SECTIONS:
9115 case GIMPLE_OMP_SINGLE:
9116 case GIMPLE_OMP_SECTION:
9117 case GIMPLE_OMP_MASTER:
9118 case GIMPLE_OMP_ORDERED:
9119 case GIMPLE_OMP_CRITICAL:
9120 case GIMPLE_OMP_TARGET:
9121 case GIMPLE_OMP_TEAMS:
9122 case GIMPLE_OMP_TASKGROUP:
9123 /* The minimal context here is just the current OMP construct. */
9124 inner_context = stmt;
9125 wi->info = inner_context;
9126 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9127 wi->info = context;
9128 break;
9130 case GIMPLE_OMP_FOR:
9131 inner_context = stmt;
9132 wi->info = inner_context;
9133 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9134 walk them. */
9135 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9136 diagnose_sb_1, NULL, wi);
9137 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9138 wi->info = context;
9139 break;
9141 case GIMPLE_LABEL:
9142 splay_tree_insert (all_labels,
9143 (splay_tree_key) gimple_label_label (
9144 as_a <glabel *> (stmt)),
9145 (splay_tree_value) context);
9146 break;
9148 default:
9149 break;
9152 return NULL_TREE;
9155 /* Pass 2: Check each branch and see if its context differs from that of
9156 the destination label's context. */
9158 static tree
9159 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9160 struct walk_stmt_info *wi)
9162 gimple *context = (gimple *) wi->info;
9163 splay_tree_node n;
9164 gimple *stmt = gsi_stmt (*gsi_p);
9166 *handled_ops_p = true;
9168 switch (gimple_code (stmt))
9170 WALK_SUBSTMTS;
9172 case GIMPLE_OMP_PARALLEL:
9173 case GIMPLE_OMP_TASK:
9174 case GIMPLE_OMP_SECTIONS:
9175 case GIMPLE_OMP_SINGLE:
9176 case GIMPLE_OMP_SECTION:
9177 case GIMPLE_OMP_MASTER:
9178 case GIMPLE_OMP_ORDERED:
9179 case GIMPLE_OMP_CRITICAL:
9180 case GIMPLE_OMP_TARGET:
9181 case GIMPLE_OMP_TEAMS:
9182 case GIMPLE_OMP_TASKGROUP:
9183 wi->info = stmt;
9184 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9185 wi->info = context;
9186 break;
9188 case GIMPLE_OMP_FOR:
9189 wi->info = stmt;
9190 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9191 walk them. */
9192 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9193 diagnose_sb_2, NULL, wi);
9194 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9195 wi->info = context;
9196 break;
9198 case GIMPLE_COND:
9200 gcond *cond_stmt = as_a <gcond *> (stmt);
9201 tree lab = gimple_cond_true_label (cond_stmt);
9202 if (lab)
9204 n = splay_tree_lookup (all_labels,
9205 (splay_tree_key) lab);
9206 diagnose_sb_0 (gsi_p, context,
9207 n ? (gimple *) n->value : NULL);
9209 lab = gimple_cond_false_label (cond_stmt);
9210 if (lab)
9212 n = splay_tree_lookup (all_labels,
9213 (splay_tree_key) lab);
9214 diagnose_sb_0 (gsi_p, context,
9215 n ? (gimple *) n->value : NULL);
9218 break;
9220 case GIMPLE_GOTO:
9222 tree lab = gimple_goto_dest (stmt);
9223 if (TREE_CODE (lab) != LABEL_DECL)
9224 break;
9226 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9227 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9229 break;
9231 case GIMPLE_SWITCH:
9233 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9234 unsigned int i;
9235 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9237 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9238 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9239 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9240 break;
9243 break;
9245 case GIMPLE_RETURN:
9246 diagnose_sb_0 (gsi_p, context, NULL);
9247 break;
9249 default:
9250 break;
9253 return NULL_TREE;
9256 static unsigned int
9257 diagnose_omp_structured_block_errors (void)
9259 struct walk_stmt_info wi;
9260 gimple_seq body = gimple_body (current_function_decl);
9262 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9264 memset (&wi, 0, sizeof (wi));
9265 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9267 memset (&wi, 0, sizeof (wi));
9268 wi.want_locations = true;
9269 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9271 gimple_set_body (current_function_decl, body);
9273 splay_tree_delete (all_labels);
9274 all_labels = NULL;
9276 return 0;
9279 namespace {
9281 const pass_data pass_data_diagnose_omp_blocks =
9283 GIMPLE_PASS, /* type */
9284 "*diagnose_omp_blocks", /* name */
9285 OPTGROUP_OMP, /* optinfo_flags */
9286 TV_NONE, /* tv_id */
9287 PROP_gimple_any, /* properties_required */
9288 0, /* properties_provided */
9289 0, /* properties_destroyed */
9290 0, /* todo_flags_start */
9291 0, /* todo_flags_finish */
9294 class pass_diagnose_omp_blocks : public gimple_opt_pass
9296 public:
9297 pass_diagnose_omp_blocks (gcc::context *ctxt)
9298 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9301 /* opt_pass methods: */
9302 virtual bool gate (function *)
9304 return flag_openacc || flag_openmp || flag_openmp_simd;
9306 virtual unsigned int execute (function *)
9308 return diagnose_omp_structured_block_errors ();
9311 }; // class pass_diagnose_omp_blocks
9313 } // anon namespace
9315 gimple_opt_pass *
9316 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9318 return new pass_diagnose_omp_blocks (ctxt);
9322 #include "gt-omp-low.h"