gcc/omp-low.c
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new function, to be
   invoked by the thread library, or offloaded.  */
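
/* For illustration only (not part of the original source):  a directive
   such as

     #pragma omp parallel shared(x)
       x++;

   is eventually split into a child function that receives the shared
   state through a generated record, roughly

     struct .omp_data_s { int *x; };
     static void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
     { (*.omp_data_i->x)++; }

   with the call site rewritten to hand &.omp_data_o to the libgomp
   entry point (GOMP_parallel).  Exact field layout and calling details
   vary; omp-expand.c emits the actual runtime calls.  */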
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
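
/* For illustration only (not part of the original source):  the dummy
   var handled above arises from C++ code like

     struct S {
       int m;
       void f () {
	 #pragma omp parallel private (m)
	 m = 0;
       }
     };

   where the front end wraps the member access in an artificial VAR_DECL
   whose DECL_VALUE_EXPR is this->m; the function above walks that
   expression down to the "this" PARM_DECL.  */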
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
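
/* For illustration only:  given a DECL_VALUE_EXPR such as this->m and a
   remapped "this" (say, a copy D.1234 in the child function — the names
   here are hypothetical),

     unshare_and_remap (this->m, this, D.1234)

   yields a fresh, unshared D.1234->m while leaving the original
   expression untouched.  build_outer_var_ref below uses exactly this to
   rewrite member accesses for a new context.  */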
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
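
/* For illustration only:  the net effect of the predicate above is
   roughly

     int x;                         // local scalar, not addressable
     #pragma omp parallel shared(x) // -> copy-in/copy-out, by value

     int y[10];                     // aggregate
     #pragma omp parallel shared(y) // -> passed by pointer

   i.e. cheap scalars travel by value through the communication record,
   while aggregates, addressable variables, and anything reachable from
   an outer scope travel by address so all threads see one object.  */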
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and became so only because a task needed
     to take its address.  But we don't need to take the address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is a reference, it
       is possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
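
/* For illustration only:  for

     int x;
     #pragma omp parallel firstprivate(x)

   initializing the private copy in the child function needs the "outer"
   x, so build_outer_var_ref produces a receiver-record access like
   .omp_data_i->x; inside a bare "#pragma omp for" in the same function
   body it instead resolves directly to the enclosing function's x.  */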
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
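
/* For illustration only:  MASK above packs several flags.  Bit 0
   installs the field into the receiver record (record_type), bit 1 into
   the sender record (srecord_type, tasks only), bit 2 wraps an array
   type in a double pointer, and bit 3 keys the mapping off
   &DECL_UID (var) instead of the decl itself, for decls that need two
   distinct fields (e.g. taskloop lastprivate).  So the common call

     install_var_field (decl, by_ref, 3, ctx);

   means "one field, visible in both records".  */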
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
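
/* For illustration only:  the field-by-field remap above matters for
   VLAs.  Given

     void foo (int n)
     {
       int a[n];
       #pragma omp parallel shared(a)
       ...
     }

   the field for "a" has a variably modified type whose size still
   refers to the parent's "n"; the loop above rebuilds the record so the
   size expression uses the child function's copy of "n" instead.  */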
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
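
/* For illustration only:  with "int a[10], b, s" and

     #pragma omp parallel shared(a) firstprivate(b) reduction(+:s)

   the first pass above creates fields roughly like

     struct .omp_data_s { int (*a)[10]; int b; int s; };

   (the aggregate shared by pointer, the scalars by value), while the
   second pass fixes up types and value-exprs of the remapped local
   copies once all fields exist.  */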
/* Create a new name for omp child function.  Returns an identifier.  If
   IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn."  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
/* Returns the type of the induction variable for the child function for
   _Cilk_for and the types for _high and _low variables based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls this
     function.  */
  if (cilk_for_count)
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;

      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  if (cilk_for_count)
    DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
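
/* For illustration only:  for "#pragma omp parallel" in foo, the decl
   built above looks roughly like

     static void foo._omp_fn.0 (void *.omp_data_i);

   and for a task's copy function (task_copy == true)

     static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   the bodies are filled in later, when the region is outlined.  */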
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
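
/* For illustration only:  for a combined construct such as

     #pragma omp parallel for schedule(static)
     for (i = 0; i < n; i++) ...

   the iteration bounds are computed in the enclosing parallel and
   passed to the inner GIMPLE_OMP_FOR through the istart/iend
   _LOOPTEMP_ temporaries added above; collapsed loops with
   non-constant counts need one extra temporary per collapsed level.  */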
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
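
/* For illustration only:  after scanning, a parallel that captures no
   state ends up with a NULL record_type and the expander passes a null
   data pointer to the runtime; otherwise .omp_data_s is laid out and
   the child function reads everything through .omp_data_i.  */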
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
1911 /* Helper function for finish_taskreg_scan, called through walk_tree.
1912 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
1913 decl, replace it in the expression. */
1915 static tree
1916 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1918 if (VAR_P (*tp))
1920 omp_context *ctx = (omp_context *) data;
1921 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1922 if (t != *tp)
1924 if (DECL_HAS_VALUE_EXPR_P (t))
1925 t = unshare_expr (DECL_VALUE_EXPR (t));
1926 *tp = t;
1928 *walk_subtrees = 0;
1930 else if (IS_TYPE_OR_DECL_P (*tp))
1931 *walk_subtrees = 0;
1932 return NULL_TREE;
1935 /* If any decls have been made addressable during scan_omp,
1936 adjust their fields if needed, and lay out the record types
1937 of parallel/task constructs. */
1939 static void
1940 finish_taskreg_scan (omp_context *ctx)
1942 if (ctx->record_type == NULL_TREE)
1943 return;
1945 /* If any task_shared_vars were needed, verify for all
1946 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1947 statements whether use_pointer_for_field has changed
1948 because of that, and if it has, update the field types now. */
1949 if (task_shared_vars)
1951 tree c;
1953 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1954 c; c = OMP_CLAUSE_CHAIN (c))
1955 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1956 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1958 tree decl = OMP_CLAUSE_DECL (c);
1960 /* Global variables don't need to be copied;
1961 the receiver side will use them directly. */
1962 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1963 continue;
1964 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1965 || !use_pointer_for_field (decl, ctx))
1966 continue;
1967 tree field = lookup_field (decl, ctx);
1968 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1969 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1970 continue;
1971 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1972 TREE_THIS_VOLATILE (field) = 0;
1973 DECL_USER_ALIGN (field) = 0;
1974 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1975 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1976 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1977 if (ctx->srecord_type)
1979 tree sfield = lookup_sfield (decl, ctx);
1980 TREE_TYPE (sfield) = TREE_TYPE (field);
1981 TREE_THIS_VOLATILE (sfield) = 0;
1982 DECL_USER_ALIGN (sfield) = 0;
1983 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1984 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1985 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1990 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1992 layout_type (ctx->record_type);
1993 fixup_child_record_type (ctx);
1995 else
1997 location_t loc = gimple_location (ctx->stmt);
1998 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1999 /* Move VLA fields to the end. */
2000 p = &TYPE_FIELDS (ctx->record_type);
2001 while (*p)
2002 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2003 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2005 *q = *p;
2006 *p = TREE_CHAIN (*p);
2007 TREE_CHAIN (*q) = NULL_TREE;
2008 q = &TREE_CHAIN (*q);
2010 else
2011 p = &DECL_CHAIN (*p);
2012 *p = vla_fields;
2013 if (gimple_omp_task_taskloop_p (ctx->stmt))
2015 /* Move the fields corresponding to the first and second _looptemp_
2016 clauses to the front. These are filled in by GOMP_taskloop
2017 and thus need to be at specific positions. */
2018 tree c1 = gimple_omp_task_clauses (ctx->stmt);
2019 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2020 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2021 OMP_CLAUSE__LOOPTEMP_);
2022 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2023 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2024 p = &TYPE_FIELDS (ctx->record_type);
2025 while (*p)
2026 if (*p == f1 || *p == f2)
2027 *p = DECL_CHAIN (*p);
2028 else
2029 p = &DECL_CHAIN (*p);
2030 DECL_CHAIN (f1) = f2;
2031 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2032 TYPE_FIELDS (ctx->record_type) = f1;
2033 if (ctx->srecord_type)
2035 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2036 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2037 p = &TYPE_FIELDS (ctx->srecord_type);
2038 while (*p)
2039 if (*p == f1 || *p == f2)
2040 *p = DECL_CHAIN (*p);
2041 else
2042 p = &DECL_CHAIN (*p);
2043 DECL_CHAIN (f1) = f2;
2044 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2045 TYPE_FIELDS (ctx->srecord_type) = f1;
2048 layout_type (ctx->record_type);
2049 fixup_child_record_type (ctx);
2050 if (ctx->srecord_type)
2051 layout_type (ctx->srecord_type);
2052 tree t = fold_convert_loc (loc, long_integer_type_node,
2053 TYPE_SIZE_UNIT (ctx->record_type));
2054 if (TREE_CODE (t) != INTEGER_CST)
2056 t = unshare_expr (t);
2057 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2059 gimple_omp_task_set_arg_size (ctx->stmt, t);
2060 t = build_int_cst (long_integer_type_node,
2061 TYPE_ALIGN_UNIT (ctx->record_type));
2062 gimple_omp_task_set_arg_align (ctx->stmt, t);
2066 /* Find the enclosing offload context. */
2068 static omp_context *
2069 enclosing_target_ctx (omp_context *ctx)
2071 for (; ctx; ctx = ctx->outer)
2072 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2073 break;
2075 return ctx;
2078 /* Return true if CTX is part of an OpenACC kernels region. */
2080 static bool
2081 ctx_in_oacc_kernels_region (omp_context *ctx)
2083 for (;ctx != NULL; ctx = ctx->outer)
2085 gimple *stmt = ctx->stmt;
2086 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2087 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2088 return true;
2091 return false;
2094 /* Check the parallelism clauses inside a kernels region.
2095 Until kernels handling moves to use the same loop indirection
2096 scheme as parallel, we need to do this checking early. */
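/* For example (a sketch of what the check below diagnoses):

     #pragma acc kernels
     #pragma acc loop gang
     for (...)
       #pragma acc loop gang        <-- error: same parallelism
       for (...)

   as an inner loop may not use the same OpenACC parallelism as its
   containing loop.  */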
2098 static unsigned
2099 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2101 bool checking = true;
2102 unsigned outer_mask = 0;
2103 unsigned this_mask = 0;
2104 bool has_seq = false, has_auto = false;
2106 if (ctx->outer)
2107 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2108 if (!stmt)
2110 checking = false;
2111 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2112 return outer_mask;
2113 stmt = as_a <gomp_for *> (ctx->stmt);
2116 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2118 switch (OMP_CLAUSE_CODE (c))
2120 case OMP_CLAUSE_GANG:
2121 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2122 break;
2123 case OMP_CLAUSE_WORKER:
2124 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2125 break;
2126 case OMP_CLAUSE_VECTOR:
2127 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2128 break;
2129 case OMP_CLAUSE_SEQ:
2130 has_seq = true;
2131 break;
2132 case OMP_CLAUSE_AUTO:
2133 has_auto = true;
2134 break;
2135 default:
2136 break;
2140 if (checking)
2142 if (has_seq && (this_mask || has_auto))
2143 error_at (gimple_location (stmt), "%<seq%> overrides other"
2144 " OpenACC loop specifiers");
2145 else if (has_auto && this_mask)
2146 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2147 " OpenACC loop specifiers");
2149 if (this_mask & outer_mask)
2150 error_at (gimple_location (stmt), "inner loop uses same"
2151 " OpenACC parallelism as containing loop");
2154 return outer_mask | this_mask;
2157 /* Scan a GIMPLE_OMP_FOR. */
2159 static omp_context *
2160 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2162 omp_context *ctx;
2163 size_t i;
2164 tree clauses = gimple_omp_for_clauses (stmt);
2166 ctx = new_omp_context (stmt, outer_ctx);
2168 if (is_gimple_omp_oacc (stmt))
2170 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2172 if (!tgt || is_oacc_parallel (tgt))
2173 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2175 char const *check = NULL;
2177 switch (OMP_CLAUSE_CODE (c))
2179 case OMP_CLAUSE_GANG:
2180 check = "gang";
2181 break;
2183 case OMP_CLAUSE_WORKER:
2184 check = "worker";
2185 break;
2187 case OMP_CLAUSE_VECTOR:
2188 check = "vector";
2189 break;
2191 default:
2192 break;
2195 if (check && OMP_CLAUSE_OPERAND (c, 0))
2196 error_at (gimple_location (stmt),
2197 "argument not permitted on %qs clause in"
2198 " OpenACC %<parallel%>", check);
2201 if (tgt && is_oacc_kernels (tgt))
2203 /* Strip out reductions, as they are not handled yet. */
2204 tree *prev_ptr = &clauses;
2206 while (tree probe = *prev_ptr)
2208 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2210 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2211 *prev_ptr = *next_ptr;
2212 else
2213 prev_ptr = next_ptr;
2216 gimple_omp_for_set_clauses (stmt, clauses);
2217 check_oacc_kernel_gwv (stmt, ctx);
2221 scan_sharing_clauses (clauses, ctx);
2223 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2224 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2226 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2227 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2228 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2229 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2231 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2232 return ctx;
2235 /* Duplicate #pragma omp simd, one copy for SIMT and another for SIMD. */
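/* Schematically, the bind built below looks like:

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, marked with a _simt_ clause>; goto lab3;
     lab2: <original loop, later lowered for SIMD>;
     lab3:  */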
2237 static void
2238 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2239 omp_context *outer_ctx)
2241 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2242 gsi_replace (gsi, bind, false);
2243 gimple_seq seq = NULL;
2244 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2245 tree cond = create_tmp_var_raw (integer_type_node);
2246 DECL_CONTEXT (cond) = current_function_decl;
2247 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2248 gimple_bind_set_vars (bind, cond);
2249 gimple_call_set_lhs (g, cond);
2250 gimple_seq_add_stmt (&seq, g);
2251 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2252 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2253 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2254 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2255 gimple_seq_add_stmt (&seq, g);
2256 g = gimple_build_label (lab1);
2257 gimple_seq_add_stmt (&seq, g);
2258 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2259 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2260 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2261 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2262 gimple_omp_for_set_clauses (new_stmt, clause);
2263 gimple_seq_add_stmt (&seq, new_stmt);
2264 g = gimple_build_goto (lab3);
2265 gimple_seq_add_stmt (&seq, g);
2266 g = gimple_build_label (lab2);
2267 gimple_seq_add_stmt (&seq, g);
2268 gimple_seq_add_stmt (&seq, stmt);
2269 g = gimple_build_label (lab3);
2270 gimple_seq_add_stmt (&seq, g);
2271 gimple_bind_set_body (bind, seq);
2272 update_stmt (bind);
2273 scan_omp_for (new_stmt, outer_ctx);
2274 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2277 /* Scan an OpenMP sections directive. */
2279 static void
2280 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2282 omp_context *ctx;
2284 ctx = new_omp_context (stmt, outer_ctx);
2285 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2286 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2289 /* Scan an OpenMP single directive. */
2291 static void
2292 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2294 omp_context *ctx;
2295 tree name;
2297 ctx = new_omp_context (stmt, outer_ctx);
2298 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2299 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2300 name = create_tmp_var_name (".omp_copy_s");
2301 name = build_decl (gimple_location (stmt),
2302 TYPE_DECL, name, ctx->record_type);
2303 TYPE_NAME (ctx->record_type) = name;
2305 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2306 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2308 if (TYPE_FIELDS (ctx->record_type) == NULL)
2309 ctx->record_type = NULL;
2310 else
2311 layout_type (ctx->record_type);
2314 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2315 used in the corresponding offloaded function are restrict. */
2317 static bool
2318 omp_target_base_pointers_restrict_p (tree clauses)
2320 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2321 used by OpenACC. */
2322 if (flag_openacc == 0)
2323 return false;
2325 /* Basic example:
2327 void foo (void)
2329 unsigned int a[2], b[2];
2331 #pragma acc kernels \
2332 copyout (a) \
2333 copyout (b)
2335 a[0] = 0;
2336 b[0] = 1;
2340 After gimplification, we have:
2342 #pragma omp target oacc_kernels \
2343 map(force_from:a [len: 8]) \
2344 map(force_from:b [len: 8])
2346 a[0] = 0;
2347 b[0] = 1;
2350 Because both mappings have the force prefix, we know that they will be
2351 allocated when calling the corresponding offloaded function, which means we
2352 can mark the base pointers for a and b in the offloaded function as
2353 restrict. */
2355 tree c;
2356 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2358 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2359 return false;
2361 switch (OMP_CLAUSE_MAP_KIND (c))
2363 case GOMP_MAP_FORCE_ALLOC:
2364 case GOMP_MAP_FORCE_TO:
2365 case GOMP_MAP_FORCE_FROM:
2366 case GOMP_MAP_FORCE_TOFROM:
2367 break;
2368 default:
2369 return false;
2373 return true;
2376 /* Scan a GIMPLE_OMP_TARGET. */
2378 static void
2379 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2381 omp_context *ctx;
2382 tree name;
2383 bool offloaded = is_gimple_omp_offloaded (stmt);
2384 tree clauses = gimple_omp_target_clauses (stmt);
2386 ctx = new_omp_context (stmt, outer_ctx);
2387 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2388 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2389 name = create_tmp_var_name (".omp_data_t");
2390 name = build_decl (gimple_location (stmt),
2391 TYPE_DECL, name, ctx->record_type);
2392 DECL_ARTIFICIAL (name) = 1;
2393 DECL_NAMELESS (name) = 1;
2394 TYPE_NAME (ctx->record_type) = name;
2395 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2397 bool base_pointers_restrict = false;
2398 if (offloaded)
2400 create_omp_child_function (ctx, false);
2401 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2403 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2404 if (base_pointers_restrict
2405 && dump_file && (dump_flags & TDF_DETAILS))
2406 fprintf (dump_file,
2407 "Base pointers in offloaded function are restrict\n");
2410 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2411 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2413 if (TYPE_FIELDS (ctx->record_type) == NULL)
2414 ctx->record_type = ctx->receiver_decl = NULL;
2415 else
2417 TYPE_FIELDS (ctx->record_type)
2418 = nreverse (TYPE_FIELDS (ctx->record_type));
2419 if (flag_checking)
2421 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2422 for (tree field = TYPE_FIELDS (ctx->record_type);
2423 field;
2424 field = DECL_CHAIN (field))
2425 gcc_assert (DECL_ALIGN (field) == align);
2427 layout_type (ctx->record_type);
2428 if (offloaded)
2429 fixup_child_record_type (ctx);
2433 /* Scan an OpenMP teams directive. */
2435 static void
2436 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2438 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2439 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2440 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2443 /* Check nesting restrictions. */
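/* For instance, the checks below reject a barrier closely nested in a
   worksharing region, roughly:

     #pragma omp for
     for (i = 0; i < n; i++)
       {
         #pragma omp barrier       <-- error
       }  */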
2444 static bool
2445 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2447 tree c;
2449 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2450 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2451 the original copy of its contents. */
2452 return true;
2454 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2455 inside an OpenACC CTX. */
2456 if (!(is_gimple_omp (stmt)
2457 && is_gimple_omp_oacc (stmt))
2458 /* Except for atomic codes that we share with OpenMP. */
2459 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2460 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2462 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2464 error_at (gimple_location (stmt),
2465 "non-OpenACC construct inside of OpenACC routine");
2466 return false;
2468 else
2469 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2470 if (is_gimple_omp (octx->stmt)
2471 && is_gimple_omp_oacc (octx->stmt))
2473 error_at (gimple_location (stmt),
2474 "non-OpenACC construct inside of OpenACC region");
2475 return false;
2479 if (ctx != NULL)
2481 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2482 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2484 c = NULL_TREE;
2485 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2487 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2488 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2490 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2491 && (ctx->outer == NULL
2492 || !gimple_omp_for_combined_into_p (ctx->stmt)
2493 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2494 || (gimple_omp_for_kind (ctx->outer->stmt)
2495 != GF_OMP_FOR_KIND_FOR)
2496 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2498 error_at (gimple_location (stmt),
2499 "%<ordered simd threads%> must be closely "
2500 "nested inside of %<for simd%> region");
2501 return false;
2503 return true;
2506 error_at (gimple_location (stmt),
2507 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2508 " may not be nested inside %<simd%> region");
2509 return false;
2511 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2513 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2514 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2515 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2516 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2518 error_at (gimple_location (stmt),
2519 "only %<distribute%> or %<parallel%> regions are "
2520 "allowed to be strictly nested inside %<teams%> "
2521 "region");
2522 return false;
2526 switch (gimple_code (stmt))
2528 case GIMPLE_OMP_FOR:
2529 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2530 return true;
2531 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2533 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2535 error_at (gimple_location (stmt),
2536 "%<distribute%> region must be strictly nested "
2537 "inside %<teams%> construct");
2538 return false;
2540 return true;
2542 /* We split a taskloop into a task with a nested taskloop in it. */
2543 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2544 return true;
2545 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2547 bool ok = false;
2549 if (ctx)
2550 switch (gimple_code (ctx->stmt))
2552 case GIMPLE_OMP_FOR:
2553 ok = (gimple_omp_for_kind (ctx->stmt)
2554 == GF_OMP_FOR_KIND_OACC_LOOP);
2555 break;
2557 case GIMPLE_OMP_TARGET:
2558 switch (gimple_omp_target_kind (ctx->stmt))
2560 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2561 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2562 ok = true;
2563 break;
2565 default:
2566 break;
2569 default:
2570 break;
2572 else if (oacc_get_fn_attrib (current_function_decl))
2573 ok = true;
2574 if (!ok)
2576 error_at (gimple_location (stmt),
2577 "OpenACC loop directive must be associated with"
2578 " an OpenACC compute region");
2579 return false;
2582 /* FALLTHRU */
2583 case GIMPLE_CALL:
2584 if (is_gimple_call (stmt)
2585 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2586 == BUILT_IN_GOMP_CANCEL
2587 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2588 == BUILT_IN_GOMP_CANCELLATION_POINT))
2590 const char *bad = NULL;
2591 const char *kind = NULL;
2592 const char *construct
2593 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2594 == BUILT_IN_GOMP_CANCEL)
2595 ? "#pragma omp cancel"
2596 : "#pragma omp cancellation point";
2597 if (ctx == NULL)
2599 error_at (gimple_location (stmt), "orphaned %qs construct",
2600 construct);
2601 return false;
2603 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2604 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2605 : 0)
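/* The first argument selects the construct being cancelled; as the
   cases below show, 1 = parallel, 2 = for, 4 = sections and
   8 = taskgroup (presumably the GOMP_CANCEL_* kinds shared with
   libgomp via gomp-constants.h).  */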
2607 case 1:
2608 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2609 bad = "#pragma omp parallel";
2610 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2611 == BUILT_IN_GOMP_CANCEL
2612 && !integer_zerop (gimple_call_arg (stmt, 1)))
2613 ctx->cancellable = true;
2614 kind = "parallel";
2615 break;
2616 case 2:
2617 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2618 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2619 bad = "#pragma omp for";
2620 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2621 == BUILT_IN_GOMP_CANCEL
2622 && !integer_zerop (gimple_call_arg (stmt, 1)))
2624 ctx->cancellable = true;
2625 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2626 OMP_CLAUSE_NOWAIT))
2627 warning_at (gimple_location (stmt), 0,
2628 "%<#pragma omp cancel for%> inside "
2629 "%<nowait%> for construct");
2630 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2631 OMP_CLAUSE_ORDERED))
2632 warning_at (gimple_location (stmt), 0,
2633 "%<#pragma omp cancel for%> inside "
2634 "%<ordered%> for construct");
2636 kind = "for";
2637 break;
2638 case 4:
2639 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2640 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2641 bad = "#pragma omp sections";
2642 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2643 == BUILT_IN_GOMP_CANCEL
2644 && !integer_zerop (gimple_call_arg (stmt, 1)))
2646 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2648 ctx->cancellable = true;
2649 if (omp_find_clause (gimple_omp_sections_clauses
2650 (ctx->stmt),
2651 OMP_CLAUSE_NOWAIT))
2652 warning_at (gimple_location (stmt), 0,
2653 "%<#pragma omp cancel sections%> inside "
2654 "%<nowait%> sections construct");
2656 else
2658 gcc_assert (ctx->outer
2659 && gimple_code (ctx->outer->stmt)
2660 == GIMPLE_OMP_SECTIONS);
2661 ctx->outer->cancellable = true;
2662 if (omp_find_clause (gimple_omp_sections_clauses
2663 (ctx->outer->stmt),
2664 OMP_CLAUSE_NOWAIT))
2665 warning_at (gimple_location (stmt), 0,
2666 "%<#pragma omp cancel sections%> inside "
2667 "%<nowait%> sections construct");
2670 kind = "sections";
2671 break;
2672 case 8:
2673 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2674 bad = "#pragma omp task";
2675 else
2677 for (omp_context *octx = ctx->outer;
2678 octx; octx = octx->outer)
2680 switch (gimple_code (octx->stmt))
2682 case GIMPLE_OMP_TASKGROUP:
2683 break;
2684 case GIMPLE_OMP_TARGET:
2685 if (gimple_omp_target_kind (octx->stmt)
2686 != GF_OMP_TARGET_KIND_REGION)
2687 continue;
2688 /* FALLTHRU */
2689 case GIMPLE_OMP_PARALLEL:
2690 case GIMPLE_OMP_TEAMS:
2691 error_at (gimple_location (stmt),
2692 "%<%s taskgroup%> construct not closely "
2693 "nested inside of %<taskgroup%> region",
2694 construct);
2695 return false;
2696 default:
2697 continue;
2699 break;
2701 ctx->cancellable = true;
2703 kind = "taskgroup";
2704 break;
2705 default:
2706 error_at (gimple_location (stmt), "invalid arguments");
2707 return false;
2709 if (bad)
2711 error_at (gimple_location (stmt),
2712 "%<%s %s%> construct not closely nested inside of %qs",
2713 construct, kind, bad);
2714 return false;
2717 /* FALLTHRU */
2718 case GIMPLE_OMP_SECTIONS:
2719 case GIMPLE_OMP_SINGLE:
2720 for (; ctx != NULL; ctx = ctx->outer)
2721 switch (gimple_code (ctx->stmt))
2723 case GIMPLE_OMP_FOR:
2724 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2725 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2726 break;
2727 /* FALLTHRU */
2728 case GIMPLE_OMP_SECTIONS:
2729 case GIMPLE_OMP_SINGLE:
2730 case GIMPLE_OMP_ORDERED:
2731 case GIMPLE_OMP_MASTER:
2732 case GIMPLE_OMP_TASK:
2733 case GIMPLE_OMP_CRITICAL:
2734 if (is_gimple_call (stmt))
2736 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2737 != BUILT_IN_GOMP_BARRIER)
2738 return true;
2739 error_at (gimple_location (stmt),
2740 "barrier region may not be closely nested inside "
2741 "of work-sharing, %<critical%>, %<ordered%>, "
2742 "%<master%>, explicit %<task%> or %<taskloop%> "
2743 "region");
2744 return false;
2746 error_at (gimple_location (stmt),
2747 "work-sharing region may not be closely nested inside "
2748 "of work-sharing, %<critical%>, %<ordered%>, "
2749 "%<master%>, explicit %<task%> or %<taskloop%> region");
2750 return false;
2751 case GIMPLE_OMP_PARALLEL:
2752 case GIMPLE_OMP_TEAMS:
2753 return true;
2754 case GIMPLE_OMP_TARGET:
2755 if (gimple_omp_target_kind (ctx->stmt)
2756 == GF_OMP_TARGET_KIND_REGION)
2757 return true;
2758 break;
2759 default:
2760 break;
2762 break;
2763 case GIMPLE_OMP_MASTER:
2764 for (; ctx != NULL; ctx = ctx->outer)
2765 switch (gimple_code (ctx->stmt))
2767 case GIMPLE_OMP_FOR:
2768 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2769 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2770 break;
2771 /* FALLTHRU */
2772 case GIMPLE_OMP_SECTIONS:
2773 case GIMPLE_OMP_SINGLE:
2774 case GIMPLE_OMP_TASK:
2775 error_at (gimple_location (stmt),
2776 "%<master%> region may not be closely nested inside "
2777 "of work-sharing, explicit %<task%> or %<taskloop%> "
2778 "region");
2779 return false;
2780 case GIMPLE_OMP_PARALLEL:
2781 case GIMPLE_OMP_TEAMS:
2782 return true;
2783 case GIMPLE_OMP_TARGET:
2784 if (gimple_omp_target_kind (ctx->stmt)
2785 == GF_OMP_TARGET_KIND_REGION)
2786 return true;
2787 break;
2788 default:
2789 break;
2791 break;
2792 case GIMPLE_OMP_TASK:
2793 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2794 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2795 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2796 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2798 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2799 error_at (OMP_CLAUSE_LOCATION (c),
2800 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2801 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2802 return false;
2804 break;
2805 case GIMPLE_OMP_ORDERED:
2806 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2807 c; c = OMP_CLAUSE_CHAIN (c))
2809 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2811 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2812 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2813 continue;
2815 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2816 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2817 || kind == OMP_CLAUSE_DEPEND_SINK)
2819 tree oclause;
2820 /* Look for containing ordered(N) loop. */
2821 if (ctx == NULL
2822 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2823 || (oclause
2824 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2825 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2827 error_at (OMP_CLAUSE_LOCATION (c),
2828 "%<ordered%> construct with %<depend%> clause "
2829 "must be closely nested inside an %<ordered%> "
2830 "loop");
2831 return false;
2833 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2835 error_at (OMP_CLAUSE_LOCATION (c),
2836 "%<ordered%> construct with %<depend%> clause "
2837 "must be closely nested inside a loop with "
2838 "%<ordered%> clause with a parameter");
2839 return false;
2842 else
2844 error_at (OMP_CLAUSE_LOCATION (c),
2845 "invalid depend kind in omp %<ordered%> %<depend%>");
2846 return false;
2849 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2850 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2852 /* An ordered simd construct must be closely nested inside of a simd
2853 region, and a simd region must not encounter constructs other than
2854 ordered simd; therefore an ordered simd is either orphaned,
2855 or ctx->stmt must be simd. The latter case has already been
2856 handled above. */
2857 if (ctx != NULL)
2859 error_at (gimple_location (stmt),
2860 "%<ordered%> %<simd%> must be closely nested inside "
2861 "%<simd%> region");
2862 return false;
2865 for (; ctx != NULL; ctx = ctx->outer)
2866 switch (gimple_code (ctx->stmt))
2868 case GIMPLE_OMP_CRITICAL:
2869 case GIMPLE_OMP_TASK:
2870 case GIMPLE_OMP_ORDERED:
2871 ordered_in_taskloop:
2872 error_at (gimple_location (stmt),
2873 "%<ordered%> region may not be closely nested inside "
2874 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2875 "%<taskloop%> region");
2876 return false;
2877 case GIMPLE_OMP_FOR:
2878 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2879 goto ordered_in_taskloop;
2880 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2881 OMP_CLAUSE_ORDERED) == NULL)
2883 error_at (gimple_location (stmt),
2884 "%<ordered%> region must be closely nested inside "
2885 "a loop region with an %<ordered%> clause");
2886 return false;
2888 return true;
2889 case GIMPLE_OMP_TARGET:
2890 if (gimple_omp_target_kind (ctx->stmt)
2891 != GF_OMP_TARGET_KIND_REGION)
2892 break;
2893 /* FALLTHRU */
2894 case GIMPLE_OMP_PARALLEL:
2895 case GIMPLE_OMP_TEAMS:
2896 error_at (gimple_location (stmt),
2897 "%<ordered%> region must be closely nested inside "
2898 "a loop region with an %<ordered%> clause");
2899 return false;
2900 default:
2901 break;
2903 break;
2904 case GIMPLE_OMP_CRITICAL:
2906 tree this_stmt_name
2907 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2908 for (; ctx != NULL; ctx = ctx->outer)
2909 if (gomp_critical *other_crit
2910 = dyn_cast <gomp_critical *> (ctx->stmt))
2911 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2913 error_at (gimple_location (stmt),
2914 "%<critical%> region may not be nested inside "
2915 "a %<critical%> region with the same name");
2916 return false;
2919 break;
2920 case GIMPLE_OMP_TEAMS:
2921 if (ctx == NULL
2922 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2923 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2925 error_at (gimple_location (stmt),
2926 "%<teams%> construct not closely nested inside of "
2927 "%<target%> construct");
2928 return false;
2930 break;
2931 case GIMPLE_OMP_TARGET:
2932 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2933 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2934 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2935 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2937 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2938 error_at (OMP_CLAUSE_LOCATION (c),
2939 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2940 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2941 return false;
2943 if (is_gimple_omp_offloaded (stmt)
2944 && oacc_get_fn_attrib (cfun->decl) != NULL)
2946 error_at (gimple_location (stmt),
2947 "OpenACC region inside of OpenACC routine, nested "
2948 "parallelism not supported yet");
2949 return false;
2951 for (; ctx != NULL; ctx = ctx->outer)
2953 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2955 if (is_gimple_omp (stmt)
2956 && is_gimple_omp_oacc (stmt)
2957 && is_gimple_omp (ctx->stmt))
2959 error_at (gimple_location (stmt),
2960 "OpenACC construct inside of non-OpenACC region");
2961 return false;
2963 continue;
2966 const char *stmt_name, *ctx_stmt_name;
2967 switch (gimple_omp_target_kind (stmt))
2969 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2970 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2971 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2972 case GF_OMP_TARGET_KIND_ENTER_DATA:
2973 stmt_name = "target enter data"; break;
2974 case GF_OMP_TARGET_KIND_EXIT_DATA:
2975 stmt_name = "target exit data"; break;
2976 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2977 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2978 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2979 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2980 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2981 stmt_name = "enter/exit data"; break;
2982 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2983 break;
2984 default: gcc_unreachable ();
2986 switch (gimple_omp_target_kind (ctx->stmt))
2988 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2989 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2990 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2991 ctx_stmt_name = "parallel"; break;
2992 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2993 ctx_stmt_name = "kernels"; break;
2994 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2995 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2996 ctx_stmt_name = "host_data"; break;
2997 default: gcc_unreachable ();
3000 /* OpenACC/OpenMP mismatch? */
3001 if (is_gimple_omp_oacc (stmt)
3002 != is_gimple_omp_oacc (ctx->stmt))
3004 error_at (gimple_location (stmt),
3005 "%s %qs construct inside of %s %qs region",
3006 (is_gimple_omp_oacc (stmt)
3007 ? "OpenACC" : "OpenMP"), stmt_name,
3008 (is_gimple_omp_oacc (ctx->stmt)
3009 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3010 return false;
3012 if (is_gimple_omp_offloaded (ctx->stmt))
3014 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3015 if (is_gimple_omp_oacc (ctx->stmt))
3017 error_at (gimple_location (stmt),
3018 "%qs construct inside of %qs region",
3019 stmt_name, ctx_stmt_name);
3020 return false;
3022 else
3024 warning_at (gimple_location (stmt), 0,
3025 "%qs construct inside of %qs region",
3026 stmt_name, ctx_stmt_name);
3030 break;
3031 default:
3032 break;
3034 return true;
3038 /* Helper function for scan_omp.
3040 Callback for walk_tree, and for operand walks in walk_gimple_stmt,
3041 used to scan for OMP directives in TP. */
3043 static tree
3044 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3046 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3047 omp_context *ctx = (omp_context *) wi->info;
3048 tree t = *tp;
3050 switch (TREE_CODE (t))
3052 case VAR_DECL:
3053 case PARM_DECL:
3054 case LABEL_DECL:
3055 case RESULT_DECL:
3056 if (ctx)
3058 tree repl = remap_decl (t, &ctx->cb);
3059 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3060 *tp = repl;
3062 break;
3064 default:
3065 if (ctx && TYPE_P (t))
3066 *tp = remap_type (t, &ctx->cb);
3067 else if (!DECL_P (t))
3069 *walk_subtrees = 1;
3070 if (ctx)
3072 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3073 if (tem != TREE_TYPE (t))
3075 if (TREE_CODE (t) == INTEGER_CST)
3076 *tp = wide_int_to_tree (tem, t);
3077 else
3078 TREE_TYPE (t) = tem;
3082 break;
3085 return NULL_TREE;
3088 /* Return true if FNDECL is a setjmp or a longjmp. */
3090 static bool
3091 setjmp_or_longjmp_p (const_tree fndecl)
3093 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3094 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3095 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3096 return true;
3098 tree declname = DECL_NAME (fndecl);
3099 if (!declname)
3100 return false;
3101 const char *name = IDENTIFIER_POINTER (declname);
3102 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3106 /* Helper function for scan_omp.
3108 Callback for walk_gimple_stmt used to scan for OMP directives in
3109 the current statement in GSI. */
3111 static tree
3112 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3113 struct walk_stmt_info *wi)
3115 gimple *stmt = gsi_stmt (*gsi);
3116 omp_context *ctx = (omp_context *) wi->info;
3118 if (gimple_has_location (stmt))
3119 input_location = gimple_location (stmt);
3121 /* Check the nesting restrictions. */
3122 bool remove = false;
3123 if (is_gimple_omp (stmt))
3124 remove = !check_omp_nesting_restrictions (stmt, ctx);
3125 else if (is_gimple_call (stmt))
3127 tree fndecl = gimple_call_fndecl (stmt);
3128 if (fndecl)
3130 if (setjmp_or_longjmp_p (fndecl)
3131 && ctx
3132 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3133 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3135 remove = true;
3136 error_at (gimple_location (stmt),
3137 "setjmp/longjmp inside simd construct");
3139 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3140 switch (DECL_FUNCTION_CODE (fndecl))
3142 case BUILT_IN_GOMP_BARRIER:
3143 case BUILT_IN_GOMP_CANCEL:
3144 case BUILT_IN_GOMP_CANCELLATION_POINT:
3145 case BUILT_IN_GOMP_TASKYIELD:
3146 case BUILT_IN_GOMP_TASKWAIT:
3147 case BUILT_IN_GOMP_TASKGROUP_START:
3148 case BUILT_IN_GOMP_TASKGROUP_END:
3149 remove = !check_omp_nesting_restrictions (stmt, ctx);
3150 break;
3151 default:
3152 break;
3156 if (remove)
3158 stmt = gimple_build_nop ();
3159 gsi_replace (gsi, stmt, false);
3162 *handled_ops_p = true;
3164 switch (gimple_code (stmt))
3166 case GIMPLE_OMP_PARALLEL:
3167 taskreg_nesting_level++;
3168 scan_omp_parallel (gsi, ctx);
3169 taskreg_nesting_level--;
3170 break;
3172 case GIMPLE_OMP_TASK:
3173 taskreg_nesting_level++;
3174 scan_omp_task (gsi, ctx);
3175 taskreg_nesting_level--;
3176 break;
3178 case GIMPLE_OMP_FOR:
3179 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3180 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3181 && omp_maybe_offloaded_ctx (ctx)
3182 && omp_max_simt_vf ())
3183 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3184 else
3185 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3186 break;
3188 case GIMPLE_OMP_SECTIONS:
3189 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3190 break;
3192 case GIMPLE_OMP_SINGLE:
3193 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3194 break;
3196 case GIMPLE_OMP_SECTION:
3197 case GIMPLE_OMP_MASTER:
3198 case GIMPLE_OMP_TASKGROUP:
3199 case GIMPLE_OMP_ORDERED:
3200 case GIMPLE_OMP_CRITICAL:
3201 case GIMPLE_OMP_GRID_BODY:
3202 ctx = new_omp_context (stmt, ctx);
3203 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3204 break;
3206 case GIMPLE_OMP_TARGET:
3207 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3208 break;
3210 case GIMPLE_OMP_TEAMS:
3211 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3212 break;
3214 case GIMPLE_BIND:
3216 tree var;
3218 *handled_ops_p = false;
3219 if (ctx)
3220 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3221 var ;
3222 var = DECL_CHAIN (var))
3223 insert_decl_map (&ctx->cb, var, var);
3225 break;
3226 default:
3227 *handled_ops_p = false;
3228 break;
3231 return NULL_TREE;
3235 /* Scan all the statements starting at the current statement. CTX
3236 contains context information about the OMP directives and
3237 clauses found during the scan. */
3239 static void
3240 scan_omp (gimple_seq *body_p, omp_context *ctx)
3242 location_t saved_location;
3243 struct walk_stmt_info wi;
3245 memset (&wi, 0, sizeof (wi));
3246 wi.info = ctx;
3247 wi.want_locations = true;
3249 saved_location = input_location;
3250 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3251 input_location = saved_location;
3254 /* Re-gimplification and code generation routines. */
3256 /* If a context was created for STMT when it was scanned, return it. */
3258 static omp_context *
3259 maybe_lookup_ctx (gimple *stmt)
3261 splay_tree_node n;
3262 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3263 return n ? (omp_context *) n->value : NULL;
3267 /* Find the mapping for DECL in CTX or the immediately enclosing
3268 context that has a mapping for DECL.
3270 If CTX is a nested parallel directive, we may have to use the decl
3271 mappings created in CTX's parent context. Suppose that we have the
3272 following parallel nesting (variable UIDs shown for clarity):
3274 iD.1562 = 0;
3275 #omp parallel shared(iD.1562) -> outer parallel
3276 iD.1562 = iD.1562 + 1;
3278 #omp parallel shared (iD.1562) -> inner parallel
3279 iD.1562 = iD.1562 - 1;
3281 Each parallel structure will create a distinct .omp_data_s structure
3282 for copying iD.1562 in/out of the directive:
3284 outer parallel .omp_data_s.1.i -> iD.1562
3285 inner parallel .omp_data_s.2.i -> iD.1562
3287 A shared variable mapping will produce a copy-out operation before
3288 the parallel directive and a copy-in operation after it. So, in
3289 this case we would have:
3291 iD.1562 = 0;
3292 .omp_data_o.1.i = iD.1562;
3293 #omp parallel shared(iD.1562) -> outer parallel
3294 .omp_data_i.1 = &.omp_data_o.1
3295 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3297 .omp_data_o.2.i = iD.1562; -> **
3298 #omp parallel shared(iD.1562) -> inner parallel
3299 .omp_data_i.2 = &.omp_data_o.2
3300 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3303 ** This is a problem. The symbol iD.1562 cannot be referenced
3304 inside the body of the outer parallel region. But since we are
3305 emitting this copy operation while expanding the inner parallel
3306 directive, we need to access the CTX structure of the outer
3307 parallel directive to get the correct mapping:
3309 .omp_data_o.2.i = .omp_data_i.1->i
3311 Since there may be other workshare or parallel directives enclosing
3312 the parallel directive, it may be necessary to walk up the context
3313 parent chain. This is not a problem in general because nested
3314 parallelism happens only rarely. */
3316 static tree
3317 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3319 tree t;
3320 omp_context *up;
3322 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3323 t = maybe_lookup_decl (decl, up);
3325 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3327 return t ? t : decl;
3331 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3332 in outer contexts. */
3334 static tree
3335 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3337 tree t = NULL;
3338 omp_context *up;
3340 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3341 t = maybe_lookup_decl (decl, up);
3343 return t ? t : decl;
3347 /* Construct the initialization value for reduction operation OP. */
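/* As the switch below spells out, the identity value is, schematically:

     +, -, |, ^, ||, !=   ->  0
     *, &&, ==            ->  1
     &                    ->  ~0 (all bits set)
     max                  ->  the minimum of TYPE (or -Inf if honored)
     min                  ->  the maximum of TYPE (or +Inf if honored)  */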
3349 tree
3350 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3352 switch (op)
3354 case PLUS_EXPR:
3355 case MINUS_EXPR:
3356 case BIT_IOR_EXPR:
3357 case BIT_XOR_EXPR:
3358 case TRUTH_OR_EXPR:
3359 case TRUTH_ORIF_EXPR:
3360 case TRUTH_XOR_EXPR:
3361 case NE_EXPR:
3362 return build_zero_cst (type);
3364 case MULT_EXPR:
3365 case TRUTH_AND_EXPR:
3366 case TRUTH_ANDIF_EXPR:
3367 case EQ_EXPR:
3368 return fold_convert_loc (loc, type, integer_one_node);
3370 case BIT_AND_EXPR:
3371 return fold_convert_loc (loc, type, integer_minus_one_node);
3373 case MAX_EXPR:
3374 if (SCALAR_FLOAT_TYPE_P (type))
3376 REAL_VALUE_TYPE max, min;
3377 if (HONOR_INFINITIES (type))
3379 real_inf (&max);
3380 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3382 else
3383 real_maxval (&min, 1, TYPE_MODE (type));
3384 return build_real (type, min);
3386 else if (POINTER_TYPE_P (type))
3388 wide_int min
3389 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3390 return wide_int_to_tree (type, min);
3392 else
3394 gcc_assert (INTEGRAL_TYPE_P (type));
3395 return TYPE_MIN_VALUE (type);
3398 case MIN_EXPR:
3399 if (SCALAR_FLOAT_TYPE_P (type))
3401 REAL_VALUE_TYPE max;
3402 if (HONOR_INFINITIES (type))
3403 real_inf (&max);
3404 else
3405 real_maxval (&max, 0, TYPE_MODE (type));
3406 return build_real (type, max);
3408 else if (POINTER_TYPE_P (type))
3410 wide_int max
3411 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3412 return wide_int_to_tree (type, max);
3414 else
3416 gcc_assert (INTEGRAL_TYPE_P (type));
3417 return TYPE_MAX_VALUE (type);
3420 default:
3421 gcc_unreachable ();
3425 /* Construct the initialization value for reduction CLAUSE. */
3427 tree
3428 omp_reduction_init (tree clause, tree type)
3430 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3431 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3434 /* Return the alignment to be assumed for the variable in CLAUSE, which
3435 should be an OMP_CLAUSE_ALIGNED clause. */
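/* E.g. for "aligned (p : 32)" this returns 32; for a plain
   "aligned (p)" the loop below derives the largest useful unit
   alignment from the target's preferred SIMD modes.  */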
3437 static tree
3438 omp_clause_aligned_alignment (tree clause)
3440 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3441 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3443 /* Otherwise return the implementation-defined alignment. */
3444 unsigned int al = 1;
3445 machine_mode mode, vmode;
3446 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3447 if (vs)
3448 vs = 1 << floor_log2 (vs);
3449 static enum mode_class classes[]
3450 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3451 for (int i = 0; i < 4; i += 2)
3452 FOR_EACH_MODE_IN_CLASS (mode, classes[i])
3454 vmode = targetm.vectorize.preferred_simd_mode (mode);
3455 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3456 continue;
3457 while (vs
3458 && GET_MODE_SIZE (vmode) < vs
3459 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3460 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3462 tree type = lang_hooks.types.type_for_mode (mode, 1);
3463 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3464 continue;
3465 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3466 / GET_MODE_SIZE (mode));
3467 if (TYPE_MODE (type) != vmode)
3468 continue;
3469 if (TYPE_ALIGN_UNIT (type) > al)
3470 al = TYPE_ALIGN_UNIT (type);
3472 return build_int_cst (integer_type_node, al);
3476 /* This structure is part of the interface between lower_rec_simd_input_clauses
3477 and lower_rec_input_clauses. */
3479 struct omplow_simd_context {
3480 tree idx;
3481 tree lane;
3482 vec<tree, va_heap> simt_eargs;
3483 gimple_seq simt_dlist;
3484 int max_vf;
3485 bool is_simt;
3488 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3489 privatization. */
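/* In the non-SIMT case the privatized variable is backed by an
   "omp simd array" of max_vf elements; sketching the result for a
   privatized scalar D of type T:

     T D.arr[max_vf];
     ... D.arr[idx]  ...   (IVAR, the per-iteration copy)
     ... D.arr[lane] ...   (LVAR, installed as D's DECL_VALUE_EXPR)  */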
3491 static bool
3492 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3493 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3495 if (sctx->max_vf == 0)
3497 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3498 if (sctx->max_vf > 1)
3500 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3501 OMP_CLAUSE_SAFELEN);
3502 if (c
3503 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3504 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3505 sctx->max_vf = 1;
3506 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3507 sctx->max_vf) == -1)
3508 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3510 if (sctx->max_vf > 1)
3512 sctx->idx = create_tmp_var (unsigned_type_node);
3513 sctx->lane = create_tmp_var (unsigned_type_node);
3516 if (sctx->max_vf == 1)
3517 return false;
3519 if (sctx->is_simt)
3521 if (is_gimple_reg (new_var))
3523 ivar = lvar = new_var;
3524 return true;
3526 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3527 ivar = lvar = create_tmp_var (type);
3528 TREE_ADDRESSABLE (ivar) = 1;
3529 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3530 NULL, DECL_ATTRIBUTES (ivar));
3531 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3532 tree clobber = build_constructor (type, NULL);
3533 TREE_THIS_VOLATILE (clobber) = 1;
3534 gimple *g = gimple_build_assign (ivar, clobber);
3535 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3537 else
3539 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3540 tree avar = create_tmp_var_raw (atype);
3541 if (TREE_ADDRESSABLE (new_var))
3542 TREE_ADDRESSABLE (avar) = 1;
3543 DECL_ATTRIBUTES (avar)
3544 = tree_cons (get_identifier ("omp simd array"), NULL,
3545 DECL_ATTRIBUTES (avar));
3546 gimple_add_tmp_var (avar);
3547 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3548 NULL_TREE, NULL_TREE);
3549 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3550 NULL_TREE, NULL_TREE);
3552 if (DECL_P (new_var))
3554 SET_DECL_VALUE_EXPR (new_var, lvar);
3555 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3557 return true;
3560 /* Helper function of lower_rec_input_clauses. For a reference
3561 in a simd reduction, create the underlying variable it will reference. */
3563 static void
3564 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3566 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3567 if (TREE_CONSTANT (z))
3569 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3570 get_name (new_vard));
3571 gimple_add_tmp_var (z);
3572 TREE_ADDRESSABLE (z) = 1;
3573 z = build_fold_addr_expr_loc (loc, z);
3574 gimplify_assign (new_vard, z, ilist);
3578 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3579 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3580 private variables. Initialization statements go in ILIST, while calls
3581 to destructors go in DLIST. */
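/* A sketch of the receiver-side effect for "firstprivate (x)" on a
   parallel, assuming x is communicated through the .omp_data_i
   structure described further below:

     x.priv = .omp_data_i->x;           appended to ILIST

   where a REFERENCE_TYPE private would additionally get its
   constructor call added to ILIST and its destructor call to DLIST.  */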
3583 static void
3584 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3585 omp_context *ctx, struct omp_for_data *fd)
3587 tree c, dtor, copyin_seq, x, ptr;
3588 bool copyin_by_ref = false;
3589 bool lastprivate_firstprivate = false;
3590 bool reduction_omp_orig_ref = false;
3591 int pass;
3592 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3593 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3594 omplow_simd_context sctx = omplow_simd_context ();
3595 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3596 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3597 gimple_seq llist[3] = { };
3599 copyin_seq = NULL;
3600 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3602 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3603 with data-sharing clauses referencing variable-sized vars. That
3604 is unnecessarily hard to support and very unlikely to result in
3605 vectorized code anyway. */
3606 if (is_simd)
3607 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3608 switch (OMP_CLAUSE_CODE (c))
3610 case OMP_CLAUSE_LINEAR:
3611 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3612 sctx.max_vf = 1;
3613 /* FALLTHRU */
3614 case OMP_CLAUSE_PRIVATE:
3615 case OMP_CLAUSE_FIRSTPRIVATE:
3616 case OMP_CLAUSE_LASTPRIVATE:
3617 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3618 sctx.max_vf = 1;
3619 break;
3620 case OMP_CLAUSE_REDUCTION:
3621 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3622 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3623 sctx.max_vf = 1;
3624 break;
3625 default:
3626 continue;
3629 /* Add a placeholder for simduid. */
3630 if (sctx.is_simt && sctx.max_vf != 1)
3631 sctx.simt_eargs.safe_push (NULL_TREE);
3633 /* Do all the fixed-sized types in the first pass, and the variable-sized
3634 types in the second pass. This makes sure that the scalar arguments to
3635 the variable-sized types are processed before we use them in the
3636 variable-sized operations. */
3637 for (pass = 0; pass < 2; ++pass)
3639 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3641 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3642 tree var, new_var;
3643 bool by_ref;
3644 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3646 switch (c_kind)
3648 case OMP_CLAUSE_PRIVATE:
3649 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3650 continue;
3651 break;
3652 case OMP_CLAUSE_SHARED:
3653 /* Ignore shared directives in a teams construct. */
3654 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3655 continue;
3656 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3658 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3659 || is_global_var (OMP_CLAUSE_DECL (c)));
3660 continue;
3662 case OMP_CLAUSE_FIRSTPRIVATE:
3663 case OMP_CLAUSE_COPYIN:
3664 break;
3665 case OMP_CLAUSE_LINEAR:
3666 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3667 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3668 lastprivate_firstprivate = true;
3669 break;
3670 case OMP_CLAUSE_REDUCTION:
3671 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3672 reduction_omp_orig_ref = true;
3673 break;
3674 case OMP_CLAUSE__LOOPTEMP_:
3675 /* Handle _looptemp_ clauses only on parallel/task. */
3676 if (fd)
3677 continue;
3678 break;
3679 case OMP_CLAUSE_LASTPRIVATE:
3680 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3682 lastprivate_firstprivate = true;
3683 if (pass != 0 || is_taskloop_ctx (ctx))
3684 continue;
3686 /* Even without a corresponding firstprivate, if the
3687 decl is Fortran allocatable, it needs an outer var
3688 reference. */
3689 else if (pass == 0
3690 && lang_hooks.decls.omp_private_outer_ref
3691 (OMP_CLAUSE_DECL (c)))
3692 lastprivate_firstprivate = true;
3693 break;
3694 case OMP_CLAUSE_ALIGNED:
3695 if (pass == 0)
3696 continue;
3697 var = OMP_CLAUSE_DECL (c);
3698 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3699 && !is_global_var (var))
3701 new_var = maybe_lookup_decl (var, ctx);
3702 if (new_var == NULL_TREE)
3703 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3704 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3705 tree alarg = omp_clause_aligned_alignment (c);
3706 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3707 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3708 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3709 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3710 gimplify_and_add (x, ilist);
3712 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3713 && is_global_var (var))
3715 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3716 new_var = lookup_decl (var, ctx);
3717 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3718 t = build_fold_addr_expr_loc (clause_loc, t);
3719 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3720 tree alarg = omp_clause_aligned_alignment (c);
3721 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3722 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3723 t = fold_convert_loc (clause_loc, ptype, t);
3724 x = create_tmp_var (ptype);
3725 t = build2 (MODIFY_EXPR, ptype, x, t);
3726 gimplify_and_add (t, ilist);
3727 t = build_simple_mem_ref_loc (clause_loc, x);
3728 SET_DECL_VALUE_EXPR (new_var, t);
3729 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3731 continue;
3732 default:
3733 continue;
3736 new_var = var = OMP_CLAUSE_DECL (c);
3737 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3739 var = TREE_OPERAND (var, 0);
3740 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3741 var = TREE_OPERAND (var, 0);
3742 if (TREE_CODE (var) == INDIRECT_REF
3743 || TREE_CODE (var) == ADDR_EXPR)
3744 var = TREE_OPERAND (var, 0);
3745 if (is_variable_sized (var))
3747 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3748 var = DECL_VALUE_EXPR (var);
3749 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3750 var = TREE_OPERAND (var, 0);
3751 gcc_assert (DECL_P (var));
3753 new_var = var;
3755 if (c_kind != OMP_CLAUSE_COPYIN)
3756 new_var = lookup_decl (var, ctx);
3758 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3760 if (pass != 0)
3761 continue;
3763 /* C/C++ array section reductions. */
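/* E.g. "reduction (+ : a[x:n])"; OMP_CLAUSE_DECL is then a MEM_REF
   whose second operand carries the bias extracted just below.  */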
3764 else if (c_kind == OMP_CLAUSE_REDUCTION
3765 && var != OMP_CLAUSE_DECL (c))
3767 if (pass == 0)
3768 continue;
3770 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3771 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3772 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3774 tree b = TREE_OPERAND (orig_var, 1);
3775 b = maybe_lookup_decl (b, ctx);
3776 if (b == NULL)
3778 b = TREE_OPERAND (orig_var, 1);
3779 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3781 if (integer_zerop (bias))
3782 bias = b;
3783 else
3785 bias = fold_convert_loc (clause_loc,
3786 TREE_TYPE (b), bias);
3787 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3788 TREE_TYPE (b), b, bias);
3790 orig_var = TREE_OPERAND (orig_var, 0);
3792 if (TREE_CODE (orig_var) == INDIRECT_REF
3793 || TREE_CODE (orig_var) == ADDR_EXPR)
3794 orig_var = TREE_OPERAND (orig_var, 0);
3795 tree d = OMP_CLAUSE_DECL (c);
3796 tree type = TREE_TYPE (d);
3797 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3798 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3799 const char *name = get_name (orig_var);
3800 if (TREE_CONSTANT (v))
3802 x = create_tmp_var_raw (type, name);
3803 gimple_add_tmp_var (x);
3804 TREE_ADDRESSABLE (x) = 1;
3805 x = build_fold_addr_expr_loc (clause_loc, x);
3807 else
3809 tree atmp
3810 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3811 tree t = maybe_lookup_decl (v, ctx);
3812 if (t)
3813 v = t;
3814 else
3815 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3816 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3817 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3818 TREE_TYPE (v), v,
3819 build_int_cst (TREE_TYPE (v), 1));
3820 t = fold_build2_loc (clause_loc, MULT_EXPR,
3821 TREE_TYPE (v), t,
3822 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3823 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3824 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3827 tree ptype = build_pointer_type (TREE_TYPE (type));
3828 x = fold_convert_loc (clause_loc, ptype, x);
3829 tree y = create_tmp_var (ptype, name);
3830 gimplify_assign (y, x, ilist);
3831 x = y;
3832 tree yb = y;
3834 if (!integer_zerop (bias))
3836 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3837 bias);
3838 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3840 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3841 pointer_sized_int_node, yb, bias);
3842 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3843 yb = create_tmp_var (ptype, name);
3844 gimplify_assign (yb, x, ilist);
3845 x = yb;
3848 d = TREE_OPERAND (d, 0);
3849 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3850 d = TREE_OPERAND (d, 0);
3851 if (TREE_CODE (d) == ADDR_EXPR)
3853 if (orig_var != var)
3855 gcc_assert (is_variable_sized (orig_var));
3856 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3858 gimplify_assign (new_var, x, ilist);
3859 tree new_orig_var = lookup_decl (orig_var, ctx);
3860 tree t = build_fold_indirect_ref (new_var);
3861 DECL_IGNORED_P (new_var) = 0;
3862 TREE_THIS_NOTRAP (t) = 1;
3863 SET_DECL_VALUE_EXPR (new_orig_var, t);
3864 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3866 else
3868 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3869 build_int_cst (ptype, 0));
3870 SET_DECL_VALUE_EXPR (new_var, x);
3871 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3874 else
3876 gcc_assert (orig_var == var);
3877 if (TREE_CODE (d) == INDIRECT_REF)
3879 x = create_tmp_var (ptype, name);
3880 TREE_ADDRESSABLE (x) = 1;
3881 gimplify_assign (x, yb, ilist);
3882 x = build_fold_addr_expr_loc (clause_loc, x);
3884 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3885 gimplify_assign (new_var, x, ilist);
3887 tree y1 = create_tmp_var (ptype, NULL);
3888 gimplify_assign (y1, y, ilist);
3889 tree i2 = NULL_TREE, y2 = NULL_TREE;
3890 tree body2 = NULL_TREE, end2 = NULL_TREE;
3891 tree y3 = NULL_TREE, y4 = NULL_TREE;
3892 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3894 y2 = create_tmp_var (ptype, NULL);
3895 gimplify_assign (y2, y, ilist);
3896 tree ref = build_outer_var_ref (var, ctx);
3897 /* For a reference, build_outer_var_ref already performs this. */
3898 if (TREE_CODE (d) == INDIRECT_REF)
3899 gcc_assert (omp_is_reference (var));
3900 else if (TREE_CODE (d) == ADDR_EXPR)
3901 ref = build_fold_addr_expr (ref);
3902 else if (omp_is_reference (var))
3903 ref = build_fold_addr_expr (ref);
3904 ref = fold_convert_loc (clause_loc, ptype, ref);
3905 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3906 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3908 y3 = create_tmp_var (ptype, NULL);
3909 gimplify_assign (y3, unshare_expr (ref), ilist);
3911 if (is_simd)
3913 y4 = create_tmp_var (ptype, NULL);
3914 gimplify_assign (y4, ref, dlist);
3917 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3918 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3919 tree body = create_artificial_label (UNKNOWN_LOCATION);
3920 tree end = create_artificial_label (UNKNOWN_LOCATION);
3921 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3922 if (y2)
3924 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3925 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3926 body2 = create_artificial_label (UNKNOWN_LOCATION);
3927 end2 = create_artificial_label (UNKNOWN_LOCATION);
3928 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3930 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3932 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3933 tree decl_placeholder
3934 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3935 SET_DECL_VALUE_EXPR (decl_placeholder,
3936 build_simple_mem_ref (y1));
3937 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3938 SET_DECL_VALUE_EXPR (placeholder,
3939 y3 ? build_simple_mem_ref (y3)
3940 : error_mark_node);
3941 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3942 x = lang_hooks.decls.omp_clause_default_ctor
3943 (c, build_simple_mem_ref (y1),
3944 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3945 if (x)
3946 gimplify_and_add (x, ilist);
3947 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3949 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3950 lower_omp (&tseq, ctx);
3951 gimple_seq_add_seq (ilist, tseq);
3953 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3954 if (is_simd)
3956 SET_DECL_VALUE_EXPR (decl_placeholder,
3957 build_simple_mem_ref (y2));
3958 SET_DECL_VALUE_EXPR (placeholder,
3959 build_simple_mem_ref (y4));
3960 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3961 lower_omp (&tseq, ctx);
3962 gimple_seq_add_seq (dlist, tseq);
3963 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3965 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3966 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3967 x = lang_hooks.decls.omp_clause_dtor
3968 (c, build_simple_mem_ref (y2));
3969 if (x)
3971 gimple_seq tseq = NULL;
3972 dtor = x;
3973 gimplify_stmt (&dtor, &tseq);
3974 gimple_seq_add_seq (dlist, tseq);
3977 else
3979 x = omp_reduction_init (c, TREE_TYPE (type));
3980 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3982 /* reduction(-:var) sums up the partial results, so it
3983 acts identically to reduction(+:var). */
3984 if (code == MINUS_EXPR)
3985 code = PLUS_EXPR;
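	      /* Illustrative sketch (hypothetical user code, not from this
		 file): given
		   #pragma omp parallel for reduction(-:sum)
		   for (i = 0; i < n; i++)
		     sum -= a[i];
		 each thread still accumulates into a zero-initialized
		 private copy, and the partial results are combined with
		 PLUS_EXPR, as the OpenMP spec requires for '-'.  */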
3987 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3988 if (is_simd)
3990 x = build2 (code, TREE_TYPE (type),
3991 build_simple_mem_ref (y4),
3992 build_simple_mem_ref (y2));
3993 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3996 gimple *g
3997 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3998 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3999 gimple_seq_add_stmt (ilist, g);
4000 if (y3)
4002 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4003 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4004 gimple_seq_add_stmt (ilist, g);
4006 g = gimple_build_assign (i, PLUS_EXPR, i,
4007 build_int_cst (TREE_TYPE (i), 1));
4008 gimple_seq_add_stmt (ilist, g);
4009 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4010 gimple_seq_add_stmt (ilist, g);
4011 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4012 if (y2)
4014 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4015 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4016 gimple_seq_add_stmt (dlist, g);
4017 if (y4)
4019 g = gimple_build_assign
4020 (y4, POINTER_PLUS_EXPR, y4,
4021 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4022 gimple_seq_add_stmt (dlist, g);
4024 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4025 build_int_cst (TREE_TYPE (i2), 1));
4026 gimple_seq_add_stmt (dlist, g);
4027 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4028 gimple_seq_add_stmt (dlist, g);
4029 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4031 continue;
4033 else if (is_variable_sized (var))
4035 /* For variable sized types, we need to allocate the
4036 actual storage here. Call alloca and store the
4037 result in the pointer decl that we created elsewhere. */
4038 if (pass == 0)
4039 continue;
4041 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4043 gcall *stmt;
4044 tree tmp, atmp;
4046 ptr = DECL_VALUE_EXPR (new_var);
4047 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4048 ptr = TREE_OPERAND (ptr, 0);
4049 gcc_assert (DECL_P (ptr));
4050 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4052 /* void *tmp = __builtin_alloca_with_align (size, align); */
4053 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4054 stmt = gimple_build_call (atmp, 2, x,
4055 size_int (DECL_ALIGN (var)));
4056 tmp = create_tmp_var_raw (ptr_type_node);
4057 gimple_add_tmp_var (tmp);
4058 gimple_call_set_lhs (stmt, tmp);
4060 gimple_seq_add_stmt (ilist, stmt);
4062 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4063 gimplify_assign (ptr, x, ilist);
4066 else if (omp_is_reference (var))
4068 /* For references that are being privatized for Fortran,
4069 allocate new backing storage for the new pointer
4070 variable. This allows us to avoid changing all the
4071 code that expects a pointer to something that expects
4072 a direct variable. */
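	    /* Illustrative sketch: a privatized Fortran argument X arrives
	       as a pointer P.  Instead of rewriting every use of *P, we
	       emit roughly
		 T x_storage;      (fresh backing store, or an alloca call)
		 P_priv = &x_storage;
	       and keep dereferencing the now-private pointer.  */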
4073 if (pass == 0)
4074 continue;
4076 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4077 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4079 x = build_receiver_ref (var, false, ctx);
4080 x = build_fold_addr_expr_loc (clause_loc, x);
4082 else if (TREE_CONSTANT (x))
4084 /* For a reduction in a SIMD loop, defer adding the
4085 initialization of the reference, because if we decide
4086 to use a SIMD array for it, the initialization could
4087 cause an expansion ICE. */
4088 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4089 x = NULL_TREE;
4090 else
4092 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4093 get_name (var));
4094 gimple_add_tmp_var (x);
4095 TREE_ADDRESSABLE (x) = 1;
4096 x = build_fold_addr_expr_loc (clause_loc, x);
4099 else
4101 tree atmp
4102 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4103 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4104 tree al = size_int (TYPE_ALIGN (rtype));
4105 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4108 if (x)
4110 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4111 gimplify_assign (new_var, x, ilist);
4114 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4116 else if (c_kind == OMP_CLAUSE_REDUCTION
4117 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4119 if (pass == 0)
4120 continue;
4122 else if (pass != 0)
4123 continue;
4125 switch (OMP_CLAUSE_CODE (c))
4127 case OMP_CLAUSE_SHARED:
4128 /* Ignore shared directives in teams construct. */
4129 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4130 continue;
4131 /* Shared global vars are just accessed directly. */
4132 if (is_global_var (new_var))
4133 break;
4134 /* For taskloop firstprivate/lastprivate, represented
4135 as a firstprivate and a shared clause on the task, new_var
4136 is the firstprivate var. */
4137 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4138 break;
4139 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4140 needs to be delayed until after fixup_child_record_type so
4141 that we get the correct type during the dereference. */
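	  /* Hypothetical illustration: for 'shared(x)' on a parallel, uses
	     of X in the child become approximately
	       x  ==>  *.omp_data_i->x    (when passed by reference)
	       x  ==>  .omp_data_i->x     (when copied into the struct)
	     via the DECL_VALUE_EXPR set up here.  */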
4142 by_ref = use_pointer_for_field (var, ctx);
4143 x = build_receiver_ref (var, by_ref, ctx);
4144 SET_DECL_VALUE_EXPR (new_var, x);
4145 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4147 /* ??? If VAR is not passed by reference, and the variable
4148 hasn't been initialized yet, then we'll get a warning for
4149 the store into the omp_data_s structure. Ideally, we'd be
4150 able to notice this and not store anything at all, but
4151 we're generating code too early. Suppress the warning. */
4152 if (!by_ref)
4153 TREE_NO_WARNING (var) = 1;
4154 break;
4156 case OMP_CLAUSE_LASTPRIVATE:
4157 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4158 break;
4159 /* FALLTHRU */
4161 case OMP_CLAUSE_PRIVATE:
4162 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4163 x = build_outer_var_ref (var, ctx);
4164 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4166 if (is_task_ctx (ctx))
4167 x = build_receiver_ref (var, false, ctx);
4168 else
4169 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4171 else
4172 x = NULL;
4173 do_private:
4174 tree nx;
4175 nx = lang_hooks.decls.omp_clause_default_ctor
4176 (c, unshare_expr (new_var), x);
4177 if (is_simd)
4179 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4180 if ((TREE_ADDRESSABLE (new_var) || nx || y
4181 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4182 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4183 ivar, lvar))
4185 if (nx)
4186 x = lang_hooks.decls.omp_clause_default_ctor
4187 (c, unshare_expr (ivar), x);
4188 if (nx && x)
4189 gimplify_and_add (x, &llist[0]);
4190 if (y)
4192 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4193 if (y)
4195 gimple_seq tseq = NULL;
4197 dtor = y;
4198 gimplify_stmt (&dtor, &tseq);
4199 gimple_seq_add_seq (&llist[1], tseq);
4202 break;
4205 if (nx)
4206 gimplify_and_add (nx, ilist);
4207 /* FALLTHRU */
4209 do_dtor:
4210 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4211 if (x)
4213 gimple_seq tseq = NULL;
4215 dtor = x;
4216 gimplify_stmt (&dtor, &tseq);
4217 gimple_seq_add_seq (dlist, tseq);
4219 break;
4221 case OMP_CLAUSE_LINEAR:
4222 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4223 goto do_firstprivate;
4224 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4225 x = NULL;
4226 else
4227 x = build_outer_var_ref (var, ctx);
4228 goto do_private;
4230 case OMP_CLAUSE_FIRSTPRIVATE:
4231 if (is_task_ctx (ctx))
4233 if (omp_is_reference (var) || is_variable_sized (var))
4234 goto do_dtor;
4235 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4236 ctx))
4237 || use_pointer_for_field (var, NULL))
4239 x = build_receiver_ref (var, false, ctx);
4240 SET_DECL_VALUE_EXPR (new_var, x);
4241 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4242 goto do_dtor;
4245 do_firstprivate:
4246 x = build_outer_var_ref (var, ctx);
4247 if (is_simd)
4249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4250 && gimple_omp_for_combined_into_p (ctx->stmt))
4252 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4253 tree stept = TREE_TYPE (t);
4254 tree ct = omp_find_clause (clauses,
4255 OMP_CLAUSE__LOOPTEMP_);
4256 gcc_assert (ct);
4257 tree l = OMP_CLAUSE_DECL (ct);
4258 tree n1 = fd->loop.n1;
4259 tree step = fd->loop.step;
4260 tree itype = TREE_TYPE (l);
4261 if (POINTER_TYPE_P (itype))
4262 itype = signed_type_for (itype);
4263 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4264 if (TYPE_UNSIGNED (itype)
4265 && fd->loop.cond_code == GT_EXPR)
4266 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4267 fold_build1 (NEGATE_EXPR, itype, l),
4268 fold_build1 (NEGATE_EXPR,
4269 itype, step));
4270 else
4271 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4272 t = fold_build2 (MULT_EXPR, stept,
4273 fold_convert (stept, l), t);
4275 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4277 x = lang_hooks.decls.omp_clause_linear_ctor
4278 (c, new_var, x, t);
4279 gimplify_and_add (x, ilist);
4280 goto do_dtor;
4283 if (POINTER_TYPE_P (TREE_TYPE (x)))
4284 x = fold_build2 (POINTER_PLUS_EXPR,
4285 TREE_TYPE (x), x, t);
4286 else
4287 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4290 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4291 || TREE_ADDRESSABLE (new_var))
4292 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4293 ivar, lvar))
4295 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4297 tree iv = create_tmp_var (TREE_TYPE (new_var));
4298 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4299 gimplify_and_add (x, ilist);
4300 gimple_stmt_iterator gsi
4301 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4302 gassign *g
4303 = gimple_build_assign (unshare_expr (lvar), iv);
4304 gsi_insert_before_without_update (&gsi, g,
4305 GSI_SAME_STMT);
4306 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4307 enum tree_code code = PLUS_EXPR;
4308 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4309 code = POINTER_PLUS_EXPR;
4310 g = gimple_build_assign (iv, code, iv, t);
4311 gsi_insert_before_without_update (&gsi, g,
4312 GSI_SAME_STMT);
4313 break;
4315 x = lang_hooks.decls.omp_clause_copy_ctor
4316 (c, unshare_expr (ivar), x);
4317 gimplify_and_add (x, &llist[0]);
4318 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4319 if (x)
4321 gimple_seq tseq = NULL;
4323 dtor = x;
4324 gimplify_stmt (&dtor, &tseq);
4325 gimple_seq_add_seq (&llist[1], tseq);
4327 break;
4330 x = lang_hooks.decls.omp_clause_copy_ctor
4331 (c, unshare_expr (new_var), x);
4332 gimplify_and_add (x, ilist);
4333 goto do_dtor;
4335 case OMP_CLAUSE__LOOPTEMP_:
4336 gcc_assert (is_taskreg_ctx (ctx));
4337 x = build_outer_var_ref (var, ctx);
4338 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4339 gimplify_and_add (x, ilist);
4340 break;
4342 case OMP_CLAUSE_COPYIN:
4343 by_ref = use_pointer_for_field (var, NULL);
4344 x = build_receiver_ref (var, by_ref, ctx);
4345 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4346 append_to_statement_list (x, &copyin_seq);
4347 copyin_by_ref |= by_ref;
4348 break;
4350 case OMP_CLAUSE_REDUCTION:
4351 /* OpenACC reductions are initialized using the
4352 GOACC_REDUCTION internal function. */
4353 if (is_gimple_omp_oacc (ctx->stmt))
4354 break;
4355 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4357 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4358 gimple *tseq;
4359 x = build_outer_var_ref (var, ctx);
4361 if (omp_is_reference (var)
4362 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4363 TREE_TYPE (x)))
4364 x = build_fold_addr_expr_loc (clause_loc, x);
4365 SET_DECL_VALUE_EXPR (placeholder, x);
4366 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4367 tree new_vard = new_var;
4368 if (omp_is_reference (var))
4370 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4371 new_vard = TREE_OPERAND (new_var, 0);
4372 gcc_assert (DECL_P (new_vard));
4374 if (is_simd
4375 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4376 ivar, lvar))
4378 if (new_vard == new_var)
4380 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4381 SET_DECL_VALUE_EXPR (new_var, ivar);
4383 else
4385 SET_DECL_VALUE_EXPR (new_vard,
4386 build_fold_addr_expr (ivar));
4387 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4389 x = lang_hooks.decls.omp_clause_default_ctor
4390 (c, unshare_expr (ivar),
4391 build_outer_var_ref (var, ctx));
4392 if (x)
4393 gimplify_and_add (x, &llist[0]);
4394 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4396 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4397 lower_omp (&tseq, ctx);
4398 gimple_seq_add_seq (&llist[0], tseq);
4400 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4401 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4402 lower_omp (&tseq, ctx);
4403 gimple_seq_add_seq (&llist[1], tseq);
4404 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4405 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4406 if (new_vard == new_var)
4407 SET_DECL_VALUE_EXPR (new_var, lvar);
4408 else
4409 SET_DECL_VALUE_EXPR (new_vard,
4410 build_fold_addr_expr (lvar));
4411 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4412 if (x)
4414 tseq = NULL;
4415 dtor = x;
4416 gimplify_stmt (&dtor, &tseq);
4417 gimple_seq_add_seq (&llist[1], tseq);
4419 break;
4421 /* If this is a reference to a constant-size reduction var
4422 with a placeholder, we haven't emitted the initializer
4423 for it, because that is undesirable if SIMD arrays are used.
4424 But if they aren't used, we need to emit the deferred
4425 initialization now. */
4426 else if (omp_is_reference (var) && is_simd)
4427 handle_simd_reference (clause_loc, new_vard, ilist);
4428 x = lang_hooks.decls.omp_clause_default_ctor
4429 (c, unshare_expr (new_var),
4430 build_outer_var_ref (var, ctx));
4431 if (x)
4432 gimplify_and_add (x, ilist);
4433 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4435 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4436 lower_omp (&tseq, ctx);
4437 gimple_seq_add_seq (ilist, tseq);
4439 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4440 if (is_simd)
4442 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4443 lower_omp (&tseq, ctx);
4444 gimple_seq_add_seq (dlist, tseq);
4445 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4447 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4448 goto do_dtor;
4450 else
4452 x = omp_reduction_init (c, TREE_TYPE (new_var));
4453 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4454 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4456 /* reduction(-:var) sums up the partial results, so it
4457 acts identically to reduction(+:var). */
4458 if (code == MINUS_EXPR)
4459 code = PLUS_EXPR;
4461 tree new_vard = new_var;
4462 if (is_simd && omp_is_reference (var))
4464 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4465 new_vard = TREE_OPERAND (new_var, 0);
4466 gcc_assert (DECL_P (new_vard));
4468 if (is_simd
4469 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4470 ivar, lvar))
4472 tree ref = build_outer_var_ref (var, ctx);
4474 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4476 if (sctx.is_simt)
4478 if (!simt_lane)
4479 simt_lane = create_tmp_var (unsigned_type_node);
4480 x = build_call_expr_internal_loc
4481 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4482 TREE_TYPE (ivar), 2, ivar, simt_lane);
4483 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4484 gimplify_assign (ivar, x, &llist[2]);
4486 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4487 ref = build_outer_var_ref (var, ctx);
4488 gimplify_assign (ref, x, &llist[1]);
4490 if (new_vard != new_var)
4492 SET_DECL_VALUE_EXPR (new_vard,
4493 build_fold_addr_expr (lvar));
4494 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4497 else
4499 if (omp_is_reference (var) && is_simd)
4500 handle_simd_reference (clause_loc, new_vard, ilist);
4501 gimplify_assign (new_var, x, ilist);
4502 if (is_simd)
4504 tree ref = build_outer_var_ref (var, ctx);
4506 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4507 ref = build_outer_var_ref (var, ctx);
4508 gimplify_assign (ref, x, dlist);
4512 break;
4514 default:
4515 gcc_unreachable ();
4520 if (sctx.max_vf == 1)
4521 sctx.is_simt = false;
4523 if (sctx.lane || sctx.is_simt)
4525 uid = create_tmp_var (ptr_type_node, "simduid");
4526 /* Don't warn about simduid being uninitialized; it always is, since
4527 we use it only for its DECL_UID, never for its value. */
4528 TREE_NO_WARNING (uid) = 1;
4529 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4530 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4531 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4532 gimple_omp_for_set_clauses (ctx->stmt, c);
4534 /* Emit calls denoting privatized variables and initializing a pointer
4535 to the structure that holds private variables as fields, after the ompdevlow pass. */
4536 if (sctx.is_simt)
4538 sctx.simt_eargs[0] = uid;
4539 gimple *g
4540 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4541 gimple_call_set_lhs (g, uid);
4542 gimple_seq_add_stmt (ilist, g);
4543 sctx.simt_eargs.release ();
4545 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4546 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4547 gimple_call_set_lhs (g, simtrec);
4548 gimple_seq_add_stmt (ilist, g);
4550 if (sctx.lane)
4552 gimple *g
4553 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4554 gimple_call_set_lhs (g, sctx.lane);
4555 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4556 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4557 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4558 build_int_cst (unsigned_type_node, 0));
4559 gimple_seq_add_stmt (ilist, g);
4560 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
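	  /* Rough shape of what is emitted into DLIST (pseudo-GIMPLE,
	     names illustrative):
	       simt_lane = 1;
	       goto header;
	       body:
		 <llist[2]: r = r OP .GOMP_SIMT_XCHG_BFLY (r, simt_lane)>
		 simt_lane = simt_lane << 1;
	       header:
		 if (simt_lane < simt_vf) goto body; else goto end;
	       end:
	     a butterfly reduction combining partial results from lanes
	     whose indices differ in exactly one bit per step.  */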
4561 if (llist[2])
4563 tree simt_vf = create_tmp_var (unsigned_type_node);
4564 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4565 gimple_call_set_lhs (g, simt_vf);
4566 gimple_seq_add_stmt (dlist, g);
4568 tree t = build_int_cst (unsigned_type_node, 1);
4569 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4570 gimple_seq_add_stmt (dlist, g);
4572 t = build_int_cst (unsigned_type_node, 0);
4573 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4574 gimple_seq_add_stmt (dlist, g);
4576 tree body = create_artificial_label (UNKNOWN_LOCATION);
4577 tree header = create_artificial_label (UNKNOWN_LOCATION);
4578 tree end = create_artificial_label (UNKNOWN_LOCATION);
4579 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4580 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4582 gimple_seq_add_seq (dlist, llist[2]);
4584 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4585 gimple_seq_add_stmt (dlist, g);
4587 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4588 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4589 gimple_seq_add_stmt (dlist, g);
4591 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4593 for (int i = 0; i < 2; i++)
4594 if (llist[i])
4596 tree vf = create_tmp_var (unsigned_type_node);
4597 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4598 gimple_call_set_lhs (g, vf);
4599 gimple_seq *seq = i == 0 ? ilist : dlist;
4600 gimple_seq_add_stmt (seq, g);
4601 tree t = build_int_cst (unsigned_type_node, 0);
4602 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4603 gimple_seq_add_stmt (seq, g);
4604 tree body = create_artificial_label (UNKNOWN_LOCATION);
4605 tree header = create_artificial_label (UNKNOWN_LOCATION);
4606 tree end = create_artificial_label (UNKNOWN_LOCATION);
4607 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4608 gimple_seq_add_stmt (seq, gimple_build_label (body));
4609 gimple_seq_add_seq (seq, llist[i]);
4610 t = build_int_cst (unsigned_type_node, 1);
4611 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4612 gimple_seq_add_stmt (seq, g);
4613 gimple_seq_add_stmt (seq, gimple_build_label (header));
4614 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4615 gimple_seq_add_stmt (seq, g);
4616 gimple_seq_add_stmt (seq, gimple_build_label (end));
4619 if (sctx.is_simt)
4621 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4622 gimple *g
4623 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4624 gimple_seq_add_stmt (dlist, g);
4627 /* The copyin sequence is not to be executed by the main thread, since
4628 that would result in self-copies. That may be harmless for scalars,
4629 but it certainly is visible to C++ operator=. */
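  /* In outline, the guard built below is just (illustrative):
       if (omp_get_thread_num () != 0)
	 <copyin_seq>;
     so only the non-master threads perform the copies.  */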
4630 if (copyin_seq)
4632 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4633 0);
4634 x = build2 (NE_EXPR, boolean_type_node, x,
4635 build_int_cst (TREE_TYPE (x), 0));
4636 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4637 gimplify_and_add (x, ilist);
4640 /* If any copyin variable is passed by reference, we must ensure the
4641 master thread doesn't modify it before it is copied over in all
4642 threads. Similarly for variables in both firstprivate and
4643 lastprivate clauses we need to ensure the lastprivate copying
4644 happens after firstprivate copying in all threads. And similarly
4645 for UDRs, if the initializer expression refers to omp_orig. */
4646 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4648 /* Don't add any barrier for #pragma omp simd or
4649 #pragma omp distribute. */
4650 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4651 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4652 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4655 /* If max_vf is non-zero, then we can use only a vectorization factor
4656 up to the max_vf we chose. So stick it into the safelen clause. */
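  /* Hypothetical example: with an explicit safelen(16) but max_vf
     capped at 8, a new safelen(8) clause is prepended; likewise when
     no safelen clause was present at all.  */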
4657 if (sctx.max_vf)
4659 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4660 OMP_CLAUSE_SAFELEN);
4661 if (c == NULL_TREE
4662 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4663 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4664 sctx.max_vf) == 1))
4666 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4667 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4668 sctx.max_vf);
4669 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4670 gimple_omp_for_set_clauses (ctx->stmt, c);
4676 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4677 both parallel and workshare constructs. PREDICATE may be NULL if it's
4678 always true. */
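/* For illustration (hypothetical user code):

     #pragma omp parallel for lastprivate(x)
     for (i = 0; i < n; i++)
       x = f (i);

   Only the private copy from the sequentially last iteration is copied
   back to the original X; PREDICATE, when non-NULL, is the "was this
   the last iteration" test guarding that copy-out.  */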
4680 static void
4681 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4682 omp_context *ctx)
4684 tree x, c, label = NULL, orig_clauses = clauses;
4685 bool par_clauses = false;
4686 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4688 /* Early exit if there are no lastprivate or linear clauses. */
4689 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4690 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4691 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4692 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4693 break;
4694 if (clauses == NULL)
4696 /* If this was a workshare clause, see if it had been combined
4697 with its parallel. In that case, look for the clauses on the
4698 parallel statement itself. */
4699 if (is_parallel_ctx (ctx))
4700 return;
4702 ctx = ctx->outer;
4703 if (ctx == NULL || !is_parallel_ctx (ctx))
4704 return;
4706 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4707 OMP_CLAUSE_LASTPRIVATE);
4708 if (clauses == NULL)
4709 return;
4710 par_clauses = true;
4713 bool maybe_simt = false;
4714 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4715 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4717 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4718 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4719 if (simduid)
4720 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4723 if (predicate)
4725 gcond *stmt;
4726 tree label_true, arm1, arm2;
4727 enum tree_code pred_code = TREE_CODE (predicate);
4729 label = create_artificial_label (UNKNOWN_LOCATION);
4730 label_true = create_artificial_label (UNKNOWN_LOCATION);
4731 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4733 arm1 = TREE_OPERAND (predicate, 0);
4734 arm2 = TREE_OPERAND (predicate, 1);
4735 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4736 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4738 else
4740 arm1 = predicate;
4741 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4742 arm2 = boolean_false_node;
4743 pred_code = NE_EXPR;
4745 if (maybe_simt)
4747 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4748 c = fold_convert (integer_type_node, c);
4749 simtcond = create_tmp_var (integer_type_node);
4750 gimplify_assign (simtcond, c, stmt_list);
4751 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4752 1, simtcond);
4753 c = create_tmp_var (integer_type_node);
4754 gimple_call_set_lhs (g, c);
4755 gimple_seq_add_stmt (stmt_list, g);
4756 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4757 label_true, label);
4759 else
4760 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4761 gimple_seq_add_stmt (stmt_list, stmt);
4762 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4765 for (c = clauses; c ;)
4767 tree var, new_var;
4768 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4770 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4771 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4772 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4774 var = OMP_CLAUSE_DECL (c);
4775 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4776 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4777 && is_taskloop_ctx (ctx))
4779 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4780 new_var = lookup_decl (var, ctx->outer);
4782 else
4784 new_var = lookup_decl (var, ctx);
4785 /* Avoid uninitialized warnings for lastprivate and
4786 for linear iterators. */
4787 if (predicate
4788 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4789 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4790 TREE_NO_WARNING (new_var) = 1;
4793 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4795 tree val = DECL_VALUE_EXPR (new_var);
4796 if (TREE_CODE (val) == ARRAY_REF
4797 && VAR_P (TREE_OPERAND (val, 0))
4798 && lookup_attribute ("omp simd array",
4799 DECL_ATTRIBUTES (TREE_OPERAND (val,
4800 0))))
4802 if (lastlane == NULL)
4804 lastlane = create_tmp_var (unsigned_type_node);
4805 gcall *g
4806 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4807 2, simduid,
4808 TREE_OPERAND (val, 1));
4809 gimple_call_set_lhs (g, lastlane);
4810 gimple_seq_add_stmt (stmt_list, g);
4812 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4813 TREE_OPERAND (val, 0), lastlane,
4814 NULL_TREE, NULL_TREE);
4817 else if (maybe_simt)
4819 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4820 ? DECL_VALUE_EXPR (new_var)
4821 : new_var);
4822 if (simtlast == NULL)
4824 simtlast = create_tmp_var (unsigned_type_node);
4825 gcall *g = gimple_build_call_internal
4826 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4827 gimple_call_set_lhs (g, simtlast);
4828 gimple_seq_add_stmt (stmt_list, g);
4830 x = build_call_expr_internal_loc
4831 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4832 TREE_TYPE (val), 2, val, simtlast);
4833 new_var = unshare_expr (new_var);
4834 gimplify_assign (new_var, x, stmt_list);
4835 new_var = unshare_expr (new_var);
4838 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4839 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4841 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4842 gimple_seq_add_seq (stmt_list,
4843 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4844 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4846 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4847 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4849 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4850 gimple_seq_add_seq (stmt_list,
4851 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4852 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4855 x = NULL_TREE;
4856 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4857 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4859 gcc_checking_assert (is_taskloop_ctx (ctx));
4860 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4861 ctx->outer->outer);
4862 if (is_global_var (ovar))
4863 x = ovar;
4865 if (!x)
4866 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4867 if (omp_is_reference (var))
4868 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4869 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4870 gimplify_and_add (x, stmt_list);
4872 c = OMP_CLAUSE_CHAIN (c);
4873 if (c == NULL && !par_clauses)
4875 /* If this was a workshare clause, see if it had been combined
4876 with its parallel. In that case, continue looking for the
4877 clauses also on the parallel statement itself. */
4878 if (is_parallel_ctx (ctx))
4879 break;
4881 ctx = ctx->outer;
4882 if (ctx == NULL || !is_parallel_ctx (ctx))
4883 break;
4885 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4886 OMP_CLAUSE_LASTPRIVATE);
4887 par_clauses = true;
4891 if (label)
4892 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4895 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4896 (which might be a placeholder). INNER is true if this is an inner
4897 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4898 join markers. Generate the before-loop forking sequence in
4899 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
4900 general form of these sequences is
4902 GOACC_REDUCTION_SETUP
4903 GOACC_FORK
4904 GOACC_REDUCTION_INIT
4906 GOACC_REDUCTION_FINI
4907 GOACC_JOIN
4908 GOACC_REDUCTION_TEARDOWN. */
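/* As an illustrative sketch, a gang-level reduction(+:r) lowers to
   roughly (argument order schematic):

     v1 = .GOACC_REDUCTION (SETUP, ref_to_res, r, level, +, offset);
     .GOACC_FORK
     v2 = .GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, offset);
     ... loop body using the private copy ...
     v3 = .GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, offset);
     .GOACC_JOIN
     r  = .GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, offset);

   matching the setup/init/fini/teardown calls built below.  */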
4910 static void
4911 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4912 gcall *fork, gcall *join, gimple_seq *fork_seq,
4913 gimple_seq *join_seq, omp_context *ctx)
4915 gimple_seq before_fork = NULL;
4916 gimple_seq after_fork = NULL;
4917 gimple_seq before_join = NULL;
4918 gimple_seq after_join = NULL;
4919 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4920 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4921 unsigned offset = 0;
4923 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4924 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4926 tree orig = OMP_CLAUSE_DECL (c);
4927 tree var = maybe_lookup_decl (orig, ctx);
4928 tree ref_to_res = NULL_TREE;
4929 tree incoming, outgoing, v1, v2, v3;
4930 bool is_private = false;
4932 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4933 if (rcode == MINUS_EXPR)
4934 rcode = PLUS_EXPR;
4935 else if (rcode == TRUTH_ANDIF_EXPR)
4936 rcode = BIT_AND_EXPR;
4937 else if (rcode == TRUTH_ORIF_EXPR)
4938 rcode = BIT_IOR_EXPR;
4939 tree op = build_int_cst (unsigned_type_node, rcode);
4941 if (!var)
4942 var = orig;
4944 incoming = outgoing = var;
4946 if (!inner)
4948 /* See if an outer construct also reduces this variable. */
4949 omp_context *outer = ctx;
4951 while (omp_context *probe = outer->outer)
4953 enum gimple_code type = gimple_code (probe->stmt);
4954 tree cls;
4956 switch (type)
4958 case GIMPLE_OMP_FOR:
4959 cls = gimple_omp_for_clauses (probe->stmt);
4960 break;
4962 case GIMPLE_OMP_TARGET:
4963 if (gimple_omp_target_kind (probe->stmt)
4964 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4965 goto do_lookup;
4967 cls = gimple_omp_target_clauses (probe->stmt);
4968 break;
4970 default:
4971 goto do_lookup;
4974 outer = probe;
4975 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4976 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4977 && orig == OMP_CLAUSE_DECL (cls))
4979 incoming = outgoing = lookup_decl (orig, probe);
4980 goto has_outer_reduction;
4982 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4983 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4984 && orig == OMP_CLAUSE_DECL (cls))
4986 is_private = true;
4987 goto do_lookup;
4991 do_lookup:
4992 /* This is the outermost construct with this reduction;
4993 see if there's a mapping for it. */
4994 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4995 && maybe_lookup_field (orig, outer) && !is_private)
4997 ref_to_res = build_receiver_ref (orig, false, outer);
4998 if (omp_is_reference (orig))
4999 ref_to_res = build_simple_mem_ref (ref_to_res);
5001 tree type = TREE_TYPE (var);
5002 if (POINTER_TYPE_P (type))
5003 type = TREE_TYPE (type);
5005 outgoing = var;
5006 incoming = omp_reduction_init_op (loc, rcode, type);
5008 else
5010 /* Try to look at enclosing contexts for reduction var,
5011 use original if no mapping found. */
5012 tree t = NULL_TREE;
5013 omp_context *c = ctx->outer;
5014 while (c && !t)
5016 t = maybe_lookup_decl (orig, c);
5017 c = c->outer;
5019 incoming = outgoing = (t ? t : orig);
5022 has_outer_reduction:;
5025 if (!ref_to_res)
5026 ref_to_res = integer_zero_node;
5028 if (omp_is_reference (orig))
5030 tree type = TREE_TYPE (var);
5031 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5033 if (!inner)
5035 tree x = create_tmp_var (TREE_TYPE (type), id);
5036 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5039 v1 = create_tmp_var (type, id);
5040 v2 = create_tmp_var (type, id);
5041 v3 = create_tmp_var (type, id);
5043 gimplify_assign (v1, var, fork_seq);
5044 gimplify_assign (v2, var, fork_seq);
5045 gimplify_assign (v3, var, fork_seq);
5047 var = build_simple_mem_ref (var);
5048 v1 = build_simple_mem_ref (v1);
5049 v2 = build_simple_mem_ref (v2);
5050 v3 = build_simple_mem_ref (v3);
5051 outgoing = build_simple_mem_ref (outgoing);
5053 if (!TREE_CONSTANT (incoming))
5054 incoming = build_simple_mem_ref (incoming);
5056 else
5057 v1 = v2 = v3 = var;
5059 /* Determine position in reduction buffer, which may be used
5060 by the target. */
5061 machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5062 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5063 offset = (offset + align - 1) & ~(align - 1);
5064 tree off = build_int_cst (sizetype, offset);
5065 offset += GET_MODE_SIZE (mode);
5067 if (!init_code)
5069 init_code = build_int_cst (integer_type_node,
5070 IFN_GOACC_REDUCTION_INIT);
5071 fini_code = build_int_cst (integer_type_node,
5072 IFN_GOACC_REDUCTION_FINI);
5073 setup_code = build_int_cst (integer_type_node,
5074 IFN_GOACC_REDUCTION_SETUP);
5075 teardown_code = build_int_cst (integer_type_node,
5076 IFN_GOACC_REDUCTION_TEARDOWN);
5079 tree setup_call
5080 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5081 TREE_TYPE (var), 6, setup_code,
5082 unshare_expr (ref_to_res),
5083 incoming, level, op, off);
5084 tree init_call
5085 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5086 TREE_TYPE (var), 6, init_code,
5087 unshare_expr (ref_to_res),
5088 v1, level, op, off);
5089 tree fini_call
5090 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5091 TREE_TYPE (var), 6, fini_code,
5092 unshare_expr (ref_to_res),
5093 v2, level, op, off);
5094 tree teardown_call
5095 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5096 TREE_TYPE (var), 6, teardown_code,
5097 ref_to_res, v3, level, op, off);
5099 gimplify_assign (v1, setup_call, &before_fork);
5100 gimplify_assign (v2, init_call, &after_fork);
5101 gimplify_assign (v3, fini_call, &before_join);
5102 gimplify_assign (outgoing, teardown_call, &after_join);
5105 /* Now stitch things together. */
5106 gimple_seq_add_seq (fork_seq, before_fork);
5107 if (fork)
5108 gimple_seq_add_stmt (fork_seq, fork);
5109 gimple_seq_add_seq (fork_seq, after_fork);
5111 gimple_seq_add_seq (join_seq, before_join);
5112 if (join)
5113 gimple_seq_add_stmt (join_seq, join);
5114 gimple_seq_add_seq (join_seq, after_join);
5117 /* Generate code to implement the REDUCTION clauses. */
5119 static void
5120 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5122 gimple_seq sub_seq = NULL;
5123 gimple *stmt;
5124 tree x, c;
5125 int count = 0;
5127 /* OpenACC loop reductions are handled elsewhere. */
5128 if (is_gimple_omp_oacc (ctx->stmt))
5129 return;
5131 /* SIMD reductions are handled in lower_rec_input_clauses. */
5132 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5133 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5134 return;
5136 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5137 update in that case, otherwise use a lock. */
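  /* E.g. (hypothetical), a lone reduction(+:s) can be merged as
       #pragma omp atomic
       s = s + s_priv;
     whereas multiple reductions, array sections or UDRs are merged
     inside one GOMP_atomic_start ()/GOMP_atomic_end () region.  */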
5138 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5139 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5141 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5142 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5144 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5145 count = -1;
5146 break;
5148 count++;
5151 if (count == 0)
5152 return;
5154 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5156 tree var, ref, new_var, orig_var;
5157 enum tree_code code;
5158 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5160 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5161 continue;
5163 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5164 orig_var = var = OMP_CLAUSE_DECL (c);
5165 if (TREE_CODE (var) == MEM_REF)
5167 var = TREE_OPERAND (var, 0);
5168 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5169 var = TREE_OPERAND (var, 0);
5170 if (TREE_CODE (var) == ADDR_EXPR)
5171 var = TREE_OPERAND (var, 0);
5172 else
5174 /* If this is a pointer- or reference-based array
5175 section, the var could be private in the outer
5176 context, e.g. on an orphaned loop construct. Pretend this
5177 is the private variable's outer reference. */
5178 ccode = OMP_CLAUSE_PRIVATE;
5179 if (TREE_CODE (var) == INDIRECT_REF)
5180 var = TREE_OPERAND (var, 0);
5182 orig_var = var;
5183 if (is_variable_sized (var))
5185 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5186 var = DECL_VALUE_EXPR (var);
5187 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5188 var = TREE_OPERAND (var, 0);
5189 gcc_assert (DECL_P (var));
5192 new_var = lookup_decl (var, ctx);
5193 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5194 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5195 ref = build_outer_var_ref (var, ctx, ccode);
5196 code = OMP_CLAUSE_REDUCTION_CODE (c);
5198 /* reduction(-:var) sums up the partial results, so it acts
5199 identically to reduction(+:var). */
5200 if (code == MINUS_EXPR)
5201 code = PLUS_EXPR;
5203 if (count == 1)
5205 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5207 addr = save_expr (addr);
5208 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5209 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5210 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5211 gimplify_and_add (x, stmt_seqp);
5212 return;
5214 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5216 tree d = OMP_CLAUSE_DECL (c);
5217 tree type = TREE_TYPE (d);
5218 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5219 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5220 tree ptype = build_pointer_type (TREE_TYPE (type));
5221 tree bias = TREE_OPERAND (d, 1);
5222 d = TREE_OPERAND (d, 0);
5223 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5225 tree b = TREE_OPERAND (d, 1);
5226 b = maybe_lookup_decl (b, ctx);
5227 if (b == NULL)
5229 b = TREE_OPERAND (d, 1);
5230 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5232 if (integer_zerop (bias))
5233 bias = b;
5234 else
5236 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5237 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5238 TREE_TYPE (b), b, bias);
5240 d = TREE_OPERAND (d, 0);
5242 /* For a reference, build_outer_var_ref already performs this, so
5243 only new_var needs a dereference. */
5244 if (TREE_CODE (d) == INDIRECT_REF)
5246 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5247 gcc_assert (omp_is_reference (var) && var == orig_var);
5249 else if (TREE_CODE (d) == ADDR_EXPR)
5251 if (orig_var == var)
5253 new_var = build_fold_addr_expr (new_var);
5254 ref = build_fold_addr_expr (ref);
5257 else
5259 gcc_assert (orig_var == var);
5260 if (omp_is_reference (var))
5261 ref = build_fold_addr_expr (ref);
5263 if (DECL_P (v))
5265 tree t = maybe_lookup_decl (v, ctx);
5266 if (t)
5267 v = t;
5268 else
5269 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5270 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5272 if (!integer_zerop (bias))
5274 bias = fold_convert_loc (clause_loc, sizetype, bias);
5275 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5276 TREE_TYPE (new_var), new_var,
5277 unshare_expr (bias));
5278 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5279 TREE_TYPE (ref), ref, bias);
5281 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5282 ref = fold_convert_loc (clause_loc, ptype, ref);
5283 tree m = create_tmp_var (ptype, NULL);
5284 gimplify_assign (m, new_var, stmt_seqp);
5285 new_var = m;
5286 m = create_tmp_var (ptype, NULL);
5287 gimplify_assign (m, ref, stmt_seqp);
5288 ref = m;
5289 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5290 tree body = create_artificial_label (UNKNOWN_LOCATION);
5291 tree end = create_artificial_label (UNKNOWN_LOCATION);
5292 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5293 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5294 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5295 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5297 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5298 tree decl_placeholder
5299 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5300 SET_DECL_VALUE_EXPR (placeholder, out);
5301 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5302 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5303 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5304 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5305 gimple_seq_add_seq (&sub_seq,
5306 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5307 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5308 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5309 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5311 else
5313 x = build2 (code, TREE_TYPE (out), out, priv);
5314 out = unshare_expr (out);
5315 gimplify_assign (out, x, &sub_seq);
5317 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5318 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5319 gimple_seq_add_stmt (&sub_seq, g);
5320 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5321 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5322 gimple_seq_add_stmt (&sub_seq, g);
5323 g = gimple_build_assign (i, PLUS_EXPR, i,
5324 build_int_cst (TREE_TYPE (i), 1));
5325 gimple_seq_add_stmt (&sub_seq, g);
5326 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5327 gimple_seq_add_stmt (&sub_seq, g);
5328 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5330 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5332 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5334 if (omp_is_reference (var)
5335 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5336 TREE_TYPE (ref)))
5337 ref = build_fold_addr_expr_loc (clause_loc, ref);
5338 SET_DECL_VALUE_EXPR (placeholder, ref);
5339 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5340 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5341 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5342 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5343 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5345 else
5347 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5348 ref = build_outer_var_ref (var, ctx);
5349 gimplify_assign (ref, x, &sub_seq);
5353 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5354 0);
5355 gimple_seq_add_stmt (stmt_seqp, stmt);
5357 gimple_seq_add_seq (stmt_seqp, sub_seq);
5359 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5360 0);
5361 gimple_seq_add_stmt (stmt_seqp, stmt);
5365 /* Generate code to implement the COPYPRIVATE clauses. */
5367 static void
5368 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5369 omp_context *ctx)
5371 tree c;
5373 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5375 tree var, new_var, ref, x;
5376 bool by_ref;
5377 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5379 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5380 continue;
5382 var = OMP_CLAUSE_DECL (c);
5383 by_ref = use_pointer_for_field (var, NULL);
5385 ref = build_sender_ref (var, ctx);
5386 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5387 if (by_ref)
5389 x = build_fold_addr_expr_loc (clause_loc, new_var);
5390 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5392 gimplify_assign (ref, x, slist);
5394 ref = build_receiver_ref (var, false, ctx);
5395 if (by_ref)
5397 ref = fold_convert_loc (clause_loc,
5398 build_pointer_type (TREE_TYPE (new_var)),
5399 ref);
5400 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5402 if (omp_is_reference (var))
5404 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5405 ref = build_simple_mem_ref_loc (clause_loc, ref);
5406 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5408 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5409 gimplify_and_add (x, rlist);
5414 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5415 and REDUCTION from the sender (aka parent) side. */
5417 static void
5418 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5419 omp_context *ctx)
5421 tree c, t;
5422 int ignored_looptemp = 0;
5423 bool is_taskloop = false;
5425 /* For taskloop, ignore the first two _looptemp_ clauses; those are initialized
5426 by GOMP_taskloop. */
5427 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5429 ignored_looptemp = 2;
5430 is_taskloop = true;
5433 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5435 tree val, ref, x, var;
5436 bool by_ref, do_in = false, do_out = false;
5437 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5439 switch (OMP_CLAUSE_CODE (c))
5441 case OMP_CLAUSE_PRIVATE:
5442 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5443 break;
5444 continue;
5445 case OMP_CLAUSE_FIRSTPRIVATE:
5446 case OMP_CLAUSE_COPYIN:
5447 case OMP_CLAUSE_LASTPRIVATE:
5448 case OMP_CLAUSE_REDUCTION:
5449 break;
5450 case OMP_CLAUSE_SHARED:
5451 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5452 break;
5453 continue;
5454 case OMP_CLAUSE__LOOPTEMP_:
5455 if (ignored_looptemp)
5457 ignored_looptemp--;
5458 continue;
5460 break;
5461 default:
5462 continue;
5465 val = OMP_CLAUSE_DECL (c);
5466 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5467 && TREE_CODE (val) == MEM_REF)
5469 val = TREE_OPERAND (val, 0);
5470 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5471 val = TREE_OPERAND (val, 0);
5472 if (TREE_CODE (val) == INDIRECT_REF
5473 || TREE_CODE (val) == ADDR_EXPR)
5474 val = TREE_OPERAND (val, 0);
5475 if (is_variable_sized (val))
5476 continue;
5479 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5480 outer taskloop region. */
5481 omp_context *ctx_for_o = ctx;
5482 if (is_taskloop
5483 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5484 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5485 ctx_for_o = ctx->outer;
5487 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5489 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5490 && is_global_var (var))
5491 continue;
5493 t = omp_member_access_dummy_var (var);
5494 if (t)
5496 var = DECL_VALUE_EXPR (var);
5497 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5498 if (o != t)
5499 var = unshare_and_remap (var, t, o);
5500 else
5501 var = unshare_expr (var);
5504 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5506 /* Handle taskloop firstprivate/lastprivate, where the
5507 lastprivate on GIMPLE_OMP_TASK is represented as
5508 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5509 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5510 x = omp_build_component_ref (ctx->sender_decl, f);
5511 if (use_pointer_for_field (val, ctx))
5512 var = build_fold_addr_expr (var);
5513 gimplify_assign (x, var, ilist);
5514 DECL_ABSTRACT_ORIGIN (f) = NULL;
5515 continue;
5518 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5519 || val == OMP_CLAUSE_DECL (c))
5520 && is_variable_sized (val))
5521 continue;
5522 by_ref = use_pointer_for_field (val, NULL);
5524 switch (OMP_CLAUSE_CODE (c))
5526 case OMP_CLAUSE_FIRSTPRIVATE:
5527 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5528 && !by_ref
5529 && is_task_ctx (ctx))
5530 TREE_NO_WARNING (var) = 1;
5531 do_in = true;
5532 break;
5534 case OMP_CLAUSE_PRIVATE:
5535 case OMP_CLAUSE_COPYIN:
5536 case OMP_CLAUSE__LOOPTEMP_:
5537 do_in = true;
5538 break;
5540 case OMP_CLAUSE_LASTPRIVATE:
5541 if (by_ref || omp_is_reference (val))
5543 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5544 continue;
5545 do_in = true;
5547 else
5549 do_out = true;
5550 if (lang_hooks.decls.omp_private_outer_ref (val))
5551 do_in = true;
5553 break;
5555 case OMP_CLAUSE_REDUCTION:
5556 do_in = true;
5557 if (val == OMP_CLAUSE_DECL (c))
5558 do_out = !(by_ref || omp_is_reference (val));
5559 else
5560 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5561 break;
5563 default:
5564 gcc_unreachable ();
5567 if (do_in)
5569 ref = build_sender_ref (val, ctx);
5570 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5571 gimplify_assign (ref, x, ilist);
5572 if (is_task_ctx (ctx))
5573 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5576 if (do_out)
5578 ref = build_sender_ref (val, ctx);
5579 gimplify_assign (var, ref, olist);
5584 /* Generate code to implement SHARED from the sender (aka parent)
5585 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5586 list things that got automatically shared. */
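/* In outline (illustrative): for each field F of the record type we
   recover the original variable from DECL_ABSTRACT_ORIGIN (F) and emit

     .omp_data_o.x = x;     (or = &x when passed by reference)
     ... child runs ...
     x = .omp_data_o.x;     (only if the child may have changed it)

   walking TYPE_FIELDS rather than the clause list.  */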
5588 static void
5589 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5591 tree var, ovar, nvar, t, f, x, record_type;
5593 if (ctx->record_type == NULL)
5594 return;
5596 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5597 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5599 ovar = DECL_ABSTRACT_ORIGIN (f);
5600 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5601 continue;
5603 nvar = maybe_lookup_decl (ovar, ctx);
5604 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5605 continue;
5607 /* If CTX is a nested parallel directive, find the immediately
5608 enclosing parallel or workshare construct that contains a
5609 mapping for OVAR. */
5610 var = lookup_decl_in_outer_ctx (ovar, ctx);
5612 t = omp_member_access_dummy_var (var);
5613 if (t)
5615 var = DECL_VALUE_EXPR (var);
5616 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5617 if (o != t)
5618 var = unshare_and_remap (var, t, o);
5619 else
5620 var = unshare_expr (var);
5623 if (use_pointer_for_field (ovar, ctx))
5625 x = build_sender_ref (ovar, ctx);
5626 var = build_fold_addr_expr (var);
5627 gimplify_assign (x, var, ilist);
5629 else
5631 x = build_sender_ref (ovar, ctx);
5632 gimplify_assign (x, var, ilist);
5634 if (!TREE_READONLY (var)
5635 /* We don't need to receive a new reference to a result
5636 or parm decl. In fact we may not store to it as we will
5637 invalidate any pending RSO and generate wrong gimple
5638 during inlining. */
5639 && !((TREE_CODE (var) == RESULT_DECL
5640 || TREE_CODE (var) == PARM_DECL)
5641 && DECL_BY_REFERENCE (var)))
5643 x = build_sender_ref (ovar, ctx);
5644 gimplify_assign (var, x, olist);
5650 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5651 other information that must be processed by the target compiler.
5652 Return the maximum number of dimensions the associated loop might
5653 be partitioned over. */
5655 static unsigned
5656 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5657 gimple_seq *seq, omp_context *ctx)
5659 unsigned levels = 0;
5660 unsigned tag = 0;
5661 tree gang_static = NULL_TREE;
5662 auto_vec<tree, 5> args;
5664 args.quick_push (build_int_cst
5665 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5666 args.quick_push (ddvar);
5667 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5669 switch (OMP_CLAUSE_CODE (c))
5671 case OMP_CLAUSE_GANG:
5672 tag |= OLF_DIM_GANG;
5673 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5674 /* static:* is represented by -1, and we can ignore it, as
5675 scheduling is always static. */
5676 if (gang_static && integer_minus_onep (gang_static))
5677 gang_static = NULL_TREE;
5678 levels++;
5679 break;
5681 case OMP_CLAUSE_WORKER:
5682 tag |= OLF_DIM_WORKER;
5683 levels++;
5684 break;
5686 case OMP_CLAUSE_VECTOR:
5687 tag |= OLF_DIM_VECTOR;
5688 levels++;
5689 break;
5691 case OMP_CLAUSE_SEQ:
5692 tag |= OLF_SEQ;
5693 break;
5695 case OMP_CLAUSE_AUTO:
5696 tag |= OLF_AUTO;
5697 break;
5699 case OMP_CLAUSE_INDEPENDENT:
5700 tag |= OLF_INDEPENDENT;
5701 break;
5703 case OMP_CLAUSE_TILE:
5704 tag |= OLF_TILE;
5705 break;
5707 default:
5708 continue;
5712 if (gang_static)
5714 if (DECL_P (gang_static))
5715 gang_static = build_outer_var_ref (gang_static, ctx);
5716 tag |= OLF_GANG_STATIC;
5719 /* In a parallel region, loops are implicitly INDEPENDENT. */
5720 omp_context *tgt = enclosing_target_ctx (ctx);
5721 if (!tgt || is_oacc_parallel (tgt))
5722 tag |= OLF_INDEPENDENT;
5724 if (tag & OLF_TILE)
5725 /* Tiling could use all 3 levels. */
5726 levels = 3;
5727 else
5729 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5730 Ensure at least one level, or 2 for possible auto
5731 partitioning. */
5732 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5733 << OLF_DIM_BASE) | OLF_SEQ));
5735 if (levels < 1u + maybe_auto)
5736 levels = 1u + maybe_auto;
5739 args.quick_push (build_int_cst (integer_type_node, levels));
5740 args.quick_push (build_int_cst (integer_type_node, tag));
5741 if (gang_static)
5742 args.quick_push (gang_static);
5744 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5745 gimple_set_location (call, loc);
5746 gimple_set_lhs (call, ddvar);
5747 gimple_seq_add_stmt (seq, call);
5749 return levels;
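/* For illustration only: given a hypothetical

     #pragma acc loop gang worker vector

   inside an OpenACC parallel region, the call built above would look
   schematically like

     .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 3, TAG);

   with LEVELS == 3 and TAG combining OLF_DIM_GANG, OLF_DIM_WORKER,
   OLF_DIM_VECTOR and the implicit OLF_INDEPENDENT; no trailing operand
   is present because no gang(static:) expression was given.  */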
5752 /* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, if
5753 non-null, is the partitioning level of the enclosed region. */
5755 static void
5756 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5757 tree tofollow, gimple_seq *seq)
5759 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5760 : IFN_UNIQUE_OACC_TAIL_MARK);
5761 tree marker = build_int_cst (integer_type_node, marker_kind);
5762 int nargs = 2 + (tofollow != NULL_TREE);
5763 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5764 marker, ddvar, tofollow);
5765 gimple_set_location (call, loc);
5766 gimple_set_lhs (call, ddvar);
5767 gimple_seq_add_stmt (seq, call);
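/* Schematically (illustration only), a head marker announcing that
   partitioning level 2 follows is

     .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 2);

   and the matching tail marker is the same call with OACC_TAIL_MARK.  */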
5770 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5771 the loop clauses, from which we extract reductions. Initialize
5772 HEAD and TAIL. */
5774 static void
5775 lower_oacc_head_tail (location_t loc, tree clauses,
5776 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5778 bool inner = false;
5779 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5780 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5782 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5783 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5784 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5786 gcc_assert (count);
5787 for (unsigned done = 1; count; count--, done++)
5789 gimple_seq fork_seq = NULL;
5790 gimple_seq join_seq = NULL;
5792 tree place = build_int_cst (integer_type_node, -1);
5793 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5794 fork_kind, ddvar, place);
5795 gimple_set_location (fork, loc);
5796 gimple_set_lhs (fork, ddvar);
5798 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5799 join_kind, ddvar, place);
5800 gimple_set_location (join, loc);
5801 gimple_set_lhs (join, ddvar);
5803 /* Mark the beginning of this level sequence. */
5804 if (inner)
5805 lower_oacc_loop_marker (loc, ddvar, true,
5806 build_int_cst (integer_type_node, count),
5807 &fork_seq);
5808 lower_oacc_loop_marker (loc, ddvar, false,
5809 build_int_cst (integer_type_node, done),
5810 &join_seq);
5812 lower_oacc_reductions (loc, clauses, place, inner,
5813 fork, join, &fork_seq, &join_seq, ctx);
5815 /* Append this level to head. */
5816 gimple_seq_add_seq (head, fork_seq);
5817 /* Prepend it to tail. */
5818 gimple_seq_add_seq (&join_seq, *tail);
5819 *tail = join_seq;
5821 inner = true;
5824 /* Mark the end of the sequence. */
5825 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5826 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
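/* Illustration: for COUNT == 2 the head sequence built here is roughly

     .data_dep = OACC_HEAD_MARK (...);   // from lower_oacc_head_mark
     .data_dep = OACC_FORK (...);        // outermost level
     .data_dep = OACC_HEAD_MARK (...);   // inner level follows
     .data_dep = OACC_FORK (...);
     .data_dep = OACC_HEAD_MARK (...);   // final marker, no TOFOLLOW

   while the tail sequence mirrors it with OACC_TAIL_MARK/OACC_JOIN in
   the opposite order, and the per-level reduction setup and teardown
   emitted by lower_oacc_reductions interleaved into both.  */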
5829 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5830 catch handler and return it. This prevents programs from violating the
5831 structured block semantics with throws. */
5833 static gimple_seq
5834 maybe_catch_exception (gimple_seq body)
5836 gimple *g;
5837 tree decl;
5839 if (!flag_exceptions)
5840 return body;
5842 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5843 decl = lang_hooks.eh_protect_cleanup_actions ();
5844 else
5845 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5847 g = gimple_build_eh_must_not_throw (decl);
5848 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5849 GIMPLE_TRY_CATCH);
5851 return gimple_seq_alloc_with_stmt (g);
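/* In effect (sketch), with -fexceptions a body B becomes

     try { B } catch { MUST_NOT_THROW: <handler> }

   where the handler decl comes from the language's
   eh_protect_cleanup_actions hook (e.g. std::terminate for C++), or
   falls back to __builtin_trap, so no exception can escape the
   structured block.  */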
5855 /* Routines to lower OMP directives into OMP-GIMPLE. */
5857 /* If ctx is a worksharing context inside of a cancellable parallel
5858 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5859 and conditional branch to parallel's cancel_label to handle
5860 cancellation in the implicit barrier. */
5862 static void
5863 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5865 gimple *omp_return = gimple_seq_last_stmt (*body);
5866 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5867 if (gimple_omp_return_nowait_p (omp_return))
5868 return;
5869 if (ctx->outer
5870 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5871 && ctx->outer->cancellable)
5873 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5874 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5875 tree lhs = create_tmp_var (c_bool_type);
5876 gimple_omp_return_set_lhs (omp_return, lhs);
5877 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5878 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5879 fold_convert (c_bool_type,
5880 boolean_false_node),
5881 ctx->outer->cancel_label, fallthru_label);
5882 gimple_seq_add_stmt (body, g);
5883 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
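/* Sketch of the result: the implicit barrier's GIMPLE_OMP_RETURN gets
   an artificial LHS and is followed by

     if (lhs != 0) goto <parallel's cancel_label>; else goto fallthru;

   so a barrier that observes a pending cancellation branches to the
   enclosing parallel's cancellation cleanup.  */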
5887 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5888 CTX is the enclosing OMP context for the current statement. */
5890 static void
5891 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5893 tree block, control;
5894 gimple_stmt_iterator tgsi;
5895 gomp_sections *stmt;
5896 gimple *t;
5897 gbind *new_stmt, *bind;
5898 gimple_seq ilist, dlist, olist, new_body;
5900 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5902 push_gimplify_context ();
5904 dlist = NULL;
5905 ilist = NULL;
5906 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5907 &ilist, &dlist, ctx, NULL);
5909 new_body = gimple_omp_body (stmt);
5910 gimple_omp_set_body (stmt, NULL);
5911 tgsi = gsi_start (new_body);
5912 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5914 omp_context *sctx;
5915 gimple *sec_start;
5917 sec_start = gsi_stmt (tgsi);
5918 sctx = maybe_lookup_ctx (sec_start);
5919 gcc_assert (sctx);
5921 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5922 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5923 GSI_CONTINUE_LINKING);
5924 gimple_omp_set_body (sec_start, NULL);
5926 if (gsi_one_before_end_p (tgsi))
5928 gimple_seq l = NULL;
5929 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5930 &l, ctx);
5931 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5932 gimple_omp_section_set_last (sec_start);
5935 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5936 GSI_CONTINUE_LINKING);
5939 block = make_node (BLOCK);
5940 bind = gimple_build_bind (NULL, new_body, block);
5942 olist = NULL;
5943 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5945 block = make_node (BLOCK);
5946 new_stmt = gimple_build_bind (NULL, NULL, block);
5947 gsi_replace (gsi_p, new_stmt, true);
5949 pop_gimplify_context (new_stmt);
5950 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5951 BLOCK_VARS (block) = gimple_bind_vars (bind);
5952 if (BLOCK_VARS (block))
5953 TREE_USED (block) = 1;
5955 new_body = NULL;
5956 gimple_seq_add_seq (&new_body, ilist);
5957 gimple_seq_add_stmt (&new_body, stmt);
5958 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5959 gimple_seq_add_stmt (&new_body, bind);
5961 control = create_tmp_var (unsigned_type_node, ".section");
5962 t = gimple_build_omp_continue (control, control);
5963 gimple_omp_sections_set_control (stmt, control);
5964 gimple_seq_add_stmt (&new_body, t);
5966 gimple_seq_add_seq (&new_body, olist);
5967 if (ctx->cancellable)
5968 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5969 gimple_seq_add_seq (&new_body, dlist);
5971 new_body = maybe_catch_exception (new_body);
5973 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5974 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5975 t = gimple_build_omp_return (nowait);
5976 gimple_seq_add_stmt (&new_body, t);
5977 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5979 gimple_bind_set_body (new_stmt, new_body);
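/* The lowered shape is, schematically (illustration only):

     <ilist: data-sharing setup>
     GIMPLE_OMP_SECTIONS <clauses, control var .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { <section bodies, last one flagged, lastprivate code> }
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reductions> [cancel_label:] <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait]

   pass_expand_omp later turns this into a switch driven by the
   .section value returned by GOMP_sections_start/GOMP_sections_next.  */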
5983 /* A subroutine of lower_omp_single. Expand the simple form of
5984 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5986 if (GOMP_single_start ())
5987 BODY;
5988 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5990 FIXME. It may be better to delay expanding the logic of this until
5991 pass_expand_omp. The expanded logic may make the job more difficult
5992 for a synchronization analysis pass. */
5994 static void
5995 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5997 location_t loc = gimple_location (single_stmt);
5998 tree tlabel = create_artificial_label (loc);
5999 tree flabel = create_artificial_label (loc);
6000 gimple *call, *cond;
6001 tree lhs, decl;
6003 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6004 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6005 call = gimple_build_call (decl, 0);
6006 gimple_call_set_lhs (call, lhs);
6007 gimple_seq_add_stmt (pre_p, call);
6009 cond = gimple_build_cond (EQ_EXPR, lhs,
6010 fold_convert_loc (loc, TREE_TYPE (lhs),
6011 boolean_true_node),
6012 tlabel, flabel);
6013 gimple_seq_add_stmt (pre_p, cond);
6014 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6015 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6016 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6020 /* A subroutine of lower_omp_single. Expand the simple form of
6021 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6023 #pragma omp single copyprivate (a, b, c)
6025 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6028 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6030 BODY;
6031 copyout.a = a;
6032 copyout.b = b;
6033 copyout.c = c;
6034 GOMP_single_copy_end (&copyout);
6036 else
6038 a = copyout_p->a;
6039 b = copyout_p->b;
6040 c = copyout_p->c;
6042 GOMP_barrier ();
6045 FIXME. It may be better to delay expanding the logic of this until
6046 pass_expand_omp. The expanded logic may make the job more difficult
6047 for a synchronization analysis pass. */
6049 static void
6050 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6051 omp_context *ctx)
6053 tree ptr_type, t, l0, l1, l2, bfn_decl;
6054 gimple_seq copyin_seq;
6055 location_t loc = gimple_location (single_stmt);
6057 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6059 ptr_type = build_pointer_type (ctx->record_type);
6060 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6062 l0 = create_artificial_label (loc);
6063 l1 = create_artificial_label (loc);
6064 l2 = create_artificial_label (loc);
6066 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6067 t = build_call_expr_loc (loc, bfn_decl, 0);
6068 t = fold_convert_loc (loc, ptr_type, t);
6069 gimplify_assign (ctx->receiver_decl, t, pre_p);
6071 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6072 build_int_cst (ptr_type, 0));
6073 t = build3 (COND_EXPR, void_type_node, t,
6074 build_and_jump (&l0), build_and_jump (&l1));
6075 gimplify_and_add (t, pre_p);
6077 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6079 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6081 copyin_seq = NULL;
6082 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6083 &copyin_seq, ctx);
6085 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6086 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6087 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6088 gimplify_and_add (t, pre_p);
6090 t = build_and_jump (&l2);
6091 gimplify_and_add (t, pre_p);
6093 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6095 gimple_seq_add_seq (pre_p, copyin_seq);
6097 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6101 /* Expand code for an OpenMP single directive. */
6103 static void
6104 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6106 tree block;
6107 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6108 gbind *bind;
6109 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6111 push_gimplify_context ();
6113 block = make_node (BLOCK);
6114 bind = gimple_build_bind (NULL, NULL, block);
6115 gsi_replace (gsi_p, bind, true);
6116 bind_body = NULL;
6117 dlist = NULL;
6118 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6119 &bind_body, &dlist, ctx, NULL);
6120 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6122 gimple_seq_add_stmt (&bind_body, single_stmt);
6124 if (ctx->record_type)
6125 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6126 else
6127 lower_omp_single_simple (single_stmt, &bind_body);
6129 gimple_omp_set_body (single_stmt, NULL);
6131 gimple_seq_add_seq (&bind_body, dlist);
6133 bind_body = maybe_catch_exception (bind_body);
6135 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6136 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6137 gimple *g = gimple_build_omp_return (nowait);
6138 gimple_seq_add_stmt (&bind_body_tail, g);
6139 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6140 if (ctx->record_type)
6142 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6143 tree clobber = build_constructor (ctx->record_type, NULL);
6144 TREE_THIS_VOLATILE (clobber) = 1;
6145 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6146 clobber), GSI_SAME_STMT);
6148 gimple_seq_add_seq (&bind_body, bind_body_tail);
6149 gimple_bind_set_body (bind, bind_body);
6151 pop_gimplify_context (bind);
6153 gimple_bind_append_vars (bind, ctx->block_vars);
6154 BLOCK_VARS (block) = ctx->block_vars;
6155 if (BLOCK_VARS (block))
6156 TREE_USED (block) = 1;
6160 /* Expand code for an OpenMP master directive. */
6162 static void
6163 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6165 tree block, lab = NULL, x, bfn_decl;
6166 gimple *stmt = gsi_stmt (*gsi_p);
6167 gbind *bind;
6168 location_t loc = gimple_location (stmt);
6169 gimple_seq tseq;
6171 push_gimplify_context ();
6173 block = make_node (BLOCK);
6174 bind = gimple_build_bind (NULL, NULL, block);
6175 gsi_replace (gsi_p, bind, true);
6176 gimple_bind_add_stmt (bind, stmt);
6178 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6179 x = build_call_expr_loc (loc, bfn_decl, 0);
6180 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6181 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6182 tseq = NULL;
6183 gimplify_and_add (x, &tseq);
6184 gimple_bind_add_seq (bind, tseq);
6186 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6187 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6188 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6189 gimple_omp_set_body (stmt, NULL);
6191 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6193 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6195 pop_gimplify_context (bind);
6197 gimple_bind_append_vars (bind, ctx->block_vars);
6198 BLOCK_VARS (block) = ctx->block_vars;
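/* I.e. the master construct reduces to a thread-number test (sketch):

     if (omp_get_thread_num () != 0) goto lab;
     <body>
   lab:

   with no implied barrier; GIMPLE_OMP_RETURN (true) marks the region
   as nowait.  */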
6202 /* Expand code for an OpenMP taskgroup directive. */
6204 static void
6205 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6207 gimple *stmt = gsi_stmt (*gsi_p);
6208 gcall *x;
6209 gbind *bind;
6210 tree block = make_node (BLOCK);
6212 bind = gimple_build_bind (NULL, NULL, block);
6213 gsi_replace (gsi_p, bind, true);
6214 gimple_bind_add_stmt (bind, stmt);
6216 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6217 0);
6218 gimple_bind_add_stmt (bind, x);
6220 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6221 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6222 gimple_omp_set_body (stmt, NULL);
6224 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6226 gimple_bind_append_vars (bind, ctx->block_vars);
6227 BLOCK_VARS (block) = ctx->block_vars;
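/* Sketch of the result (gimplification has already wrapped the body in
   a GIMPLE_TRY_FINALLY whose cleanup calls GOMP_taskgroup_end):

     bind {
       GIMPLE_OMP_TASKGROUP
       GOMP_taskgroup_start ();
       try { <body> } finally { GOMP_taskgroup_end (); }
       GIMPLE_OMP_RETURN (nowait)
     }  */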
6231 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if possible. */
6233 static void
6234 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6235 omp_context *ctx)
6237 struct omp_for_data fd;
6238 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6239 return;
6241 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6242 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6243 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6244 if (!fd.ordered)
6245 return;
6247 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6248 tree c = gimple_omp_ordered_clauses (ord_stmt);
6249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6250 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6252 /* Merge depend clauses from multiple adjacent
6253 #pragma omp ordered depend(sink:...) constructs
6254 into one #pragma omp ordered depend(sink:...), so that
6255 we can optimize them together. */
6256 gimple_stmt_iterator gsi = *gsi_p;
6257 gsi_next (&gsi);
6258 while (!gsi_end_p (gsi))
6260 gimple *stmt = gsi_stmt (gsi);
6261 if (is_gimple_debug (stmt)
6262 || gimple_code (stmt) == GIMPLE_NOP)
6264 gsi_next (&gsi);
6265 continue;
6267 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6268 break;
6269 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6270 c = gimple_omp_ordered_clauses (ord_stmt2);
6271 if (c == NULL_TREE
6272 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6273 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6274 break;
6275 while (*list_p)
6276 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6277 *list_p = c;
6278 gsi_remove (&gsi, true);
6282 /* Canonicalize sink dependence clauses into one folded clause if
6283 possible.
6285 The basic algorithm is to create a sink vector whose first
6286 element is the GCD of all the first elements, and whose remaining
6287 elements are the minimum of the subsequent columns.
6289 We ignore dependence vectors whose first element is zero because
6290 such dependencies are known to be executed by the same thread.
6292 We take into account the direction of the loop, so a minimum
6293 becomes a maximum if the loop is iterating forwards. We also
6294 ignore sink clauses where the loop direction is unknown, or where
6295 the offsets are clearly invalid because they are not a multiple
6296 of the loop increment.
6298 For example:
6300 #pragma omp for ordered(2)
6301 for (i=0; i < N; ++i)
6302 for (j=0; j < M; ++j)
6304 #pragma omp ordered \
6305 depend(sink:i-8,j-2) \
6306 depend(sink:i,j-1) \ // Completely ignored because i+0.
6307 depend(sink:i-4,j-3) \
6308 depend(sink:i-6,j-4)
6309 #pragma omp ordered depend(source)
6312 Folded clause is:
6314 depend(sink:-gcd(8,4,6),-min(2,3,4))
6315 -or-
6316 depend(sink:-2,-2)
6319 /* FIXME: Computing GCDs where the first element is zero is
6320 non-trivial in the presence of collapsed loops. Do this later. */
6321 if (fd.collapse > 1)
6322 return;
6324 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6326 /* wide_int is not a POD so it must be default-constructed. */
6327 for (unsigned i = 0; i != 2 * len - 1; ++i)
6328 new (static_cast<void*>(folded_deps + i)) wide_int ();
6330 tree folded_dep = NULL_TREE;
6331 /* TRUE if the first dimension's offset is negative. */
6332 bool neg_offset_p = false;
6334 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6335 unsigned int i;
6336 while ((c = *list_p) != NULL)
6338 bool remove = false;
6340 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6341 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6342 goto next_ordered_clause;
6344 tree vec;
6345 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6346 vec && TREE_CODE (vec) == TREE_LIST;
6347 vec = TREE_CHAIN (vec), ++i)
6349 gcc_assert (i < len);
6351 /* omp_extract_for_data has canonicalized the condition. */
6352 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6353 || fd.loops[i].cond_code == GT_EXPR);
6354 bool forward = fd.loops[i].cond_code == LT_EXPR;
6355 bool maybe_lexically_later = true;
6357 /* While the committee makes up its mind, bail if we have any
6358 non-constant steps. */
6359 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6360 goto lower_omp_ordered_ret;
6362 tree itype = TREE_TYPE (TREE_VALUE (vec));
6363 if (POINTER_TYPE_P (itype))
6364 itype = sizetype;
6365 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6366 TYPE_PRECISION (itype),
6367 TYPE_SIGN (itype));
6369 /* Ignore invalid offsets that are not multiples of the step. */
6370 if (!wi::multiple_of_p
6371 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6372 UNSIGNED))
6374 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6375 "ignoring sink clause with offset that is not "
6376 "a multiple of the loop step");
6377 remove = true;
6378 goto next_ordered_clause;
6381 /* Calculate the first dimension. The first dimension of
6382 the folded dependency vector is the GCD of the first
6383 elements, while ignoring any first elements whose offset
6384 is 0. */
6385 if (i == 0)
6387 /* Ignore dependence vectors whose first dimension is 0. */
6388 if (offset == 0)
6390 remove = true;
6391 goto next_ordered_clause;
6393 else
6395 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6397 error_at (OMP_CLAUSE_LOCATION (c),
6398 "first offset must be in opposite direction "
6399 "of loop iterations");
6400 goto lower_omp_ordered_ret;
6402 if (forward)
6403 offset = -offset;
6404 neg_offset_p = forward;
6405 /* Initialize the first time around. */
6406 if (folded_dep == NULL_TREE)
6408 folded_dep = c;
6409 folded_deps[0] = offset;
6411 else
6412 folded_deps[0] = wi::gcd (folded_deps[0],
6413 offset, UNSIGNED);
6416 /* Calculate minimum for the remaining dimensions. */
6417 else
6419 folded_deps[len + i - 1] = offset;
6420 if (folded_dep == c)
6421 folded_deps[i] = offset;
6422 else if (maybe_lexically_later
6423 && !wi::eq_p (folded_deps[i], offset))
6425 if (forward ^ wi::gts_p (folded_deps[i], offset))
6427 unsigned int j;
6428 folded_dep = c;
6429 for (j = 1; j <= i; j++)
6430 folded_deps[j] = folded_deps[len + j - 1];
6432 else
6433 maybe_lexically_later = false;
6437 gcc_assert (i == len);
6439 remove = true;
6441 next_ordered_clause:
6442 if (remove)
6443 *list_p = OMP_CLAUSE_CHAIN (c);
6444 else
6445 list_p = &OMP_CLAUSE_CHAIN (c);
6448 if (folded_dep)
6450 if (neg_offset_p)
6451 folded_deps[0] = -folded_deps[0];
6453 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6454 if (POINTER_TYPE_P (itype))
6455 itype = sizetype;
6457 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6458 = wide_int_to_tree (itype, folded_deps[0]);
6459 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6460 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6463 lower_omp_ordered_ret:
6465 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
6466 while we want a nop instead if we remove all clauses. */
6467 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6468 gsi_replace (gsi_p, gimple_build_nop (), true);
6472 /* Expand code for an OpenMP ordered directive. */
6474 static void
6475 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6477 tree block;
6478 gimple *stmt = gsi_stmt (*gsi_p), *g;
6479 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6480 gcall *x;
6481 gbind *bind;
6482 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6483 OMP_CLAUSE_SIMD);
6484 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6485 loop. */
6486 bool maybe_simt
6487 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6488 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6489 OMP_CLAUSE_THREADS);
6491 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6492 OMP_CLAUSE_DEPEND))
6494 /* FIXME: This needs to be moved to the expansion to verify various
6495 conditions only testable on a cfg with dominators computed, and also
6496 all the depend clauses to be merged might still need to be available
6497 for the runtime checks. */
6498 if (0)
6499 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6500 return;
6503 push_gimplify_context ();
6505 block = make_node (BLOCK);
6506 bind = gimple_build_bind (NULL, NULL, block);
6507 gsi_replace (gsi_p, bind, true);
6508 gimple_bind_add_stmt (bind, stmt);
6510 if (simd)
6512 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6513 build_int_cst (NULL_TREE, threads));
6514 cfun->has_simduid_loops = true;
6516 else
6517 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6518 0);
6519 gimple_bind_add_stmt (bind, x);
6521 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6522 if (maybe_simt)
6524 counter = create_tmp_var (integer_type_node);
6525 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6526 gimple_call_set_lhs (g, counter);
6527 gimple_bind_add_stmt (bind, g);
6529 body = create_artificial_label (UNKNOWN_LOCATION);
6530 test = create_artificial_label (UNKNOWN_LOCATION);
6531 gimple_bind_add_stmt (bind, gimple_build_label (body));
6533 tree simt_pred = create_tmp_var (integer_type_node);
6534 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6535 gimple_call_set_lhs (g, simt_pred);
6536 gimple_bind_add_stmt (bind, g);
6538 tree t = create_artificial_label (UNKNOWN_LOCATION);
6539 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6540 gimple_bind_add_stmt (bind, g);
6542 gimple_bind_add_stmt (bind, gimple_build_label (t));
6544 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6545 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6546 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6547 gimple_omp_set_body (stmt, NULL);
6549 if (maybe_simt)
6551 gimple_bind_add_stmt (bind, gimple_build_label (test));
6552 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6553 gimple_bind_add_stmt (bind, g);
6555 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6556 tree nonneg = create_tmp_var (integer_type_node);
6557 gimple_seq tseq = NULL;
6558 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6559 gimple_bind_add_seq (bind, tseq);
6561 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6562 gimple_call_set_lhs (g, nonneg);
6563 gimple_bind_add_stmt (bind, g);
6565 tree end = create_artificial_label (UNKNOWN_LOCATION);
6566 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6567 gimple_bind_add_stmt (bind, g);
6569 gimple_bind_add_stmt (bind, gimple_build_label (end));
6571 if (simd)
6572 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6573 build_int_cst (NULL_TREE, threads));
6574 else
6575 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6576 0);
6577 gimple_bind_add_stmt (bind, x);
6579 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6581 pop_gimplify_context (bind);
6583 gimple_bind_append_vars (bind, ctx->block_vars);
6584 BLOCK_VARS (block) = gimple_bind_vars (bind);
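/* Under SIMT the lowering above serializes the body across lanes
   (sketch; illustration only):

     counter = GOMP_SIMT_LANE ();
   body:
     if (GOMP_SIMT_ORDERED_PRED (counter) == 0)
       <ordered body>
   test:
     counter = counter - 1;
     if (GOMP_SIMT_VOTE_ANY (counter >= 0)) goto body; else goto end;
   end:

   so lane 0 runs the body first, then lane 1, and so on, with all
   lanes leaving the loop together.  */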
6588 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6589 substitution of a couple of function calls. But the NAMED case
6590 requires that languages coordinate a symbol name. It is therefore
6591 best put here in common code. */
6593 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6595 static void
6596 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6598 tree block;
6599 tree name, lock, unlock;
6600 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6601 gbind *bind;
6602 location_t loc = gimple_location (stmt);
6603 gimple_seq tbody;
6605 name = gimple_omp_critical_name (stmt);
6606 if (name)
6608 tree decl;
6610 if (!critical_name_mutexes)
6611 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6613 tree *n = critical_name_mutexes->get (name);
6614 if (n == NULL)
6616 char *new_str;
6618 decl = create_tmp_var_raw (ptr_type_node);
6620 new_str = ACONCAT ((".gomp_critical_user_",
6621 IDENTIFIER_POINTER (name), NULL));
6622 DECL_NAME (decl) = get_identifier (new_str);
6623 TREE_PUBLIC (decl) = 1;
6624 TREE_STATIC (decl) = 1;
6625 DECL_COMMON (decl) = 1;
6626 DECL_ARTIFICIAL (decl) = 1;
6627 DECL_IGNORED_P (decl) = 1;
6629 varpool_node::finalize_decl (decl);
6631 critical_name_mutexes->put (name, decl);
6633 else
6634 decl = *n;
6636 /* If '#pragma omp critical' is inside an offloaded region or
6637 inside a function marked as offloadable, the symbol must be
6638 marked as offloadable too. */
6639 omp_context *octx;
6640 if (cgraph_node::get (current_function_decl)->offloadable)
6641 varpool_node::get_create (decl)->offloadable = 1;
6642 else
6643 for (octx = ctx->outer; octx; octx = octx->outer)
6644 if (is_gimple_omp_offloaded (octx->stmt))
6646 varpool_node::get_create (decl)->offloadable = 1;
6647 break;
6650 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6651 lock = build_call_expr_loc (loc, lock, 1,
6652 build_fold_addr_expr_loc (loc, decl));
6654 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6655 unlock = build_call_expr_loc (loc, unlock, 1,
6656 build_fold_addr_expr_loc (loc, decl));
6658 else
6660 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6661 lock = build_call_expr_loc (loc, lock, 0);
6663 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6664 unlock = build_call_expr_loc (loc, unlock, 0);
6667 push_gimplify_context ();
6669 block = make_node (BLOCK);
6670 bind = gimple_build_bind (NULL, NULL, block);
6671 gsi_replace (gsi_p, bind, true);
6672 gimple_bind_add_stmt (bind, stmt);
6674 tbody = gimple_bind_body (bind);
6675 gimplify_and_add (lock, &tbody);
6676 gimple_bind_set_body (bind, tbody);
6678 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6679 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6680 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6681 gimple_omp_set_body (stmt, NULL);
6683 tbody = gimple_bind_body (bind);
6684 gimplify_and_add (unlock, &tbody);
6685 gimple_bind_set_body (bind, tbody);
6687 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6689 pop_gimplify_context (bind);
6690 gimple_bind_append_vars (bind, ctx->block_vars);
6691 BLOCK_VARS (block) = gimple_bind_vars (bind);
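/* E.g. '#pragma omp critical (foo)' becomes, schematically,

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     <body>
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   with .gomp_critical_user_foo a common symbol coordinated across
   translation units and languages; the unnamed form calls
   GOMP_critical_start/GOMP_critical_end instead.  */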
6694 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6695 for a lastprivate clause. Given a loop control predicate of (V
6696 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6697 is appended to *DLIST, iterator initialization is appended to
6698 *BODY_P. */
6700 static void
6701 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6702 gimple_seq *dlist, struct omp_context *ctx)
6704 tree clauses, cond, vinit;
6705 enum tree_code cond_code;
6706 gimple_seq stmts;
6708 cond_code = fd->loop.cond_code;
6709 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6711 /* When possible, use a strict equality expression. This can let VRP
6712 type optimizations deduce the value and remove a copy. */
6713 if (tree_fits_shwi_p (fd->loop.step))
6715 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6716 if (step == 1 || step == -1)
6717 cond_code = EQ_EXPR;
6720 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6721 || gimple_omp_for_grid_phony (fd->for_stmt))
6722 cond = omp_grid_lastprivate_predicate (fd);
6723 else
6725 tree n2 = fd->loop.n2;
6726 if (fd->collapse > 1
6727 && TREE_CODE (n2) != INTEGER_CST
6728 && gimple_omp_for_combined_into_p (fd->for_stmt))
6730 struct omp_context *taskreg_ctx = NULL;
6731 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6733 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6734 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6735 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6737 if (gimple_omp_for_combined_into_p (gfor))
6739 gcc_assert (ctx->outer->outer
6740 && is_parallel_ctx (ctx->outer->outer));
6741 taskreg_ctx = ctx->outer->outer;
6743 else
6745 struct omp_for_data outer_fd;
6746 omp_extract_for_data (gfor, &outer_fd, NULL);
6747 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6750 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6751 taskreg_ctx = ctx->outer->outer;
6753 else if (is_taskreg_ctx (ctx->outer))
6754 taskreg_ctx = ctx->outer;
6755 if (taskreg_ctx)
6757 int i;
6758 tree taskreg_clauses
6759 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6760 tree innerc = omp_find_clause (taskreg_clauses,
6761 OMP_CLAUSE__LOOPTEMP_);
6762 gcc_assert (innerc);
6763 for (i = 0; i < fd->collapse; i++)
6765 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6766 OMP_CLAUSE__LOOPTEMP_);
6767 gcc_assert (innerc);
6769 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6770 OMP_CLAUSE__LOOPTEMP_);
6771 if (innerc)
6772 n2 = fold_convert (TREE_TYPE (n2),
6773 lookup_decl (OMP_CLAUSE_DECL (innerc),
6774 taskreg_ctx));
6777 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6780 clauses = gimple_omp_for_clauses (fd->for_stmt);
6781 stmts = NULL;
6782 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6783 if (!gimple_seq_empty_p (stmts))
6785 gimple_seq_add_seq (&stmts, *dlist);
6786 *dlist = stmts;
6788 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6789 vinit = fd->loop.n1;
6790 if (cond_code == EQ_EXPR
6791 && tree_fits_shwi_p (fd->loop.n2)
6792 && ! integer_zerop (fd->loop.n2))
6793 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6794 else
6795 vinit = unshare_expr (vinit);
6797 /* Initialize the iterator variable, so that threads that don't execute
6798 any iterations don't execute the lastprivate clauses by accident. */
6799 gimplify_assign (fd->loop.v, vinit, body_p);
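/* Worked example (illustration): for

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++) ...

   COND_CODE is first inverted to GE_EXPR and then, the step being 1,
   replaced by EQ_EXPR, so the copy-out appended to *DLIST is guarded
   by 'if (i == n)', while 'i = 0' is appended to *BODY_P so threads
   that run no iterations never satisfy the guard.  */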
6804 /* Lower code for an OMP loop directive. */
6806 static void
6807 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6809 tree *rhs_p, block;
6810 struct omp_for_data fd, *fdp = NULL;
6811 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6812 gbind *new_stmt;
6813 gimple_seq omp_for_body, body, dlist;
6814 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6815 size_t i;
6817 push_gimplify_context ();
6819 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6821 block = make_node (BLOCK);
6822 new_stmt = gimple_build_bind (NULL, NULL, block);
6823 /* Replace at gsi right away, so that 'stmt' is no longer a member
6824 of a sequence, as we're going to add it to a different
6825 one below. */
6826 gsi_replace (gsi_p, new_stmt, true);
6828 /* Move declaration of temporaries in the loop body before we make
6829 it go away. */
6830 omp_for_body = gimple_omp_body (stmt);
6831 if (!gimple_seq_empty_p (omp_for_body)
6832 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6834 gbind *inner_bind
6835 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6836 tree vars = gimple_bind_vars (inner_bind);
6837 gimple_bind_append_vars (new_stmt, vars);
6838 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6839 keep them on the inner_bind and its block. */
6840 gimple_bind_set_vars (inner_bind, NULL_TREE);
6841 if (gimple_bind_block (inner_bind))
6842 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6845 if (gimple_omp_for_combined_into_p (stmt))
6847 omp_extract_for_data (stmt, &fd, NULL);
6848 fdp = &fd;
6850 /* We need two temporaries with fd.loop.v type (istart/iend)
6851 and then (fd.collapse - 1) temporaries with the same
6852 type for count2 ... countN-1 vars if not constant. */
6853 size_t count = 2;
6854 tree type = fd.iter_type;
6855 if (fd.collapse > 1
6856 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6857 count += fd.collapse - 1;
6858 bool taskreg_for
6859 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6860 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6861 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6862 tree simtc = NULL;
6863 tree clauses = *pc;
6864 if (taskreg_for)
6865 outerc
6866 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6867 OMP_CLAUSE__LOOPTEMP_);
6868 if (ctx->simt_stmt)
6869 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6870 OMP_CLAUSE__LOOPTEMP_);
6871 for (i = 0; i < count; i++)
6873 tree temp;
6874 if (taskreg_for)
6876 gcc_assert (outerc);
6877 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6878 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6879 OMP_CLAUSE__LOOPTEMP_);
6881 else
6883 /* If there are 2 adjacent SIMD stmts, one with _simt_
6884 clause, another without, make sure they have the same
6885 decls in _looptemp_ clauses, because the outer stmt
6886 they are combined into will look up just one inner_stmt. */
6887 if (ctx->simt_stmt)
6888 temp = OMP_CLAUSE_DECL (simtc);
6889 else
6890 temp = create_tmp_var (type);
6891 insert_decl_map (&ctx->outer->cb, temp, temp);
6893 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6894 OMP_CLAUSE_DECL (*pc) = temp;
6895 pc = &OMP_CLAUSE_CHAIN (*pc);
6896 if (ctx->simt_stmt)
6897 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6898 OMP_CLAUSE__LOOPTEMP_);
6900 *pc = clauses;
6903 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6904 dlist = NULL;
6905 body = NULL;
6906 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6907 fdp);
6908 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6910 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6912 /* Lower the header expressions. At this point, we can assume that
6913 the header is of the form:
6915 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6917 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6918 using the .omp_data_s mapping, if needed. */
6919 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6921 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6922 if (!is_gimple_min_invariant (*rhs_p))
6923 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6925 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6926 if (!is_gimple_min_invariant (*rhs_p))
6927 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6929 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6930 if (!is_gimple_min_invariant (*rhs_p))
6931 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6934 /* Once lowered, extract the bounds and clauses. */
6935 omp_extract_for_data (stmt, &fd, NULL);
6937 if (is_gimple_omp_oacc (ctx->stmt)
6938 && !ctx_in_oacc_kernels_region (ctx))
6939 lower_oacc_head_tail (gimple_location (stmt),
6940 gimple_omp_for_clauses (stmt),
6941 &oacc_head, &oacc_tail, ctx);
6943 /* Add OpenACC partitioning and reduction markers just before the loop. */
6944 if (oacc_head)
6945 gimple_seq_add_seq (&body, oacc_head);
6947 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6949 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6950 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6951 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6952 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6954 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6955 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6956 OMP_CLAUSE_LINEAR_STEP (c)
6957 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6958 ctx);
6961 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6962 && gimple_omp_for_grid_phony (stmt));
6963 if (!phony_loop)
6964 gimple_seq_add_stmt (&body, stmt);
6965 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6967 if (!phony_loop)
6968 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6969 fd.loop.v));
6971 /* After the loop, add exit clauses. */
6972 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6974 if (ctx->cancellable)
6975 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6977 gimple_seq_add_seq (&body, dlist);
6979 body = maybe_catch_exception (body);
6981 if (!phony_loop)
6983 /* Region exit marker goes at the end of the loop body. */
6984 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6985 maybe_add_implicit_barrier_cancel (ctx, &body);
6988 /* Add OpenACC joining and reduction markers just after the loop. */
6989 if (oacc_tail)
6990 gimple_seq_add_seq (&body, oacc_tail);
6992 pop_gimplify_context (new_stmt);
6994 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6995 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6996 if (BLOCK_VARS (block))
6997 TREE_USED (block) = 1;
6999 gimple_bind_set_body (new_stmt, body);
7000 gimple_omp_set_body (stmt, NULL);
7001 gimple_omp_for_set_pre_body (stmt, NULL);
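/* After this, the bind holding a lowered '#pragma omp for' contains,
   in order (sketch): data-sharing setup, the pre-body, the lowered
   bound computations, [OpenACC head markers,] the iterator
   initialization for lastprivate, the GIMPLE_OMP_FOR and its body,
   GIMPLE_OMP_CONTINUE, reduction code, destructor code and
   GIMPLE_OMP_RETURN, [followed by OpenACC tail markers].  */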
7004 /* Callback for walk_stmts. Check if the current statement only contains
7005 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
7007 static tree
7008 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7009 bool *handled_ops_p,
7010 struct walk_stmt_info *wi)
7012 int *info = (int *) wi->info;
7013 gimple *stmt = gsi_stmt (*gsi_p);
7015 *handled_ops_p = true;
7016 switch (gimple_code (stmt))
7018 WALK_SUBSTMTS;
7020 case GIMPLE_OMP_FOR:
7021 case GIMPLE_OMP_SECTIONS:
7022 *info = *info == 0 ? 1 : -1;
7023 break;
7024 default:
7025 *info = -1;
7026 break;
7028 return NULL;
7031 struct omp_taskcopy_context
7033 /* This field must be at the beginning, as we do "inheritance": Some
7034 callback functions for tree-inline.c (e.g., omp_copy_decl)
7035 receive a copy_body_data pointer that is up-casted to an
7036 omp_context pointer. */
7037 copy_body_data cb;
7038 omp_context *ctx;
7041 static tree
7042 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7044 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7046 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7047 return create_tmp_var (TREE_TYPE (var));
7049 return var;
7052 static tree
7053 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7055 tree name, new_fields = NULL, type, f;
7057 type = lang_hooks.types.make_type (RECORD_TYPE);
7058 name = DECL_NAME (TYPE_NAME (orig_type));
7059 name = build_decl (gimple_location (tcctx->ctx->stmt),
7060 TYPE_DECL, name, type);
7061 TYPE_NAME (type) = name;
7063 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7065 tree new_f = copy_node (f);
7066 DECL_CONTEXT (new_f) = type;
7067 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7068 TREE_CHAIN (new_f) = new_fields;
7069 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7070 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7071 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7072 &tcctx->cb, NULL);
7073 new_fields = new_f;
7074 tcctx->cb.decl_map->put (f, new_f);
7076 TYPE_FIELDS (type) = nreverse (new_fields);
7077 layout_type (type);
7078 return type;
7081 /* Create task copyfn. */
7083 static void
7084 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7086 struct function *child_cfun;
7087 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7088 tree record_type, srecord_type, bind, list;
7089 bool record_needs_remap = false, srecord_needs_remap = false;
7090 splay_tree_node n;
7091 struct omp_taskcopy_context tcctx;
7092 location_t loc = gimple_location (task_stmt);
7094 child_fn = gimple_omp_task_copy_fn (task_stmt);
7095 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7096 gcc_assert (child_cfun->cfg == NULL);
7097 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7099 /* Reset DECL_CONTEXT on function arguments. */
7100 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7101 DECL_CONTEXT (t) = child_fn;
7103 /* Populate the function. */
7104 push_gimplify_context ();
7105 push_cfun (child_cfun);
7107 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7108 TREE_SIDE_EFFECTS (bind) = 1;
7109 list = NULL;
7110 DECL_SAVED_TREE (child_fn) = bind;
7111 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7113 /* Remap src and dst argument types if needed. */
7114 record_type = ctx->record_type;
7115 srecord_type = ctx->srecord_type;
7116 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7117 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7119 record_needs_remap = true;
7120 break;
7122 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7123 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7125 srecord_needs_remap = true;
7126 break;
7129 if (record_needs_remap || srecord_needs_remap)
7131 memset (&tcctx, '\0', sizeof (tcctx));
7132 tcctx.cb.src_fn = ctx->cb.src_fn;
7133 tcctx.cb.dst_fn = child_fn;
7134 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7135 gcc_checking_assert (tcctx.cb.src_node);
7136 tcctx.cb.dst_node = tcctx.cb.src_node;
7137 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7138 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7139 tcctx.cb.eh_lp_nr = 0;
7140 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7141 tcctx.cb.decl_map = new hash_map<tree, tree>;
7142 tcctx.ctx = ctx;
7144 if (record_needs_remap)
7145 record_type = task_copyfn_remap_type (&tcctx, record_type);
7146 if (srecord_needs_remap)
7147 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7149 else
7150 tcctx.cb.decl_map = NULL;
7152 arg = DECL_ARGUMENTS (child_fn);
7153 TREE_TYPE (arg) = build_pointer_type (record_type);
7154 sarg = DECL_CHAIN (arg);
7155 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7157 /* First pass: initialize temporaries used in record_type and srecord_type
7158 sizes and field offsets. */
7159 if (tcctx.cb.decl_map)
7160 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7161 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7163 tree *p;
7165 decl = OMP_CLAUSE_DECL (c);
7166 p = tcctx.cb.decl_map->get (decl);
7167 if (p == NULL)
7168 continue;
7169 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7170 sf = (tree) n->value;
7171 sf = *tcctx.cb.decl_map->get (sf);
7172 src = build_simple_mem_ref_loc (loc, sarg);
7173 src = omp_build_component_ref (src, sf);
7174 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7175 append_to_statement_list (t, &list);
7178 /* Second pass: copy shared var pointers and copy construct non-VLA
7179 firstprivate vars. */
7180 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7181 switch (OMP_CLAUSE_CODE (c))
7183 splay_tree_key key;
7184 case OMP_CLAUSE_SHARED:
7185 decl = OMP_CLAUSE_DECL (c);
7186 key = (splay_tree_key) decl;
7187 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7188 key = (splay_tree_key) &DECL_UID (decl);
7189 n = splay_tree_lookup (ctx->field_map, key);
7190 if (n == NULL)
7191 break;
7192 f = (tree) n->value;
7193 if (tcctx.cb.decl_map)
7194 f = *tcctx.cb.decl_map->get (f);
7195 n = splay_tree_lookup (ctx->sfield_map, key);
7196 sf = (tree) n->value;
7197 if (tcctx.cb.decl_map)
7198 sf = *tcctx.cb.decl_map->get (sf);
7199 src = build_simple_mem_ref_loc (loc, sarg);
7200 src = omp_build_component_ref (src, sf);
7201 dst = build_simple_mem_ref_loc (loc, arg);
7202 dst = omp_build_component_ref (dst, f);
7203 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7204 append_to_statement_list (t, &list);
7205 break;
7206 case OMP_CLAUSE_FIRSTPRIVATE:
7207 decl = OMP_CLAUSE_DECL (c);
7208 if (is_variable_sized (decl))
7209 break;
7210 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7211 if (n == NULL)
7212 break;
7213 f = (tree) n->value;
7214 if (tcctx.cb.decl_map)
7215 f = *tcctx.cb.decl_map->get (f);
7216 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7217 if (n != NULL)
7219 sf = (tree) n->value;
7220 if (tcctx.cb.decl_map)
7221 sf = *tcctx.cb.decl_map->get (sf);
7222 src = build_simple_mem_ref_loc (loc, sarg);
7223 src = omp_build_component_ref (src, sf);
7224 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7225 src = build_simple_mem_ref_loc (loc, src);
7227 else
7228 src = decl;
7229 dst = build_simple_mem_ref_loc (loc, arg);
7230 dst = omp_build_component_ref (dst, f);
7231 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7232 append_to_statement_list (t, &list);
7233 break;
7234 case OMP_CLAUSE_PRIVATE:
7235 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7236 break;
7237 decl = OMP_CLAUSE_DECL (c);
7238 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7239 f = (tree) n->value;
7240 if (tcctx.cb.decl_map)
7241 f = *tcctx.cb.decl_map->get (f);
7242 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7243 if (n != NULL)
7245 sf = (tree) n->value;
7246 if (tcctx.cb.decl_map)
7247 sf = *tcctx.cb.decl_map->get (sf);
7248 src = build_simple_mem_ref_loc (loc, sarg);
7249 src = omp_build_component_ref (src, sf);
7250 if (use_pointer_for_field (decl, NULL))
7251 src = build_simple_mem_ref_loc (loc, src);
7253 else
7254 src = decl;
7255 dst = build_simple_mem_ref_loc (loc, arg);
7256 dst = omp_build_component_ref (dst, f);
7257 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7258 append_to_statement_list (t, &list);
7259 break;
7260 default:
7261 break;
7264 /* Last pass: handle VLA firstprivates. */
7265 if (tcctx.cb.decl_map)
7266 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7267 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7269 tree ind, ptr, df;
7271 decl = OMP_CLAUSE_DECL (c);
7272 if (!is_variable_sized (decl))
7273 continue;
7274 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7275 if (n == NULL)
7276 continue;
7277 f = (tree) n->value;
7278 f = *tcctx.cb.decl_map->get (f);
7279 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7280 ind = DECL_VALUE_EXPR (decl);
7281 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7282 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7283 n = splay_tree_lookup (ctx->sfield_map,
7284 (splay_tree_key) TREE_OPERAND (ind, 0));
7285 sf = (tree) n->value;
7286 sf = *tcctx.cb.decl_map->get (sf);
7287 src = build_simple_mem_ref_loc (loc, sarg);
7288 src = omp_build_component_ref (src, sf);
7289 src = build_simple_mem_ref_loc (loc, src);
7290 dst = build_simple_mem_ref_loc (loc, arg);
7291 dst = omp_build_component_ref (dst, f);
7292 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7293 append_to_statement_list (t, &list);
7294 n = splay_tree_lookup (ctx->field_map,
7295 (splay_tree_key) TREE_OPERAND (ind, 0));
7296 df = (tree) n->value;
7297 df = *tcctx.cb.decl_map->get (df);
7298 ptr = build_simple_mem_ref_loc (loc, arg);
7299 ptr = omp_build_component_ref (ptr, df);
7300 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7301 build_fold_addr_expr_loc (loc, dst));
7302 append_to_statement_list (t, &list);
7305 t = build1 (RETURN_EXPR, void_type_node, NULL);
7306 append_to_statement_list (t, &list);
7308 if (tcctx.cb.decl_map)
7309 delete tcctx.cb.decl_map;
7310 pop_gimplify_context (NULL);
7311 BIND_EXPR_BODY (bind) = list;
7312 pop_cfun ();
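/* Shape of the generated copyfn (sketch; field names illustrative):

     void copyfn (struct .omp_data_t *dst, struct .omp_data_s *src)
     {
       dst->shared_p = src->shared_p;         // shared: copy pointer
       dst->fpvar = COPY_CTOR (src->fpvar);   // firstprivate: construct
       dst->vla_p = &dst->vla_data;           // VLA: point into dst
     }

   where COPY_CTOR stands for whatever the language's
   omp_clause_copy_ctor hook produces.  */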
7315 static void
7316 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7318 tree c, clauses;
7319 gimple *g;
7320 size_t n_in = 0, n_out = 0, idx = 2, i;
7322 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7323 gcc_assert (clauses);
7324 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7325 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7326 switch (OMP_CLAUSE_DEPEND_KIND (c))
7328 case OMP_CLAUSE_DEPEND_IN:
7329 n_in++;
7330 break;
7331 case OMP_CLAUSE_DEPEND_OUT:
7332 case OMP_CLAUSE_DEPEND_INOUT:
7333 n_out++;
7334 break;
7335 case OMP_CLAUSE_DEPEND_SOURCE:
7336 case OMP_CLAUSE_DEPEND_SINK:
7337 /* FALLTHRU */
7338 default:
7339 gcc_unreachable ();
7341 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7342 tree array = create_tmp_var (type);
7343 TREE_ADDRESSABLE (array) = 1;
7344 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7345 NULL_TREE);
7346 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7347 gimple_seq_add_stmt (iseq, g);
7348 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7349 NULL_TREE);
7350 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7351 gimple_seq_add_stmt (iseq, g);
7352 for (i = 0; i < 2; i++)
7354 if ((i ? n_in : n_out) == 0)
7355 continue;
7356 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7357 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7358 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7360 tree t = OMP_CLAUSE_DECL (c);
7361 t = fold_convert (ptr_type_node, t);
7362 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7363 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7364 NULL_TREE, NULL_TREE);
7365 g = gimple_build_assign (r, t);
7366 gimple_seq_add_stmt (iseq, g);
7369 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7370 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7371 OMP_CLAUSE_CHAIN (c) = *pclauses;
7372 *pclauses = c;
7373 tree clobber = build_constructor (type, NULL);
7374 TREE_THIS_VOLATILE (clobber) = 1;
7375 g = gimple_build_assign (array, clobber);
7376 gimple_seq_add_stmt (oseq, g);
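/* Illustration: for 'depend(in: a) depend(out: b, c)' the generated
   array is

     array[0] = 3;   // total number of depend addresses
     array[1] = 2;   // number of out/inout entries, listed first
     array[2] = &b;  array[3] = &c;   // out/inout
     array[4] = &a;                   // in

   and &array is passed to the runtime via an artificial
   OMP_CLAUSE_DEPEND prepended to *PCLAUSES, with the array clobbered
   again in *OSEQ once the construct is done with it.  */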
7379 /* Lower the OpenMP parallel or task directive in the current statement
7380 in GSI_P. CTX holds context information for the directive. */
7382 static void
7383 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7385 tree clauses;
7386 tree child_fn, t;
7387 gimple *stmt = gsi_stmt (*gsi_p);
7388 gbind *par_bind, *bind, *dep_bind = NULL;
7389 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7390 location_t loc = gimple_location (stmt);
7392 clauses = gimple_omp_taskreg_clauses (stmt);
7393 par_bind
7394 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7395 par_body = gimple_bind_body (par_bind);
7396 child_fn = ctx->cb.dst_fn;
7397 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7398 && !gimple_omp_parallel_combined_p (stmt))
7400 struct walk_stmt_info wi;
7401 int ws_num = 0;
7403 memset (&wi, 0, sizeof (wi));
7404 wi.info = &ws_num;
7405 wi.val_only = true;
7406 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7407 if (ws_num == 1)
7408 gimple_omp_parallel_set_combined_p (stmt, true);
7410 gimple_seq dep_ilist = NULL;
7411 gimple_seq dep_olist = NULL;
7412 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7413 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7415 push_gimplify_context ();
7416 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7417 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7418 &dep_ilist, &dep_olist);
7421 if (ctx->srecord_type)
7422 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7424 push_gimplify_context ();
7426 par_olist = NULL;
7427 par_ilist = NULL;
7428 par_rlist = NULL;
7429 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7430 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7431 if (phony_construct && ctx->record_type)
7433 gcc_checking_assert (!ctx->receiver_decl);
7434 ctx->receiver_decl = create_tmp_var
7435 (build_reference_type (ctx->record_type), ".omp_rec");
7437 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7438 lower_omp (&par_body, ctx);
7439 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7440 lower_reduction_clauses (clauses, &par_rlist, ctx);
7442 /* Declare all the variables created by mapping and the variables
7443 declared in the scope of the parallel body. */
7444 record_vars_into (ctx->block_vars, child_fn);
7445 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7447 if (ctx->record_type)
7449 ctx->sender_decl
7450 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7451 : ctx->record_type, ".omp_data_o");
7452 DECL_NAMELESS (ctx->sender_decl) = 1;
7453 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7454 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7457 olist = NULL;
7458 ilist = NULL;
7459 lower_send_clauses (clauses, &ilist, &olist, ctx);
7460 lower_send_shared_vars (&ilist, &olist, ctx);
7462 if (ctx->record_type)
7464 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7465 TREE_THIS_VOLATILE (clobber) = 1;
7466 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7467 clobber));
7470 /* Once all the expansions are done, sequence all the different
7471 fragments inside gimple_omp_body. */
7473 new_body = NULL;
7475 if (ctx->record_type)
7477 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7478 /* fixup_child_record_type might have changed receiver_decl's type. */
7479 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7480 gimple_seq_add_stmt (&new_body,
7481 gimple_build_assign (ctx->receiver_decl, t));
7484 gimple_seq_add_seq (&new_body, par_ilist);
7485 gimple_seq_add_seq (&new_body, par_body);
7486 gimple_seq_add_seq (&new_body, par_rlist);
7487 if (ctx->cancellable)
7488 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7489 gimple_seq_add_seq (&new_body, par_olist);
7490 new_body = maybe_catch_exception (new_body);
7491 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7492 gimple_seq_add_stmt (&new_body,
7493 gimple_build_omp_continue (integer_zero_node,
7494 integer_zero_node));
7495 if (!phony_construct)
7497 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7498 gimple_omp_set_body (stmt, new_body);
7501 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7502 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7503 gimple_bind_add_seq (bind, ilist);
7504 if (!phony_construct)
7505 gimple_bind_add_stmt (bind, stmt);
7506 else
7507 gimple_bind_add_seq (bind, new_body);
7508 gimple_bind_add_seq (bind, olist);
7510 pop_gimplify_context (NULL);
7512 if (dep_bind)
7514 gimple_bind_add_seq (dep_bind, dep_ilist);
7515 gimple_bind_add_stmt (dep_bind, bind);
7516 gimple_bind_add_seq (dep_bind, dep_olist);
7517 pop_gimplify_context (dep_bind);
7521 /* Lower the GIMPLE_OMP_TARGET in the current statement
7522 in GSI_P. CTX holds context information for the directive. */
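/* As a rough sketch of the lowering performed here (the exact clauses
   generated depend on the front end and on the directive used), a region
   such as

     #pragma omp target map(tofrom: x)

   is lowered so that the host side fills in a sender record together with
   the .omp_data_sizes and .omp_data_kinds arrays built below, while the
   offloaded child function reads its inputs back through the matching
   receiver declaration.  */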
7524 static void
7525 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7527 tree clauses;
7528 tree child_fn, t, c;
7529 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7530 gbind *tgt_bind, *bind, *dep_bind = NULL;
7531 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7532 location_t loc = gimple_location (stmt);
7533 bool offloaded, data_region;
7534 unsigned int map_cnt = 0;
7536 offloaded = is_gimple_omp_offloaded (stmt);
7537 switch (gimple_omp_target_kind (stmt))
7539 case GF_OMP_TARGET_KIND_REGION:
7540 case GF_OMP_TARGET_KIND_UPDATE:
7541 case GF_OMP_TARGET_KIND_ENTER_DATA:
7542 case GF_OMP_TARGET_KIND_EXIT_DATA:
7543 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7544 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7545 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7546 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7547 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7548 data_region = false;
7549 break;
7550 case GF_OMP_TARGET_KIND_DATA:
7551 case GF_OMP_TARGET_KIND_OACC_DATA:
7552 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7553 data_region = true;
7554 break;
7555 default:
7556 gcc_unreachable ();
7559 clauses = gimple_omp_target_clauses (stmt);
7561 gimple_seq dep_ilist = NULL;
7562 gimple_seq dep_olist = NULL;
7563 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7565 push_gimplify_context ();
7566 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7567 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7568 &dep_ilist, &dep_olist);
7571 tgt_bind = NULL;
7572 tgt_body = NULL;
7573 if (offloaded)
7575 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7576 tgt_body = gimple_bind_body (tgt_bind);
7578 else if (data_region)
7579 tgt_body = gimple_omp_body (stmt);
7580 child_fn = ctx->cb.dst_fn;
7582 push_gimplify_context ();
7583 fplist = NULL;
7585 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7586 switch (OMP_CLAUSE_CODE (c))
7588 tree var, x;
7590 default:
7591 break;
7592 case OMP_CLAUSE_MAP:
7593 #if CHECKING_P
7594 /* First check what we're prepared to handle in the following. */
7595 switch (OMP_CLAUSE_MAP_KIND (c))
7597 case GOMP_MAP_ALLOC:
7598 case GOMP_MAP_TO:
7599 case GOMP_MAP_FROM:
7600 case GOMP_MAP_TOFROM:
7601 case GOMP_MAP_POINTER:
7602 case GOMP_MAP_TO_PSET:
7603 case GOMP_MAP_DELETE:
7604 case GOMP_MAP_RELEASE:
7605 case GOMP_MAP_ALWAYS_TO:
7606 case GOMP_MAP_ALWAYS_FROM:
7607 case GOMP_MAP_ALWAYS_TOFROM:
7608 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7609 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7610 case GOMP_MAP_STRUCT:
7611 case GOMP_MAP_ALWAYS_POINTER:
7612 break;
7613 case GOMP_MAP_FORCE_ALLOC:
7614 case GOMP_MAP_FORCE_TO:
7615 case GOMP_MAP_FORCE_FROM:
7616 case GOMP_MAP_FORCE_TOFROM:
7617 case GOMP_MAP_FORCE_PRESENT:
7618 case GOMP_MAP_FORCE_DEVICEPTR:
7619 case GOMP_MAP_DEVICE_RESIDENT:
7620 case GOMP_MAP_LINK:
7621 gcc_assert (is_gimple_omp_oacc (stmt));
7622 break;
7623 default:
7624 gcc_unreachable ();
7626 #endif
7627 /* FALLTHRU */
7628 case OMP_CLAUSE_TO:
7629 case OMP_CLAUSE_FROM:
7630 oacc_firstprivate:
7631 var = OMP_CLAUSE_DECL (c);
7632 if (!DECL_P (var))
7634 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7635 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7636 && (OMP_CLAUSE_MAP_KIND (c)
7637 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7638 map_cnt++;
7639 continue;
7642 if (DECL_SIZE (var)
7643 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7645 tree var2 = DECL_VALUE_EXPR (var);
7646 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7647 var2 = TREE_OPERAND (var2, 0);
7648 gcc_assert (DECL_P (var2));
7649 var = var2;
7652 if (offloaded
7653 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7654 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7655 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7657 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7659 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7660 && varpool_node::get_create (var)->offloadable)
7661 continue;
7663 tree type = build_pointer_type (TREE_TYPE (var));
7664 tree new_var = lookup_decl (var, ctx);
7665 x = create_tmp_var_raw (type, get_name (new_var));
7666 gimple_add_tmp_var (x);
7667 x = build_simple_mem_ref (x);
7668 SET_DECL_VALUE_EXPR (new_var, x);
7669 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7671 continue;
7674 if (!maybe_lookup_field (var, ctx))
7675 continue;
7677 /* Don't remap oacc parallel reduction variables, because the
7678 intermediate result must be local to each gang. */
7679 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7680 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7682 x = build_receiver_ref (var, true, ctx);
7683 tree new_var = lookup_decl (var, ctx);
7685 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7686 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7687 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7688 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7689 x = build_simple_mem_ref (x);
7690 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7692 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7693 if (omp_is_reference (new_var))
7695 /* Create a local object to hold the instance
7696 value. */
7697 tree type = TREE_TYPE (TREE_TYPE (new_var));
7698 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7699 tree inst = create_tmp_var (type, id);
7700 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7701 x = build_fold_addr_expr (inst);
7703 gimplify_assign (new_var, x, &fplist);
7705 else if (DECL_P (new_var))
7707 SET_DECL_VALUE_EXPR (new_var, x);
7708 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7710 else
7711 gcc_unreachable ();
7713 map_cnt++;
7714 break;
7716 case OMP_CLAUSE_FIRSTPRIVATE:
7717 if (is_oacc_parallel (ctx))
7718 goto oacc_firstprivate;
7719 map_cnt++;
7720 var = OMP_CLAUSE_DECL (c);
7721 if (!omp_is_reference (var)
7722 && !is_gimple_reg_type (TREE_TYPE (var)))
7724 tree new_var = lookup_decl (var, ctx);
7725 if (is_variable_sized (var))
7727 tree pvar = DECL_VALUE_EXPR (var);
7728 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7729 pvar = TREE_OPERAND (pvar, 0);
7730 gcc_assert (DECL_P (pvar));
7731 tree new_pvar = lookup_decl (pvar, ctx);
7732 x = build_fold_indirect_ref (new_pvar);
7733 TREE_THIS_NOTRAP (x) = 1;
7735 else
7736 x = build_receiver_ref (var, true, ctx);
7737 SET_DECL_VALUE_EXPR (new_var, x);
7738 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7740 break;
7742 case OMP_CLAUSE_PRIVATE:
7743 if (is_gimple_omp_oacc (ctx->stmt))
7744 break;
7745 var = OMP_CLAUSE_DECL (c);
7746 if (is_variable_sized (var))
7748 tree new_var = lookup_decl (var, ctx);
7749 tree pvar = DECL_VALUE_EXPR (var);
7750 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7751 pvar = TREE_OPERAND (pvar, 0);
7752 gcc_assert (DECL_P (pvar));
7753 tree new_pvar = lookup_decl (pvar, ctx);
7754 x = build_fold_indirect_ref (new_pvar);
7755 TREE_THIS_NOTRAP (x) = 1;
7756 SET_DECL_VALUE_EXPR (new_var, x);
7757 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7759 break;
7761 case OMP_CLAUSE_USE_DEVICE_PTR:
7762 case OMP_CLAUSE_IS_DEVICE_PTR:
7763 var = OMP_CLAUSE_DECL (c);
7764 map_cnt++;
7765 if (is_variable_sized (var))
7767 tree new_var = lookup_decl (var, ctx);
7768 tree pvar = DECL_VALUE_EXPR (var);
7769 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7770 pvar = TREE_OPERAND (pvar, 0);
7771 gcc_assert (DECL_P (pvar));
7772 tree new_pvar = lookup_decl (pvar, ctx);
7773 x = build_fold_indirect_ref (new_pvar);
7774 TREE_THIS_NOTRAP (x) = 1;
7775 SET_DECL_VALUE_EXPR (new_var, x);
7776 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7778 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7780 tree new_var = lookup_decl (var, ctx);
7781 tree type = build_pointer_type (TREE_TYPE (var));
7782 x = create_tmp_var_raw (type, get_name (new_var));
7783 gimple_add_tmp_var (x);
7784 x = build_simple_mem_ref (x);
7785 SET_DECL_VALUE_EXPR (new_var, x);
7786 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7788 else
7790 tree new_var = lookup_decl (var, ctx);
7791 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7792 gimple_add_tmp_var (x);
7793 SET_DECL_VALUE_EXPR (new_var, x);
7794 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7796 break;
7799 if (offloaded)
7801 target_nesting_level++;
7802 lower_omp (&tgt_body, ctx);
7803 target_nesting_level--;
7805 else if (data_region)
7806 lower_omp (&tgt_body, ctx);
7808 if (offloaded)
7810 /* Declare all the variables created by mapping and the variables
7811 declared in the scope of the target body. */
7812 record_vars_into (ctx->block_vars, child_fn);
7813 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7816 olist = NULL;
7817 ilist = NULL;
7818 if (ctx->record_type)
7820 ctx->sender_decl
7821 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7822 DECL_NAMELESS (ctx->sender_decl) = 1;
7823 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7824 t = make_tree_vec (3);
7825 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7826 TREE_VEC_ELT (t, 1)
7827 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7828 ".omp_data_sizes");
7829 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7830 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7831 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7832 tree tkind_type = short_unsigned_type_node;
7833 int talign_shift = 8;
7834 TREE_VEC_ELT (t, 2)
7835 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7836 ".omp_data_kinds");
7837 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7838 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7839 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7840 gimple_omp_target_set_data_arg (stmt, t);
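      /* To summarize the layout chosen above: TREE_VEC_ELT 0 is the
	 .omp_data_arr sender record, elt 1 the .omp_data_sizes array of
	 byte sizes (one per mapped entity), and elt 2 the .omp_data_kinds
	 array encoding the map kind and alignment of each entry.  */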
7842 vec<constructor_elt, va_gc> *vsize;
7843 vec<constructor_elt, va_gc> *vkind;
7844 vec_alloc (vsize, map_cnt);
7845 vec_alloc (vkind, map_cnt);
7846 unsigned int map_idx = 0;
7848 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7849 switch (OMP_CLAUSE_CODE (c))
7851 tree ovar, nc, s, purpose, var, x, type;
7852 unsigned int talign;
7854 default:
7855 break;
7857 case OMP_CLAUSE_MAP:
7858 case OMP_CLAUSE_TO:
7859 case OMP_CLAUSE_FROM:
7860 oacc_firstprivate_map:
7861 nc = c;
7862 ovar = OMP_CLAUSE_DECL (c);
7863 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7864 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7865 || (OMP_CLAUSE_MAP_KIND (c)
7866 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7867 break;
7868 if (!DECL_P (ovar))
7870 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7871 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7873 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7874 == get_base_address (ovar));
7875 nc = OMP_CLAUSE_CHAIN (c);
7876 ovar = OMP_CLAUSE_DECL (nc);
7878 else
7880 tree x = build_sender_ref (ovar, ctx);
7881 tree v
7882 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7883 gimplify_assign (x, v, &ilist);
7884 nc = NULL_TREE;
7887 else
7889 if (DECL_SIZE (ovar)
7890 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7892 tree ovar2 = DECL_VALUE_EXPR (ovar);
7893 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7894 ovar2 = TREE_OPERAND (ovar2, 0);
7895 gcc_assert (DECL_P (ovar2));
7896 ovar = ovar2;
7898 if (!maybe_lookup_field (ovar, ctx))
7899 continue;
7902 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7903 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7904 talign = DECL_ALIGN_UNIT (ovar);
7905 if (nc)
7907 var = lookup_decl_in_outer_ctx (ovar, ctx);
7908 x = build_sender_ref (ovar, ctx);
7910 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7911 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7912 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7913 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7915 gcc_assert (offloaded);
7916 tree avar
7917 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7918 mark_addressable (avar);
7919 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7920 talign = DECL_ALIGN_UNIT (avar);
7921 avar = build_fold_addr_expr (avar);
7922 gimplify_assign (x, avar, &ilist);
7924 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7926 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7927 if (!omp_is_reference (var))
7929 if (is_gimple_reg (var)
7930 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7931 TREE_NO_WARNING (var) = 1;
7932 var = build_fold_addr_expr (var);
7934 else
7935 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7936 gimplify_assign (x, var, &ilist);
7938 else if (is_gimple_reg (var))
7940 gcc_assert (offloaded);
7941 tree avar = create_tmp_var (TREE_TYPE (var));
7942 mark_addressable (avar);
7943 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7944 if (GOMP_MAP_COPY_TO_P (map_kind)
7945 || map_kind == GOMP_MAP_POINTER
7946 || map_kind == GOMP_MAP_TO_PSET
7947 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7949 /* If we need to initialize a temporary
7950 with VAR because it is not addressable, and
7951 the variable hasn't been initialized yet, then
7952 we'll get a warning for the store to avar.
 7953 Don't warn in that case; the mapping might
7954 be implicit. */
7955 TREE_NO_WARNING (var) = 1;
7956 gimplify_assign (avar, var, &ilist);
7958 avar = build_fold_addr_expr (avar);
7959 gimplify_assign (x, avar, &ilist);
7960 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7961 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7962 && !TYPE_READONLY (TREE_TYPE (var)))
7964 x = unshare_expr (x);
7965 x = build_simple_mem_ref (x);
7966 gimplify_assign (var, x, &olist);
7969 else
7971 var = build_fold_addr_expr (var);
7972 gimplify_assign (x, var, &ilist);
7975 s = NULL_TREE;
7976 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7978 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7979 s = TREE_TYPE (ovar);
7980 if (TREE_CODE (s) == REFERENCE_TYPE)
7981 s = TREE_TYPE (s);
7982 s = TYPE_SIZE_UNIT (s);
7984 else
7985 s = OMP_CLAUSE_SIZE (c);
7986 if (s == NULL_TREE)
7987 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7988 s = fold_convert (size_type_node, s);
7989 purpose = size_int (map_idx++);
7990 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7991 if (TREE_CODE (s) != INTEGER_CST)
7992 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7994 unsigned HOST_WIDE_INT tkind, tkind_zero;
7995 switch (OMP_CLAUSE_CODE (c))
7997 case OMP_CLAUSE_MAP:
7998 tkind = OMP_CLAUSE_MAP_KIND (c);
7999 tkind_zero = tkind;
8000 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
8001 switch (tkind)
8003 case GOMP_MAP_ALLOC:
8004 case GOMP_MAP_TO:
8005 case GOMP_MAP_FROM:
8006 case GOMP_MAP_TOFROM:
8007 case GOMP_MAP_ALWAYS_TO:
8008 case GOMP_MAP_ALWAYS_FROM:
8009 case GOMP_MAP_ALWAYS_TOFROM:
8010 case GOMP_MAP_RELEASE:
8011 case GOMP_MAP_FORCE_TO:
8012 case GOMP_MAP_FORCE_FROM:
8013 case GOMP_MAP_FORCE_TOFROM:
8014 case GOMP_MAP_FORCE_PRESENT:
8015 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8016 break;
8017 case GOMP_MAP_DELETE:
8018 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
8019 default:
8020 break;
8022 if (tkind_zero != tkind)
8024 if (integer_zerop (s))
8025 tkind = tkind_zero;
8026 else if (integer_nonzerop (s))
8027 tkind_zero = tkind;
8029 break;
8030 case OMP_CLAUSE_FIRSTPRIVATE:
8031 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8032 tkind = GOMP_MAP_TO;
8033 tkind_zero = tkind;
8034 break;
8035 case OMP_CLAUSE_TO:
8036 tkind = GOMP_MAP_TO;
8037 tkind_zero = tkind;
8038 break;
8039 case OMP_CLAUSE_FROM:
8040 tkind = GOMP_MAP_FROM;
8041 tkind_zero = tkind;
8042 break;
8043 default:
8044 gcc_unreachable ();
8046 gcc_checking_assert (tkind
8047 < (HOST_WIDE_INT_C (1U) << talign_shift));
8048 gcc_checking_assert (tkind_zero
8049 < (HOST_WIDE_INT_C (1U) << talign_shift));
8050 talign = ceil_log2 (talign);
8051 tkind |= talign << talign_shift;
8052 tkind_zero |= talign << talign_shift;
8053 gcc_checking_assert (tkind
8054 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8055 gcc_checking_assert (tkind_zero
8056 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
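	    /* Worked example of the encoding above, assuming a 4-byte
	       aligned int mapped GOMP_MAP_TOFROM: talign becomes
	       ceil_log2 (4) == 2, so the stored kind is
	       (2 << talign_shift) | GOMP_MAP_TOFROM, i.e. the alignment in
	       the high bits and the map kind in the low 8 bits.  */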
8057 if (tkind == tkind_zero)
8058 x = build_int_cstu (tkind_type, tkind);
8059 else
8061 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8062 x = build3 (COND_EXPR, tkind_type,
8063 fold_build2 (EQ_EXPR, boolean_type_node,
8064 unshare_expr (s), size_zero_node),
8065 build_int_cstu (tkind_type, tkind_zero),
8066 build_int_cstu (tkind_type, tkind));
8068 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8069 if (nc && nc != c)
8070 c = nc;
8071 break;
8073 case OMP_CLAUSE_FIRSTPRIVATE:
8074 if (is_oacc_parallel (ctx))
8075 goto oacc_firstprivate_map;
8076 ovar = OMP_CLAUSE_DECL (c);
8077 if (omp_is_reference (ovar))
8078 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8079 else
8080 talign = DECL_ALIGN_UNIT (ovar);
8081 var = lookup_decl_in_outer_ctx (ovar, ctx);
8082 x = build_sender_ref (ovar, ctx);
8083 tkind = GOMP_MAP_FIRSTPRIVATE;
8084 type = TREE_TYPE (ovar);
8085 if (omp_is_reference (ovar))
8086 type = TREE_TYPE (type);
8087 if ((INTEGRAL_TYPE_P (type)
8088 && TYPE_PRECISION (type) <= POINTER_SIZE)
8089 || TREE_CODE (type) == POINTER_TYPE)
8091 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8092 tree t = var;
8093 if (omp_is_reference (var))
8094 t = build_simple_mem_ref (var);
8095 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8096 TREE_NO_WARNING (var) = 1;
8097 if (TREE_CODE (type) != POINTER_TYPE)
8098 t = fold_convert (pointer_sized_int_node, t);
8099 t = fold_convert (TREE_TYPE (x), t);
8100 gimplify_assign (x, t, &ilist);
8102 else if (omp_is_reference (var))
8103 gimplify_assign (x, var, &ilist);
8104 else if (is_gimple_reg (var))
8106 tree avar = create_tmp_var (TREE_TYPE (var));
8107 mark_addressable (avar);
8108 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8109 TREE_NO_WARNING (var) = 1;
8110 gimplify_assign (avar, var, &ilist);
8111 avar = build_fold_addr_expr (avar);
8112 gimplify_assign (x, avar, &ilist);
8114 else
8116 var = build_fold_addr_expr (var);
8117 gimplify_assign (x, var, &ilist);
8119 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8120 s = size_int (0);
8121 else if (omp_is_reference (ovar))
8122 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8123 else
8124 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8125 s = fold_convert (size_type_node, s);
8126 purpose = size_int (map_idx++);
8127 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8128 if (TREE_CODE (s) != INTEGER_CST)
8129 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8131 gcc_checking_assert (tkind
8132 < (HOST_WIDE_INT_C (1U) << talign_shift));
8133 talign = ceil_log2 (talign);
8134 tkind |= talign << talign_shift;
8135 gcc_checking_assert (tkind
8136 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8137 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8138 build_int_cstu (tkind_type, tkind));
8139 break;
8141 case OMP_CLAUSE_USE_DEVICE_PTR:
8142 case OMP_CLAUSE_IS_DEVICE_PTR:
8143 ovar = OMP_CLAUSE_DECL (c);
8144 var = lookup_decl_in_outer_ctx (ovar, ctx);
8145 x = build_sender_ref (ovar, ctx);
8146 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8147 tkind = GOMP_MAP_USE_DEVICE_PTR;
8148 else
8149 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8150 type = TREE_TYPE (ovar);
8151 if (TREE_CODE (type) == ARRAY_TYPE)
8152 var = build_fold_addr_expr (var);
8153 else
8155 if (omp_is_reference (ovar))
8157 type = TREE_TYPE (type);
8158 if (TREE_CODE (type) != ARRAY_TYPE)
8159 var = build_simple_mem_ref (var);
8160 var = fold_convert (TREE_TYPE (x), var);
8163 gimplify_assign (x, var, &ilist);
8164 s = size_int (0);
8165 purpose = size_int (map_idx++);
8166 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8167 gcc_checking_assert (tkind
8168 < (HOST_WIDE_INT_C (1U) << talign_shift));
8169 gcc_checking_assert (tkind
8170 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8171 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8172 build_int_cstu (tkind_type, tkind));
8173 break;
8176 gcc_assert (map_idx == map_cnt);
8178 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8179 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8180 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8181 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8182 for (int i = 1; i <= 2; i++)
8183 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8185 gimple_seq initlist = NULL;
8186 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8187 TREE_VEC_ELT (t, i)),
8188 &initlist, true, NULL_TREE);
8189 gimple_seq_add_seq (&ilist, initlist);
8191 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8192 NULL);
8193 TREE_THIS_VOLATILE (clobber) = 1;
8194 gimple_seq_add_stmt (&olist,
8195 gimple_build_assign (TREE_VEC_ELT (t, i),
8196 clobber));
8199 tree clobber = build_constructor (ctx->record_type, NULL);
8200 TREE_THIS_VOLATILE (clobber) = 1;
8201 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8202 clobber));
8205 /* Once all the expansions are done, sequence all the different
8206 fragments inside gimple_omp_body. */
8208 new_body = NULL;
8210 if (offloaded
8211 && ctx->record_type)
8213 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8214 /* fixup_child_record_type might have changed receiver_decl's type. */
8215 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8216 gimple_seq_add_stmt (&new_body,
8217 gimple_build_assign (ctx->receiver_decl, t));
8219 gimple_seq_add_seq (&new_body, fplist);
8221 if (offloaded || data_region)
8223 tree prev = NULL_TREE;
8224 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8225 switch (OMP_CLAUSE_CODE (c))
8227 tree var, x;
8228 default:
8229 break;
8230 case OMP_CLAUSE_FIRSTPRIVATE:
8231 if (is_gimple_omp_oacc (ctx->stmt))
8232 break;
8233 var = OMP_CLAUSE_DECL (c);
8234 if (omp_is_reference (var)
8235 || is_gimple_reg_type (TREE_TYPE (var)))
8237 tree new_var = lookup_decl (var, ctx);
8238 tree type;
8239 type = TREE_TYPE (var);
8240 if (omp_is_reference (var))
8241 type = TREE_TYPE (type);
8242 if ((INTEGRAL_TYPE_P (type)
8243 && TYPE_PRECISION (type) <= POINTER_SIZE)
8244 || TREE_CODE (type) == POINTER_TYPE)
8246 x = build_receiver_ref (var, false, ctx);
8247 if (TREE_CODE (type) != POINTER_TYPE)
8248 x = fold_convert (pointer_sized_int_node, x);
8249 x = fold_convert (type, x);
8250 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8251 fb_rvalue);
8252 if (omp_is_reference (var))
8254 tree v = create_tmp_var_raw (type, get_name (var));
8255 gimple_add_tmp_var (v);
8256 TREE_ADDRESSABLE (v) = 1;
8257 gimple_seq_add_stmt (&new_body,
8258 gimple_build_assign (v, x));
8259 x = build_fold_addr_expr (v);
8261 gimple_seq_add_stmt (&new_body,
8262 gimple_build_assign (new_var, x));
8264 else
8266 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8267 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8268 fb_rvalue);
8269 gimple_seq_add_stmt (&new_body,
8270 gimple_build_assign (new_var, x));
8273 else if (is_variable_sized (var))
8275 tree pvar = DECL_VALUE_EXPR (var);
8276 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8277 pvar = TREE_OPERAND (pvar, 0);
8278 gcc_assert (DECL_P (pvar));
8279 tree new_var = lookup_decl (pvar, ctx);
8280 x = build_receiver_ref (var, false, ctx);
8281 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8282 gimple_seq_add_stmt (&new_body,
8283 gimple_build_assign (new_var, x));
8285 break;
8286 case OMP_CLAUSE_PRIVATE:
8287 if (is_gimple_omp_oacc (ctx->stmt))
8288 break;
8289 var = OMP_CLAUSE_DECL (c);
8290 if (omp_is_reference (var))
8292 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8293 tree new_var = lookup_decl (var, ctx);
8294 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8295 if (TREE_CONSTANT (x))
8297 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8298 get_name (var));
8299 gimple_add_tmp_var (x);
8300 TREE_ADDRESSABLE (x) = 1;
8301 x = build_fold_addr_expr_loc (clause_loc, x);
8303 else
8304 break;
8306 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8307 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8308 gimple_seq_add_stmt (&new_body,
8309 gimple_build_assign (new_var, x));
8311 break;
8312 case OMP_CLAUSE_USE_DEVICE_PTR:
8313 case OMP_CLAUSE_IS_DEVICE_PTR:
8314 var = OMP_CLAUSE_DECL (c);
8315 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8316 x = build_sender_ref (var, ctx);
8317 else
8318 x = build_receiver_ref (var, false, ctx);
8319 if (is_variable_sized (var))
8321 tree pvar = DECL_VALUE_EXPR (var);
8322 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8323 pvar = TREE_OPERAND (pvar, 0);
8324 gcc_assert (DECL_P (pvar));
8325 tree new_var = lookup_decl (pvar, ctx);
8326 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8327 gimple_seq_add_stmt (&new_body,
8328 gimple_build_assign (new_var, x));
8330 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8332 tree new_var = lookup_decl (var, ctx);
8333 new_var = DECL_VALUE_EXPR (new_var);
8334 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8335 new_var = TREE_OPERAND (new_var, 0);
8336 gcc_assert (DECL_P (new_var));
8337 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8338 gimple_seq_add_stmt (&new_body,
8339 gimple_build_assign (new_var, x));
8341 else
8343 tree type = TREE_TYPE (var);
8344 tree new_var = lookup_decl (var, ctx);
8345 if (omp_is_reference (var))
8347 type = TREE_TYPE (type);
8348 if (TREE_CODE (type) != ARRAY_TYPE)
8350 tree v = create_tmp_var_raw (type, get_name (var));
8351 gimple_add_tmp_var (v);
8352 TREE_ADDRESSABLE (v) = 1;
8353 x = fold_convert (type, x);
8354 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8355 fb_rvalue);
8356 gimple_seq_add_stmt (&new_body,
8357 gimple_build_assign (v, x));
8358 x = build_fold_addr_expr (v);
8361 new_var = DECL_VALUE_EXPR (new_var);
8362 x = fold_convert (TREE_TYPE (new_var), x);
8363 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8364 gimple_seq_add_stmt (&new_body,
8365 gimple_build_assign (new_var, x));
8367 break;
 8369 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
 8370 so that any firstprivate vars needed to evaluate OMP_CLAUSE_SIZE
 8371 have already been handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
 8372 or references to VLAs. */
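      /* For instance, a pointer-based array section map is followed by a
	 GOMP_MAP_FIRSTPRIVATE_POINTER clause whose OMP_CLAUSE_SIZE acts as
	 the bias applied below; that bias may name a firstprivate variable,
	 which must therefore have been lowered in the first pass.  */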
8373 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8374 switch (OMP_CLAUSE_CODE (c))
8376 tree var;
8377 default:
8378 break;
8379 case OMP_CLAUSE_MAP:
8380 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8381 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8383 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8384 HOST_WIDE_INT offset = 0;
8385 gcc_assert (prev);
8386 var = OMP_CLAUSE_DECL (c);
8387 if (DECL_P (var)
8388 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8389 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8390 ctx))
8391 && varpool_node::get_create (var)->offloadable)
8392 break;
8393 if (TREE_CODE (var) == INDIRECT_REF
8394 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8395 var = TREE_OPERAND (var, 0);
8396 if (TREE_CODE (var) == COMPONENT_REF)
8398 var = get_addr_base_and_unit_offset (var, &offset);
8399 gcc_assert (var != NULL_TREE && DECL_P (var));
8401 else if (DECL_SIZE (var)
8402 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8404 tree var2 = DECL_VALUE_EXPR (var);
8405 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8406 var2 = TREE_OPERAND (var2, 0);
8407 gcc_assert (DECL_P (var2));
8408 var = var2;
8410 tree new_var = lookup_decl (var, ctx), x;
8411 tree type = TREE_TYPE (new_var);
8412 bool is_ref;
8413 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8414 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8415 == COMPONENT_REF))
8417 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8418 is_ref = true;
8419 new_var = build2 (MEM_REF, type,
8420 build_fold_addr_expr (new_var),
8421 build_int_cst (build_pointer_type (type),
8422 offset));
8424 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8426 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8427 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8428 new_var = build2 (MEM_REF, type,
8429 build_fold_addr_expr (new_var),
8430 build_int_cst (build_pointer_type (type),
8431 offset));
8433 else
8434 is_ref = omp_is_reference (var);
8435 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8436 is_ref = false;
8437 bool ref_to_array = false;
8438 if (is_ref)
8440 type = TREE_TYPE (type);
8441 if (TREE_CODE (type) == ARRAY_TYPE)
8443 type = build_pointer_type (type);
8444 ref_to_array = true;
8447 else if (TREE_CODE (type) == ARRAY_TYPE)
8449 tree decl2 = DECL_VALUE_EXPR (new_var);
8450 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8451 decl2 = TREE_OPERAND (decl2, 0);
8452 gcc_assert (DECL_P (decl2));
8453 new_var = decl2;
8454 type = TREE_TYPE (new_var);
8456 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8457 x = fold_convert_loc (clause_loc, type, x);
8458 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8460 tree bias = OMP_CLAUSE_SIZE (c);
8461 if (DECL_P (bias))
8462 bias = lookup_decl (bias, ctx);
8463 bias = fold_convert_loc (clause_loc, sizetype, bias);
8464 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8465 bias);
8466 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8467 TREE_TYPE (x), x, bias);
8469 if (ref_to_array)
8470 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8471 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8472 if (is_ref && !ref_to_array)
8474 tree t = create_tmp_var_raw (type, get_name (var));
8475 gimple_add_tmp_var (t);
8476 TREE_ADDRESSABLE (t) = 1;
8477 gimple_seq_add_stmt (&new_body,
8478 gimple_build_assign (t, x));
8479 x = build_fold_addr_expr_loc (clause_loc, t);
8481 gimple_seq_add_stmt (&new_body,
8482 gimple_build_assign (new_var, x));
8483 prev = NULL_TREE;
8485 else if (OMP_CLAUSE_CHAIN (c)
8486 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8487 == OMP_CLAUSE_MAP
8488 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8489 == GOMP_MAP_FIRSTPRIVATE_POINTER
8490 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8491 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8492 prev = c;
8493 break;
8494 case OMP_CLAUSE_PRIVATE:
8495 var = OMP_CLAUSE_DECL (c);
8496 if (is_variable_sized (var))
8498 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8499 tree new_var = lookup_decl (var, ctx);
8500 tree pvar = DECL_VALUE_EXPR (var);
8501 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8502 pvar = TREE_OPERAND (pvar, 0);
8503 gcc_assert (DECL_P (pvar));
8504 tree new_pvar = lookup_decl (pvar, ctx);
8505 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8506 tree al = size_int (DECL_ALIGN (var));
8507 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8508 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8509 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8510 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8511 gimple_seq_add_stmt (&new_body,
8512 gimple_build_assign (new_pvar, x));
8514 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8516 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8517 tree new_var = lookup_decl (var, ctx);
8518 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8519 if (TREE_CONSTANT (x))
8520 break;
8521 else
8523 tree atmp
8524 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8525 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8526 tree al = size_int (TYPE_ALIGN (rtype));
8527 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8530 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8531 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8532 gimple_seq_add_stmt (&new_body,
8533 gimple_build_assign (new_var, x));
8535 break;
8538 gimple_seq fork_seq = NULL;
8539 gimple_seq join_seq = NULL;
8541 if (is_oacc_parallel (ctx))
8543 /* If there are reductions on the offloaded region itself, treat
8544 them as a dummy GANG loop. */
8545 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8547 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8548 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8551 gimple_seq_add_seq (&new_body, fork_seq);
8552 gimple_seq_add_seq (&new_body, tgt_body);
8553 gimple_seq_add_seq (&new_body, join_seq);
8555 if (offloaded)
8556 new_body = maybe_catch_exception (new_body);
8558 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8559 gimple_omp_set_body (stmt, new_body);
8562 bind = gimple_build_bind (NULL, NULL,
8563 tgt_bind ? gimple_bind_block (tgt_bind)
8564 : NULL_TREE);
8565 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8566 gimple_bind_add_seq (bind, ilist);
8567 gimple_bind_add_stmt (bind, stmt);
8568 gimple_bind_add_seq (bind, olist);
8570 pop_gimplify_context (NULL);
8572 if (dep_bind)
8574 gimple_bind_add_seq (dep_bind, dep_ilist);
8575 gimple_bind_add_stmt (dep_bind, bind);
8576 gimple_bind_add_seq (dep_bind, dep_olist);
8577 pop_gimplify_context (dep_bind);
8581 /* Expand code for an OpenMP teams directive. */
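/* A small sketch of the result, assuming a directive like

     #pragma omp teams num_teams(4) thread_limit(64)

   the lowered body gains a call to the BUILT_IN_GOMP_TEAMS entry point
   with arguments (4, 64) ahead of the original teams body; an absent
   clause defaults its argument to 0.  */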
8583 static void
8584 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8586 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8587 push_gimplify_context ();
8589 tree block = make_node (BLOCK);
8590 gbind *bind = gimple_build_bind (NULL, NULL, block);
8591 gsi_replace (gsi_p, bind, true);
8592 gimple_seq bind_body = NULL;
8593 gimple_seq dlist = NULL;
8594 gimple_seq olist = NULL;
8596 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8597 OMP_CLAUSE_NUM_TEAMS);
8598 if (num_teams == NULL_TREE)
8599 num_teams = build_int_cst (unsigned_type_node, 0);
8600 else
8602 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8603 num_teams = fold_convert (unsigned_type_node, num_teams);
8604 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8606 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8607 OMP_CLAUSE_THREAD_LIMIT);
8608 if (thread_limit == NULL_TREE)
8609 thread_limit = build_int_cst (unsigned_type_node, 0);
8610 else
8612 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8613 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8614 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8615 fb_rvalue);
8618 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8619 &bind_body, &dlist, ctx, NULL);
8620 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8621 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8622 if (!gimple_omp_teams_grid_phony (teams_stmt))
8624 gimple_seq_add_stmt (&bind_body, teams_stmt);
8625 location_t loc = gimple_location (teams_stmt);
8626 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8627 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8628 gimple_set_location (call, loc);
8629 gimple_seq_add_stmt (&bind_body, call);
8632 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8633 gimple_omp_set_body (teams_stmt, NULL);
8634 gimple_seq_add_seq (&bind_body, olist);
8635 gimple_seq_add_seq (&bind_body, dlist);
8636 if (!gimple_omp_teams_grid_phony (teams_stmt))
8637 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8638 gimple_bind_set_body (bind, bind_body);
8640 pop_gimplify_context (bind);
8642 gimple_bind_append_vars (bind, ctx->block_vars);
8643 BLOCK_VARS (block) = ctx->block_vars;
8644 if (BLOCK_VARS (block))
8645 TREE_USED (block) = 1;
8648 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8650 static void
8651 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8653 gimple *stmt = gsi_stmt (*gsi_p);
8654 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8655 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8656 gimple_build_omp_return (false));
 8660 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
 8661 regimplified. If DATA is non-NULL, lower_omp_1 is being called
 8662 outside of an OMP context, but with task_shared_vars set. */
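/* For example, a variable-sized local whose DECL_VALUE_EXPR was rewritten
   to an INDIRECT_REF of a replacement pointer (as done for VLAs earlier in
   this file) no longer satisfies the gimple operand predicates and must be
   regimplified wherever it appears in a statement.  */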
8664 static tree
8665 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8666 void *data)
8668 tree t = *tp;
8670 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8671 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8672 return t;
8674 if (task_shared_vars
8675 && DECL_P (t)
8676 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8677 return t;
8679 /* If a global variable has been privatized, TREE_CONSTANT on
8680 ADDR_EXPR might be wrong. */
8681 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8682 recompute_tree_invariant_for_addr_expr (t);
8684 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8685 return NULL_TREE;
8688 /* Data to be communicated between lower_omp_regimplify_operands and
8689 lower_omp_regimplify_operands_p. */
8691 struct lower_omp_regimplify_operands_data
8693 omp_context *ctx;
8694 vec<tree> *decls;
8697 /* Helper function for lower_omp_regimplify_operands. Find
8698 omp_member_access_dummy_var vars and adjust temporarily their
8699 DECL_VALUE_EXPRs if needed. */
8701 static tree
8702 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8703 void *data)
8705 tree t = omp_member_access_dummy_var (*tp);
8706 if (t)
8708 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8709 lower_omp_regimplify_operands_data *ldata
8710 = (lower_omp_regimplify_operands_data *) wi->info;
8711 tree o = maybe_lookup_decl (t, ldata->ctx);
8712 if (o != t)
8714 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8715 ldata->decls->safe_push (*tp);
8716 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8717 SET_DECL_VALUE_EXPR (*tp, v);
8720 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8721 return NULL_TREE;
8724 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8725 of omp_member_access_dummy_var vars during regimplification. */
8727 static void
8728 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8729 gimple_stmt_iterator *gsi_p)
8731 auto_vec<tree, 10> decls;
8732 if (ctx)
8734 struct walk_stmt_info wi;
8735 memset (&wi, '\0', sizeof (wi));
8736 struct lower_omp_regimplify_operands_data data;
8737 data.ctx = ctx;
8738 data.decls = &decls;
8739 wi.info = &data;
8740 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8742 gimple_regimplify_operands (stmt, gsi_p);
8743 while (!decls.is_empty ())
8745 tree t = decls.pop ();
8746 tree v = decls.pop ();
8747 SET_DECL_VALUE_EXPR (t, v);
8751 static void
8752 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8754 gimple *stmt = gsi_stmt (*gsi_p);
8755 struct walk_stmt_info wi;
8756 gcall *call_stmt;
8758 if (gimple_has_location (stmt))
8759 input_location = gimple_location (stmt);
8761 if (task_shared_vars)
8762 memset (&wi, '\0', sizeof (wi));
8764 /* If we have issued syntax errors, avoid doing any heavy lifting.
8765 Just replace the OMP directives with a NOP to avoid
8766 confusing RTL expansion. */
8767 if (seen_error () && is_gimple_omp (stmt))
8769 gsi_replace (gsi_p, gimple_build_nop (), true);
8770 return;
8773 switch (gimple_code (stmt))
8775 case GIMPLE_COND:
8777 gcond *cond_stmt = as_a <gcond *> (stmt);
8778 if ((ctx || task_shared_vars)
8779 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8780 lower_omp_regimplify_p,
8781 ctx ? NULL : &wi, NULL)
8782 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8783 lower_omp_regimplify_p,
8784 ctx ? NULL : &wi, NULL)))
8785 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8787 break;
8788 case GIMPLE_CATCH:
8789 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8790 break;
8791 case GIMPLE_EH_FILTER:
8792 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8793 break;
8794 case GIMPLE_TRY:
8795 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8796 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8797 break;
8798 case GIMPLE_TRANSACTION:
8799 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8800 ctx);
8801 break;
8802 case GIMPLE_BIND:
8803 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8804 break;
8805 case GIMPLE_OMP_PARALLEL:
8806 case GIMPLE_OMP_TASK:
8807 ctx = maybe_lookup_ctx (stmt);
8808 gcc_assert (ctx);
8809 if (ctx->cancellable)
8810 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8811 lower_omp_taskreg (gsi_p, ctx);
8812 break;
8813 case GIMPLE_OMP_FOR:
8814 ctx = maybe_lookup_ctx (stmt);
8815 gcc_assert (ctx);
8816 if (ctx->cancellable)
8817 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8818 lower_omp_for (gsi_p, ctx);
8819 break;
8820 case GIMPLE_OMP_SECTIONS:
8821 ctx = maybe_lookup_ctx (stmt);
8822 gcc_assert (ctx);
8823 if (ctx->cancellable)
8824 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8825 lower_omp_sections (gsi_p, ctx);
8826 break;
8827 case GIMPLE_OMP_SINGLE:
8828 ctx = maybe_lookup_ctx (stmt);
8829 gcc_assert (ctx);
8830 lower_omp_single (gsi_p, ctx);
8831 break;
8832 case GIMPLE_OMP_MASTER:
8833 ctx = maybe_lookup_ctx (stmt);
8834 gcc_assert (ctx);
8835 lower_omp_master (gsi_p, ctx);
8836 break;
8837 case GIMPLE_OMP_TASKGROUP:
8838 ctx = maybe_lookup_ctx (stmt);
8839 gcc_assert (ctx);
8840 lower_omp_taskgroup (gsi_p, ctx);
8841 break;
8842 case GIMPLE_OMP_ORDERED:
8843 ctx = maybe_lookup_ctx (stmt);
8844 gcc_assert (ctx);
8845 lower_omp_ordered (gsi_p, ctx);
8846 break;
8847 case GIMPLE_OMP_CRITICAL:
8848 ctx = maybe_lookup_ctx (stmt);
8849 gcc_assert (ctx);
8850 lower_omp_critical (gsi_p, ctx);
8851 break;
8852 case GIMPLE_OMP_ATOMIC_LOAD:
8853 if ((ctx || task_shared_vars)
8854 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8855 as_a <gomp_atomic_load *> (stmt)),
8856 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8857 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8858 break;
8859 case GIMPLE_OMP_TARGET:
8860 ctx = maybe_lookup_ctx (stmt);
8861 gcc_assert (ctx);
8862 lower_omp_target (gsi_p, ctx);
8863 break;
8864 case GIMPLE_OMP_TEAMS:
8865 ctx = maybe_lookup_ctx (stmt);
8866 gcc_assert (ctx);
8867 lower_omp_teams (gsi_p, ctx);
8868 break;
8869 case GIMPLE_OMP_GRID_BODY:
8870 ctx = maybe_lookup_ctx (stmt);
8871 gcc_assert (ctx);
8872 lower_omp_grid_body (gsi_p, ctx);
8873 break;
8874 case GIMPLE_CALL:
8875 tree fndecl;
8876 call_stmt = as_a <gcall *> (stmt);
8877 fndecl = gimple_call_fndecl (call_stmt);
8878 if (fndecl
8879 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8880 switch (DECL_FUNCTION_CODE (fndecl))
8882 case BUILT_IN_GOMP_BARRIER:
8883 if (ctx == NULL)
8884 break;
8885 /* FALLTHRU */
8886 case BUILT_IN_GOMP_CANCEL:
8887 case BUILT_IN_GOMP_CANCELLATION_POINT:
8888 omp_context *cctx;
8889 cctx = ctx;
8890 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8891 cctx = cctx->outer;
8892 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8893 if (!cctx->cancellable)
8895 if (DECL_FUNCTION_CODE (fndecl)
8896 == BUILT_IN_GOMP_CANCELLATION_POINT)
8898 stmt = gimple_build_nop ();
8899 gsi_replace (gsi_p, stmt, false);
8901 break;
8903 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8905 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8906 gimple_call_set_fndecl (call_stmt, fndecl);
8907 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8909 tree lhs;
8910 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8911 gimple_call_set_lhs (call_stmt, lhs);
8912 tree fallthru_label;
8913 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8914 gimple *g;
8915 g = gimple_build_label (fallthru_label);
8916 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8917 g = gimple_build_cond (NE_EXPR, lhs,
8918 fold_convert (TREE_TYPE (lhs),
8919 boolean_false_node),
8920 cctx->cancel_label, fallthru_label);
8921 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8922 break;
8923 default:
8924 break;
8926 /* FALLTHRU */
8927 default:
8928 if ((ctx || task_shared_vars)
8929 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8930 ctx ? NULL : &wi))
 8932 /* Just remove clobbers; this should happen only if we have
 8933 "privatized" local addressable variables in SIMD regions.
 8934 The clobber isn't needed in that case, and gimplifying the address
 8935 of the ARRAY_REF into a pointer and creating a MEM_REF based
 8936 clobber would create worse code than we get with the clobber
 8937 dropped. */
8938 if (gimple_clobber_p (stmt))
8940 gsi_replace (gsi_p, gimple_build_nop (), true);
8941 break;
8943 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8945 break;
8949 static void
8950 lower_omp (gimple_seq *body, omp_context *ctx)
8952 location_t saved_location = input_location;
8953 gimple_stmt_iterator gsi;
8954 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8955 lower_omp_1 (&gsi, ctx);
 8956 /* During gimplification, we haven't folded statements inside offloading
8957 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8958 if (target_nesting_level || taskreg_nesting_level)
8959 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8960 fold_stmt (&gsi);
8961 input_location = saved_location;
8964 /* Main entry point. */
8966 static unsigned int
8967 execute_lower_omp (void)
8969 gimple_seq body;
8970 int i;
8971 omp_context *ctx;
8973 /* This pass always runs, to provide PROP_gimple_lomp.
8974 But often, there is nothing to do. */
8975 if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8976 && flag_openmp_simd == 0)
8977 return 0;
8979 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8980 delete_omp_context);
8982 body = gimple_body (current_function_decl);
8984 if (hsa_gen_requested_p ())
8985 omp_grid_gridify_all_targets (&body);
8987 scan_omp (&body, NULL);
8988 gcc_assert (taskreg_nesting_level == 0);
8989 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8990 finish_taskreg_scan (ctx);
8991 taskreg_contexts.release ();
8993 if (all_contexts->root)
8995 if (task_shared_vars)
8996 push_gimplify_context ();
8997 lower_omp (&body, NULL);
8998 if (task_shared_vars)
8999 pop_gimplify_context (NULL);
9002 if (all_contexts)
9004 splay_tree_delete (all_contexts);
9005 all_contexts = NULL;
9007 BITMAP_FREE (task_shared_vars);
9008 return 0;
9011 namespace {
9013 const pass_data pass_data_lower_omp =
9015 GIMPLE_PASS, /* type */
9016 "omplower", /* name */
9017 OPTGROUP_OMP, /* optinfo_flags */
9018 TV_NONE, /* tv_id */
9019 PROP_gimple_any, /* properties_required */
9020 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9021 0, /* properties_destroyed */
9022 0, /* todo_flags_start */
9023 0, /* todo_flags_finish */
9026 class pass_lower_omp : public gimple_opt_pass
9028 public:
9029 pass_lower_omp (gcc::context *ctxt)
9030 : gimple_opt_pass (pass_data_lower_omp, ctxt)
9033 /* opt_pass methods: */
9034 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9036 }; // class pass_lower_omp
9038 } // anon namespace
9040 gimple_opt_pass *
9041 make_pass_lower_omp (gcc::context *ctxt)
9043 return new pass_lower_omp (ctxt);
9046 /* The following is a utility to diagnose structured block violations.
9047 It is not part of the "omplower" pass, as that's invoked too late. It
9048 should be invoked by the respective front ends after gimplification. */
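/* For example (assuming -fopenmp):

     #pragma omp parallel
     { goto out; }
   out:;

   branches from inside the parallel construct to a label outside it, so
   diagnose_sb_2 reports "invalid branch to/from OpenMP structured block"
   for the goto.  */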
9050 static splay_tree all_labels;
9052 /* Check for mismatched contexts and generate an error if needed. Return
9053 true if an error is detected. */
9055 static bool
9056 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9057 gimple *branch_ctx, gimple *label_ctx)
9059 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9060 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9062 if (label_ctx == branch_ctx)
9063 return false;
9065 const char* kind = NULL;
9067 if (flag_cilkplus)
9069 if ((branch_ctx
9070 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
9071 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
9072 || (label_ctx
9073 && gimple_code (label_ctx) == GIMPLE_OMP_FOR
9074 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
9075 kind = "Cilk Plus";
9077 if (flag_openacc)
9079 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9080 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9082 gcc_checking_assert (kind == NULL);
9083 kind = "OpenACC";
9086 if (kind == NULL)
9088 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9089 kind = "OpenMP";
9092 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9093 so we could traverse it and issue a correct "exit" or "enter" error
9094 message upon a structured block violation.
9096 We built the context by building a list with tree_cons'ing, but there is
9097 no easy counterpart in gimple tuples. It seems like far too much work
9098 for issuing exit/enter error messages. If someone really misses the
9099 distinct error message... patches welcome. */
9101 #if 0
 9102 /* Try to avoid confusing the user by producing an error message
9103 with correct "exit" or "enter" verbiage. We prefer "exit"
9104 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9105 if (branch_ctx == NULL)
9106 exit_p = false;
9107 else
9109 while (label_ctx)
9111 if (TREE_VALUE (label_ctx) == branch_ctx)
9113 exit_p = false;
9114 break;
9116 label_ctx = TREE_CHAIN (label_ctx);
9120 if (exit_p)
9121 error ("invalid exit from %s structured block", kind);
9122 else
9123 error ("invalid entry to %s structured block", kind);
9124 #endif
9126 /* If it's obvious we have an invalid entry, be specific about the error. */
9127 if (branch_ctx == NULL)
9128 error ("invalid entry to %s structured block", kind);
9129 else
9131 /* Otherwise, be vague and lazy, but efficient. */
9132 error ("invalid branch to/from %s structured block", kind);
9135 gsi_replace (gsi_p, gimple_build_nop (), false);
9136 return true;
9139 /* Pass 1: Create a minimal tree of structured blocks, and record
9140 where each label is found. */
9142 static tree
9143 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9144 struct walk_stmt_info *wi)
9146 gimple *context = (gimple *) wi->info;
9147 gimple *inner_context;
9148 gimple *stmt = gsi_stmt (*gsi_p);
9150 *handled_ops_p = true;
9152 switch (gimple_code (stmt))
9154 WALK_SUBSTMTS;
9156 case GIMPLE_OMP_PARALLEL:
9157 case GIMPLE_OMP_TASK:
9158 case GIMPLE_OMP_SECTIONS:
9159 case GIMPLE_OMP_SINGLE:
9160 case GIMPLE_OMP_SECTION:
9161 case GIMPLE_OMP_MASTER:
9162 case GIMPLE_OMP_ORDERED:
9163 case GIMPLE_OMP_CRITICAL:
9164 case GIMPLE_OMP_TARGET:
9165 case GIMPLE_OMP_TEAMS:
9166 case GIMPLE_OMP_TASKGROUP:
9167 /* The minimal context here is just the current OMP construct. */
9168 inner_context = stmt;
9169 wi->info = inner_context;
9170 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9171 wi->info = context;
9172 break;
9174 case GIMPLE_OMP_FOR:
9175 inner_context = stmt;
9176 wi->info = inner_context;
9177 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9178 walk them. */
9179 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9180 diagnose_sb_1, NULL, wi);
9181 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9182 wi->info = context;
9183 break;
9185 case GIMPLE_LABEL:
9186 splay_tree_insert (all_labels,
9187 (splay_tree_key) gimple_label_label (
9188 as_a <glabel *> (stmt)),
9189 (splay_tree_value) context);
9190 break;
9192 default:
9193 break;
9196 return NULL_TREE;
9199 /* Pass 2: Check each branch and see if its context differs from that of
9200 the destination label's context. */
9202 static tree
9203 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9204 struct walk_stmt_info *wi)
9206 gimple *context = (gimple *) wi->info;
9207 splay_tree_node n;
9208 gimple *stmt = gsi_stmt (*gsi_p);
9210 *handled_ops_p = true;
9212 switch (gimple_code (stmt))
9214 WALK_SUBSTMTS;
9216 case GIMPLE_OMP_PARALLEL:
9217 case GIMPLE_OMP_TASK:
9218 case GIMPLE_OMP_SECTIONS:
9219 case GIMPLE_OMP_SINGLE:
9220 case GIMPLE_OMP_SECTION:
9221 case GIMPLE_OMP_MASTER:
9222 case GIMPLE_OMP_ORDERED:
9223 case GIMPLE_OMP_CRITICAL:
9224 case GIMPLE_OMP_TARGET:
9225 case GIMPLE_OMP_TEAMS:
9226 case GIMPLE_OMP_TASKGROUP:
9227 wi->info = stmt;
9228 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9229 wi->info = context;
9230 break;
9232 case GIMPLE_OMP_FOR:
9233 wi->info = stmt;
9234 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9235 walk them. */
9236 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9237 diagnose_sb_2, NULL, wi);
9238 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9239 wi->info = context;
9240 break;
9242 case GIMPLE_COND:
9244 gcond *cond_stmt = as_a <gcond *> (stmt);
9245 tree lab = gimple_cond_true_label (cond_stmt);
9246 if (lab)
9248 n = splay_tree_lookup (all_labels,
9249 (splay_tree_key) lab);
9250 diagnose_sb_0 (gsi_p, context,
9251 n ? (gimple *) n->value : NULL);
9253 lab = gimple_cond_false_label (cond_stmt);
9254 if (lab)
9256 n = splay_tree_lookup (all_labels,
9257 (splay_tree_key) lab);
9258 diagnose_sb_0 (gsi_p, context,
9259 n ? (gimple *) n->value : NULL);
9262 break;
9264 case GIMPLE_GOTO:
9266 tree lab = gimple_goto_dest (stmt);
9267 if (TREE_CODE (lab) != LABEL_DECL)
9268 break;
9270 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9271 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9273 break;
9275 case GIMPLE_SWITCH:
9277 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9278 unsigned int i;
9279 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9281 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9282 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9283 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9284 break;
9287 break;
9289 case GIMPLE_RETURN:
9290 diagnose_sb_0 (gsi_p, context, NULL);
9291 break;
9293 default:
9294 break;
9297 return NULL_TREE;
9300 static unsigned int
9301 diagnose_omp_structured_block_errors (void)
9303 struct walk_stmt_info wi;
9304 gimple_seq body = gimple_body (current_function_decl);
9306 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9308 memset (&wi, 0, sizeof (wi));
9309 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9311 memset (&wi, 0, sizeof (wi));
9312 wi.want_locations = true;
9313 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9315 gimple_set_body (current_function_decl, body);
9317 splay_tree_delete (all_labels);
9318 all_labels = NULL;
9320 return 0;
9323 namespace {
9325 const pass_data pass_data_diagnose_omp_blocks =
9327 GIMPLE_PASS, /* type */
9328 "*diagnose_omp_blocks", /* name */
9329 OPTGROUP_OMP, /* optinfo_flags */
9330 TV_NONE, /* tv_id */
9331 PROP_gimple_any, /* properties_required */
9332 0, /* properties_provided */
9333 0, /* properties_destroyed */
9334 0, /* todo_flags_start */
9335 0, /* todo_flags_finish */
9338 class pass_diagnose_omp_blocks : public gimple_opt_pass
9340 public:
9341 pass_diagnose_omp_blocks (gcc::context *ctxt)
9342 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9345 /* opt_pass methods: */
9346 virtual bool gate (function *)
9348 return flag_cilkplus || flag_openacc || flag_openmp || flag_openmp_simd;
9350 virtual unsigned int execute (function *)
9352 return diagnose_omp_structured_block_errors ();
9355 }; // class pass_diagnose_omp_blocks
9357 } // anon namespace
9359 gimple_opt_pass *
9360 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9362 return new pass_diagnose_omp_blocks (ctxt);
9366 #include "gt-omp-low.h"