/* gcc/omp-low.c */
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
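
/* For illustration (a minimal, hypothetical example, not part of the GCC
   sources): a user translation unit such as

       void
       f (int n)
       {
	 int sum = 0;
       #pragma omp parallel for reduction (+:sum)
	 for (int i = 0; i < n; i++)
	   sum += i;
       }

   is scanned here for data-sharing requirements, and a per-construct
   record type (conventionally named .omp_data_s) is built for the
   variables communicated into the region; pass_expand_omp later outlines
   the region into a child function (named along the lines of f._omp_fn.0)
   invoked through libgomp's GOMP_parallel.  */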
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}

/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}

/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}

/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered yet; the plain form asserts that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
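
/* For illustration (a hypothetical example, simplified from the rules
   above, not part of the GCC sources): given

       int a = 1;	// scalar, address not taken: copy-in/out is fine
       int b[10];	// aggregate type: always passed by pointer
       int *p = &a;	// taking &a makes A addressable: by pointer
       #pragma omp parallel shared (a, b)
	 ...

   use_pointer_for_field would return false for A only while its address
   is not taken and it is not visible from an outer scope; B, having
   aggregate type, is always communicated through a pointer field, and
   once &a is taken, A is as well.  */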
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable just because the task
     needs to take its address.  But we don't need to take the address
     of privatized copies of that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}
/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}
/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
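
/* For illustration (a hypothetical example, not part of the GCC sources):
   an outer reference of the simd kind handled above arises for

       #pragma omp parallel private (x)
	 {
       #pragma omp simd linear (x:1)
	   for (...) ...
	 }

   where the simd construct must initialize its linear variable from the
   enclosing parallel's private copy, i.e. the "outer" variable is itself
   a remapped decl rather than the original.  */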
/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
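
/* For illustration (a hypothetical layout, not part of the GCC sources;
   the actual layout is decided later by layout_type): for

       #pragma omp parallel shared (big) firstprivate (n)

   where BIG is an addressable struct S and N a plain int, the record
   built up by install_var_field would resemble

       struct .omp_data_s { struct S *big; int n; };

   with BIG entered as a pointer field (by_ref) and N by value.  */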
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}
/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it here,
     before it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
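
/* For illustration (a hypothetical example, not part of the GCC sources):
   the field-by-field remap above matters for variably modified types, e.g.

       void f (int n)
       {
	 int vla[n];
       #pragma omp parallel shared (vla)
	 ...
       }

   where the type of the VLA field references N (through its saved size
   expression) and so must be remapped to refer to the child function's
   copy of N rather than the parent's.  */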
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
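
/* For illustration (hypothetical examples, not part of the GCC sources):
   the MAP handling above distinguishes, for instance,

       #pragma omp target map (tofrom: a[0:n])

   where the array section A[0:N] is not a DECL and gets an anonymous
   pointer field in the record, from

       #pragma omp target map (tofrom: x)

   where X is a DECL and is installed both as a field and, for offloaded
   regions, as a local remapping in the child function.  */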
/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
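
/* For illustration (a hedged sketch, not part of the GCC sources): for a
   parallel region in foo, the decl built above corresponds to a function
   of roughly the shape

       static void foo._omp_fn.0 (void *.omp_data_i);

   where .omp_data_i later gets the restrict-qualified record pointer
   type from fixup_child_record_type, while a task copy function
   (task_copy == true) takes destination and source pointers:

       static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   the names follow create_omp_child_function_name above.  */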
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
/* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
}
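
/* For illustration (a hypothetical example, not part of the GCC sources):
   for a combined construct such as

       #pragma omp parallel for schedule (static)
       for (i = 0; i < n; i++) ...

   the parallel statement receives two _looptemp_ clauses (istart/iend in
   fd.iter_type) so the expanded parallel can hand precomputed loop bounds
   to the workshared loop; collapsed loop nests with non-constant bounds
   add one temporary per extra collapsed dimension, as described above.  */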
/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
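
/* For illustration (a hedged note, not part of the GCC sources): if the
   task body references only global variables, nothing is captured, the
   record type ends up with no fields, and arg_size/arg_align degenerate
   to 0/1 as set above, telling GOMP_task that no argument block needs to
   be allocated or copied.  */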
/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
   tree, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}
1873 /* If any decls have been made addressable during scan_omp,
1874 adjust their fields if needed, and layout record types
1875 of parallel/task constructs. */
1877 static void
1878 finish_taskreg_scan (omp_context *ctx)
1880 if (ctx->record_type == NULL_TREE)
1881 return;
1883 /* If any task_shared_vars were needed, verify all
1884 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1885 statements if use_pointer_for_field hasn't changed
1886 because of that. If it did, update field types now. */
1887 if (task_shared_vars)
1889 tree c;
1891 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1892 c; c = OMP_CLAUSE_CHAIN (c))
1893 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1894 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1896 tree decl = OMP_CLAUSE_DECL (c);
1898 /* Global variables don't need to be copied,
1899 the receiver side will use them directly. */
1900 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1901 continue;
1902 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1903 || !use_pointer_for_field (decl, ctx))
1904 continue;
1905 tree field = lookup_field (decl, ctx);
1906 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1907 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1908 continue;
1909 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1910 TREE_THIS_VOLATILE (field) = 0;
1911 DECL_USER_ALIGN (field) = 0;
1912 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1913 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1914 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1915 if (ctx->srecord_type)
1917 tree sfield = lookup_sfield (decl, ctx);
1918 TREE_TYPE (sfield) = TREE_TYPE (field);
1919 TREE_THIS_VOLATILE (sfield) = 0;
1920 DECL_USER_ALIGN (sfield) = 0;
1921 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1922 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1923 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1928 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1930 layout_type (ctx->record_type);
1931 fixup_child_record_type (ctx);
1933 else
1935 location_t loc = gimple_location (ctx->stmt);
1936 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1937 /* Move VLA fields to the end. */
1938 p = &TYPE_FIELDS (ctx->record_type);
1939 while (*p)
1940 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1941 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1943 *q = *p;
1944 *p = TREE_CHAIN (*p);
1945 TREE_CHAIN (*q) = NULL_TREE;
1946 q = &TREE_CHAIN (*q);
1948 else
1949 p = &DECL_CHAIN (*p);
1950 *p = vla_fields;
1951 if (gimple_omp_task_taskloop_p (ctx->stmt))
1953 	  /* Move the fields corresponding to the first and second _looptemp_
1954 	     clauses to the front.  These are filled in by GOMP_taskloop
1955 	     and thus need to be at specific positions.  */
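	  /* For instance, a hypothetical field list { a; f1; b; f2 }
	     becomes { f1; f2; a; b } after the unlink/relink below.  */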
1956 tree c1 = gimple_omp_task_clauses (ctx->stmt);
1957 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1958 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1959 OMP_CLAUSE__LOOPTEMP_);
1960 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1961 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1962 p = &TYPE_FIELDS (ctx->record_type);
1963 while (*p)
1964 if (*p == f1 || *p == f2)
1965 *p = DECL_CHAIN (*p);
1966 else
1967 p = &DECL_CHAIN (*p);
1968 DECL_CHAIN (f1) = f2;
1969 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1970 TYPE_FIELDS (ctx->record_type) = f1;
1971 if (ctx->srecord_type)
1973 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1974 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1975 p = &TYPE_FIELDS (ctx->srecord_type);
1976 while (*p)
1977 if (*p == f1 || *p == f2)
1978 *p = DECL_CHAIN (*p);
1979 else
1980 p = &DECL_CHAIN (*p);
1981 DECL_CHAIN (f1) = f2;
1982 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
1983 TYPE_FIELDS (ctx->srecord_type) = f1;
1986 layout_type (ctx->record_type);
1987 fixup_child_record_type (ctx);
1988 if (ctx->srecord_type)
1989 layout_type (ctx->srecord_type);
1990 tree t = fold_convert_loc (loc, long_integer_type_node,
1991 TYPE_SIZE_UNIT (ctx->record_type));
1992 if (TREE_CODE (t) != INTEGER_CST)
1994 t = unshare_expr (t);
1995 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
1997 gimple_omp_task_set_arg_size (ctx->stmt, t);
1998 t = build_int_cst (long_integer_type_node,
1999 TYPE_ALIGN_UNIT (ctx->record_type));
2000 gimple_omp_task_set_arg_align (ctx->stmt, t);
2004 /* Find the enclosing offload context. */
2006 static omp_context *
2007 enclosing_target_ctx (omp_context *ctx)
2009 for (; ctx; ctx = ctx->outer)
2010 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2011 break;
2013 return ctx;
2016 /* Return true if ctx is part of an oacc kernels region. */
2018 static bool
2019 ctx_in_oacc_kernels_region (omp_context *ctx)
2021   for (; ctx != NULL; ctx = ctx->outer)
2023 gimple *stmt = ctx->stmt;
2024 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2025 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2026 return true;
2029 return false;
2032 /* Check the parallelism clauses inside a kernels region.
2033 Until kernels handling moves to use the same loop indirection
2034 scheme as parallel, we need to do this checking early. */
2036 static unsigned
2037 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2039 bool checking = true;
2040 unsigned outer_mask = 0;
2041 unsigned this_mask = 0;
2042 bool has_seq = false, has_auto = false;
2044 if (ctx->outer)
2045 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2046 if (!stmt)
2048 checking = false;
2049 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2050 return outer_mask;
2051 stmt = as_a <gomp_for *> (ctx->stmt);
2054 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2056 switch (OMP_CLAUSE_CODE (c))
2058 case OMP_CLAUSE_GANG:
2059 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2060 break;
2061 case OMP_CLAUSE_WORKER:
2062 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2063 break;
2064 case OMP_CLAUSE_VECTOR:
2065 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2066 break;
2067 case OMP_CLAUSE_SEQ:
2068 has_seq = true;
2069 break;
2070 case OMP_CLAUSE_AUTO:
2071 has_auto = true;
2072 break;
2073 default:
2074 break;
2078 if (checking)
2080 if (has_seq && (this_mask || has_auto))
2081 error_at (gimple_location (stmt), "%<seq%> overrides other"
2082 " OpenACC loop specifiers");
2083 else if (has_auto && this_mask)
2084 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2085 " OpenACC loop specifiers");
2087 if (this_mask & outer_mask)
2088 error_at (gimple_location (stmt), "inner loop uses same"
2089 " OpenACC parallelism as containing loop");
2092 return outer_mask | this_mask;
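/* As an example of what the mask checking above rejects:

     #pragma acc kernels
     #pragma acc loop gang
     for (i = 0; i < n; i++)
       #pragma acc loop gang	/* error: inner loop uses same OpenACC
				   parallelism as containing loop  */
       for (j = 0; j < m; j++)
	 ;

   because GOMP_DIM_MASK (GOMP_DIM_GANG) is already set in OUTER_MASK by
   the time the inner loop is checked.  */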
2095 /* Scan a GIMPLE_OMP_FOR. */
2097 static omp_context *
2098 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2100 omp_context *ctx;
2101 size_t i;
2102 tree clauses = gimple_omp_for_clauses (stmt);
2104 ctx = new_omp_context (stmt, outer_ctx);
2106 if (is_gimple_omp_oacc (stmt))
2108 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2110 if (!tgt || is_oacc_parallel (tgt))
2111 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2113 char const *check = NULL;
2115 switch (OMP_CLAUSE_CODE (c))
2117 case OMP_CLAUSE_GANG:
2118 check = "gang";
2119 break;
2121 case OMP_CLAUSE_WORKER:
2122 check = "worker";
2123 break;
2125 case OMP_CLAUSE_VECTOR:
2126 check = "vector";
2127 break;
2129 default:
2130 break;
2133 if (check && OMP_CLAUSE_OPERAND (c, 0))
2134 error_at (gimple_location (stmt),
2135 "argument not permitted on %qs clause in"
2136 " OpenACC %<parallel%>", check);
2139 if (tgt && is_oacc_kernels (tgt))
2141 /* Strip out reductions, as they are not handled yet. */
2142 tree *prev_ptr = &clauses;
2144 while (tree probe = *prev_ptr)
2146 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2148 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2149 *prev_ptr = *next_ptr;
2150 else
2151 prev_ptr = next_ptr;
2154 gimple_omp_for_set_clauses (stmt, clauses);
2155 check_oacc_kernel_gwv (stmt, ctx);
2159 scan_sharing_clauses (clauses, ctx);
2161 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2162 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2164 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2165 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2166 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2167 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2169 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2170 return ctx;
2173 /* Duplicate #pragma omp simd: one copy for SIMT, another one for SIMD. */
2175 static void
2176 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2177 omp_context *outer_ctx)
2179 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2180 gsi_replace (gsi, bind, false);
2181 gimple_seq seq = NULL;
2182 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2183 tree cond = create_tmp_var_raw (integer_type_node);
2184 DECL_CONTEXT (cond) = current_function_decl;
2185 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2186 gimple_bind_set_vars (bind, cond);
2187 gimple_call_set_lhs (g, cond);
2188 gimple_seq_add_stmt (&seq, g);
2189 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2190 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2191 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2192 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2193 gimple_seq_add_stmt (&seq, g);
2194 g = gimple_build_label (lab1);
2195 gimple_seq_add_stmt (&seq, g);
2196 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2197 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2198 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2199 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2200 gimple_omp_for_set_clauses (new_stmt, clause);
2201 gimple_seq_add_stmt (&seq, new_stmt);
2202 g = gimple_build_goto (lab3);
2203 gimple_seq_add_stmt (&seq, g);
2204 g = gimple_build_label (lab2);
2205 gimple_seq_add_stmt (&seq, g);
2206 gimple_seq_add_stmt (&seq, stmt);
2207 g = gimple_build_label (lab3);
2208 gimple_seq_add_stmt (&seq, g);
2209 gimple_bind_set_body (bind, seq);
2210 update_stmt (bind);
2211 scan_omp_for (new_stmt, outer_ctx);
2212 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
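/* The bind built above has roughly this shape:

     D.cond = .GOMP_USE_SIMT ();
     if (D.cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, carrying an extra _simt_ clause>; goto lab3;
     lab2: <the original simd loop>;
     lab3:

   so the SIMT variant is selected at run time on SIMT-capable targets and
   the plain SIMD variant everywhere else.  */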
2215 /* Scan an OpenMP sections directive. */
2217 static void
2218 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2220 omp_context *ctx;
2222 ctx = new_omp_context (stmt, outer_ctx);
2223 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2224 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2227 /* Scan an OpenMP single directive. */
2229 static void
2230 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2232 omp_context *ctx;
2233 tree name;
2235 ctx = new_omp_context (stmt, outer_ctx);
2236 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2237 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2238 name = create_tmp_var_name (".omp_copy_s");
2239 name = build_decl (gimple_location (stmt),
2240 TYPE_DECL, name, ctx->record_type);
2241 TYPE_NAME (ctx->record_type) = name;
2243 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2244 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2246 if (TYPE_FIELDS (ctx->record_type) == NULL)
2247 ctx->record_type = NULL;
2248 else
2249 layout_type (ctx->record_type);
2252 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2253 used in the corresponding offloaded function are restrict. */
2255 static bool
2256 omp_target_base_pointers_restrict_p (tree clauses)
2258 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2259 used by OpenACC. */
2260 if (flag_openacc == 0)
2261 return false;
2263 /* I. Basic example:
2265 void foo (void)
2267 unsigned int a[2], b[2];
2269 #pragma acc kernels \
2270 copyout (a) \
2271 copyout (b)
2273 a[0] = 0;
2274 b[0] = 1;
2278 After gimplification, we have:
2280 #pragma omp target oacc_kernels \
2281 map(force_from:a [len: 8]) \
2282 map(force_from:b [len: 8])
2284 a[0] = 0;
2285 b[0] = 1;
2288 Because both mappings have the force prefix, we know that they will be
2289 allocated when calling the corresponding offloaded function, which means we
2290 can mark the base pointers for a and b in the offloaded function as
2291 restrict. */
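/* Conversely, a clause that is not a GOMP_MAP_FORCE_* map (e.g. a plain
   present-or-copy mapping) may reuse an existing device allocation, so the
   loop below conservatively returns false as soon as it sees one.  */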
2293 tree c;
2294 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2296 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2297 return false;
2299 switch (OMP_CLAUSE_MAP_KIND (c))
2301 case GOMP_MAP_FORCE_ALLOC:
2302 case GOMP_MAP_FORCE_TO:
2303 case GOMP_MAP_FORCE_FROM:
2304 case GOMP_MAP_FORCE_TOFROM:
2305 break;
2306 default:
2307 return false;
2311 return true;
2314 /* Scan a GIMPLE_OMP_TARGET. */
2316 static void
2317 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2319 omp_context *ctx;
2320 tree name;
2321 bool offloaded = is_gimple_omp_offloaded (stmt);
2322 tree clauses = gimple_omp_target_clauses (stmt);
2324 ctx = new_omp_context (stmt, outer_ctx);
2325 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2326 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2327 name = create_tmp_var_name (".omp_data_t");
2328 name = build_decl (gimple_location (stmt),
2329 TYPE_DECL, name, ctx->record_type);
2330 DECL_ARTIFICIAL (name) = 1;
2331 DECL_NAMELESS (name) = 1;
2332 TYPE_NAME (ctx->record_type) = name;
2333 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2335 bool base_pointers_restrict = false;
2336 if (offloaded)
2338 create_omp_child_function (ctx, false);
2339 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2341 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2342 if (base_pointers_restrict
2343 && dump_file && (dump_flags & TDF_DETAILS))
2344 fprintf (dump_file,
2345 "Base pointers in offloaded function are restrict\n");
2348 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2349 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2351 if (TYPE_FIELDS (ctx->record_type) == NULL)
2352 ctx->record_type = ctx->receiver_decl = NULL;
2353 else
2355 TYPE_FIELDS (ctx->record_type)
2356 = nreverse (TYPE_FIELDS (ctx->record_type));
2357 if (flag_checking)
2359 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2360 for (tree field = TYPE_FIELDS (ctx->record_type);
2361 field;
2362 field = DECL_CHAIN (field))
2363 gcc_assert (DECL_ALIGN (field) == align);
2365 layout_type (ctx->record_type);
2366 if (offloaded)
2367 fixup_child_record_type (ctx);
2371 /* Scan an OpenMP teams directive. */
2373 static void
2374 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2376 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2377 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2378 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2381 /* Check nesting restrictions for STMT in context CTX; diagnose and return false on violation. */
2382 static bool
2383 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2385 tree c;
2387 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2388 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2389 the original copy of its contents. */
2390 return true;
2392 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2393 inside an OpenACC CTX. */
2394 if (!(is_gimple_omp (stmt)
2395 && is_gimple_omp_oacc (stmt))
2396 /* Except for atomic codes that we share with OpenMP. */
2397 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2398 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2400 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2402 error_at (gimple_location (stmt),
2403 "non-OpenACC construct inside of OpenACC routine");
2404 return false;
2406 else
2407 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2408 if (is_gimple_omp (octx->stmt)
2409 && is_gimple_omp_oacc (octx->stmt))
2411 error_at (gimple_location (stmt),
2412 "non-OpenACC construct inside of OpenACC region");
2413 return false;
2417 if (ctx != NULL)
2419 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2420 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2422 c = NULL_TREE;
2423 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2425 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2426 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2428 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2429 && (ctx->outer == NULL
2430 || !gimple_omp_for_combined_into_p (ctx->stmt)
2431 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2432 || (gimple_omp_for_kind (ctx->outer->stmt)
2433 != GF_OMP_FOR_KIND_FOR)
2434 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2436 error_at (gimple_location (stmt),
2437 "%<ordered simd threads%> must be closely "
2438 "nested inside of %<for simd%> region");
2439 return false;
2441 return true;
2444 error_at (gimple_location (stmt),
2445 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2446 " may not be nested inside %<simd%> region");
2447 return false;
2449 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2451 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2452 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2453 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2454 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2456 error_at (gimple_location (stmt),
2457 "only %<distribute%> or %<parallel%> regions are "
2458 "allowed to be strictly nested inside %<teams%> "
2459 "region");
2460 return false;
2464 switch (gimple_code (stmt))
2466 case GIMPLE_OMP_FOR:
2467 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2468 return true;
2469 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2471 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2473 error_at (gimple_location (stmt),
2474 "%<distribute%> region must be strictly nested "
2475 "inside %<teams%> construct");
2476 return false;
2478 return true;
2480 	/* We split a taskloop into a task with a nested taskloop inside it. */
2481 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2482 return true;
2483 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2485 bool ok = false;
2487 if (ctx)
2488 switch (gimple_code (ctx->stmt))
2490 case GIMPLE_OMP_FOR:
2491 ok = (gimple_omp_for_kind (ctx->stmt)
2492 == GF_OMP_FOR_KIND_OACC_LOOP);
2493 break;
2495 case GIMPLE_OMP_TARGET:
2496 switch (gimple_omp_target_kind (ctx->stmt))
2498 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2499 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2500 ok = true;
2501 break;
2503 default:
2504 break;
2507 default:
2508 break;
2510 else if (oacc_get_fn_attrib (current_function_decl))
2511 ok = true;
2512 if (!ok)
2514 error_at (gimple_location (stmt),
2515 "OpenACC loop directive must be associated with"
2516 " an OpenACC compute region");
2517 return false;
2520 /* FALLTHRU */
2521 case GIMPLE_CALL:
2522 if (is_gimple_call (stmt)
2523 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2524 == BUILT_IN_GOMP_CANCEL
2525 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2526 == BUILT_IN_GOMP_CANCELLATION_POINT))
2528 const char *bad = NULL;
2529 const char *kind = NULL;
2530 const char *construct
2531 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2532 == BUILT_IN_GOMP_CANCEL)
2533 ? "#pragma omp cancel"
2534 : "#pragma omp cancellation point";
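	/* The first argument of the builtin encodes which construct is being
	   cancelled: 1 parallel, 2 for, 4 sections, 8 taskgroup.  E.g.
	   #pragma omp cancel for is expanded into GOMP_cancel (2, ...).  */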
2535 if (ctx == NULL)
2537 error_at (gimple_location (stmt), "orphaned %qs construct",
2538 construct);
2539 return false;
2541 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2542 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2543 : 0)
2545 case 1:
2546 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2547 bad = "#pragma omp parallel";
2548 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2549 == BUILT_IN_GOMP_CANCEL
2550 && !integer_zerop (gimple_call_arg (stmt, 1)))
2551 ctx->cancellable = true;
2552 kind = "parallel";
2553 break;
2554 case 2:
2555 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2556 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2557 bad = "#pragma omp for";
2558 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2559 == BUILT_IN_GOMP_CANCEL
2560 && !integer_zerop (gimple_call_arg (stmt, 1)))
2562 ctx->cancellable = true;
2563 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2564 OMP_CLAUSE_NOWAIT))
2565 warning_at (gimple_location (stmt), 0,
2566 "%<#pragma omp cancel for%> inside "
2567 "%<nowait%> for construct");
2568 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2569 OMP_CLAUSE_ORDERED))
2570 warning_at (gimple_location (stmt), 0,
2571 "%<#pragma omp cancel for%> inside "
2572 "%<ordered%> for construct");
2574 kind = "for";
2575 break;
2576 case 4:
2577 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2578 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2579 bad = "#pragma omp sections";
2580 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2581 == BUILT_IN_GOMP_CANCEL
2582 && !integer_zerop (gimple_call_arg (stmt, 1)))
2584 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2586 ctx->cancellable = true;
2587 if (omp_find_clause (gimple_omp_sections_clauses
2588 (ctx->stmt),
2589 OMP_CLAUSE_NOWAIT))
2590 warning_at (gimple_location (stmt), 0,
2591 "%<#pragma omp cancel sections%> inside "
2592 "%<nowait%> sections construct");
2594 else
2596 gcc_assert (ctx->outer
2597 && gimple_code (ctx->outer->stmt)
2598 == GIMPLE_OMP_SECTIONS);
2599 ctx->outer->cancellable = true;
2600 if (omp_find_clause (gimple_omp_sections_clauses
2601 (ctx->outer->stmt),
2602 OMP_CLAUSE_NOWAIT))
2603 warning_at (gimple_location (stmt), 0,
2604 "%<#pragma omp cancel sections%> inside "
2605 "%<nowait%> sections construct");
2608 kind = "sections";
2609 break;
2610 case 8:
2611 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2612 bad = "#pragma omp task";
2613 else
2615 for (omp_context *octx = ctx->outer;
2616 octx; octx = octx->outer)
2618 switch (gimple_code (octx->stmt))
2620 case GIMPLE_OMP_TASKGROUP:
2621 break;
2622 case GIMPLE_OMP_TARGET:
2623 if (gimple_omp_target_kind (octx->stmt)
2624 != GF_OMP_TARGET_KIND_REGION)
2625 continue;
2626 /* FALLTHRU */
2627 case GIMPLE_OMP_PARALLEL:
2628 case GIMPLE_OMP_TEAMS:
2629 error_at (gimple_location (stmt),
2630 "%<%s taskgroup%> construct not closely "
2631 "nested inside of %<taskgroup%> region",
2632 construct);
2633 return false;
2634 default:
2635 continue;
2637 break;
2639 ctx->cancellable = true;
2641 kind = "taskgroup";
2642 break;
2643 default:
2644 error_at (gimple_location (stmt), "invalid arguments");
2645 return false;
2647 if (bad)
2649 error_at (gimple_location (stmt),
2650 "%<%s %s%> construct not closely nested inside of %qs",
2651 construct, kind, bad);
2652 return false;
2655 /* FALLTHRU */
2656 case GIMPLE_OMP_SECTIONS:
2657 case GIMPLE_OMP_SINGLE:
2658 for (; ctx != NULL; ctx = ctx->outer)
2659 switch (gimple_code (ctx->stmt))
2661 case GIMPLE_OMP_FOR:
2662 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2663 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2664 break;
2665 /* FALLTHRU */
2666 case GIMPLE_OMP_SECTIONS:
2667 case GIMPLE_OMP_SINGLE:
2668 case GIMPLE_OMP_ORDERED:
2669 case GIMPLE_OMP_MASTER:
2670 case GIMPLE_OMP_TASK:
2671 case GIMPLE_OMP_CRITICAL:
2672 if (is_gimple_call (stmt))
2674 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2675 != BUILT_IN_GOMP_BARRIER)
2676 return true;
2677 error_at (gimple_location (stmt),
2678 "barrier region may not be closely nested inside "
2679 "of work-sharing, %<critical%>, %<ordered%>, "
2680 "%<master%>, explicit %<task%> or %<taskloop%> "
2681 "region");
2682 return false;
2684 error_at (gimple_location (stmt),
2685 "work-sharing region may not be closely nested inside "
2686 "of work-sharing, %<critical%>, %<ordered%>, "
2687 "%<master%>, explicit %<task%> or %<taskloop%> region");
2688 return false;
2689 case GIMPLE_OMP_PARALLEL:
2690 case GIMPLE_OMP_TEAMS:
2691 return true;
2692 case GIMPLE_OMP_TARGET:
2693 if (gimple_omp_target_kind (ctx->stmt)
2694 == GF_OMP_TARGET_KIND_REGION)
2695 return true;
2696 break;
2697 default:
2698 break;
2700 break;
2701 case GIMPLE_OMP_MASTER:
2702 for (; ctx != NULL; ctx = ctx->outer)
2703 switch (gimple_code (ctx->stmt))
2705 case GIMPLE_OMP_FOR:
2706 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2707 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2708 break;
2709 /* FALLTHRU */
2710 case GIMPLE_OMP_SECTIONS:
2711 case GIMPLE_OMP_SINGLE:
2712 case GIMPLE_OMP_TASK:
2713 error_at (gimple_location (stmt),
2714 "%<master%> region may not be closely nested inside "
2715 "of work-sharing, explicit %<task%> or %<taskloop%> "
2716 "region");
2717 return false;
2718 case GIMPLE_OMP_PARALLEL:
2719 case GIMPLE_OMP_TEAMS:
2720 return true;
2721 case GIMPLE_OMP_TARGET:
2722 if (gimple_omp_target_kind (ctx->stmt)
2723 == GF_OMP_TARGET_KIND_REGION)
2724 return true;
2725 break;
2726 default:
2727 break;
2729 break;
2730 case GIMPLE_OMP_TASK:
2731 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2732 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2733 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2734 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2736 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2737 error_at (OMP_CLAUSE_LOCATION (c),
2738 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2739 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2740 return false;
2742 break;
2743 case GIMPLE_OMP_ORDERED:
2744 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2745 c; c = OMP_CLAUSE_CHAIN (c))
2747 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2749 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2750 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2751 continue;
2753 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2754 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2755 || kind == OMP_CLAUSE_DEPEND_SINK)
2757 tree oclause;
2758 /* Look for containing ordered(N) loop. */
2759 if (ctx == NULL
2760 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2761 || (oclause
2762 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2763 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2765 error_at (OMP_CLAUSE_LOCATION (c),
2766 "%<ordered%> construct with %<depend%> clause "
2767 "must be closely nested inside an %<ordered%> "
2768 "loop");
2769 return false;
2771 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2773 error_at (OMP_CLAUSE_LOCATION (c),
2774 "%<ordered%> construct with %<depend%> clause "
2775 "must be closely nested inside a loop with "
2776 "%<ordered%> clause with a parameter");
2777 return false;
2780 else
2782 error_at (OMP_CLAUSE_LOCATION (c),
2783 "invalid depend kind in omp %<ordered%> %<depend%>");
2784 return false;
2787 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2788 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2790 	  /* An ordered simd must be closely nested inside a simd region,
2791 	     and a simd region must not encounter constructs other than
2792 	     ordered simd, therefore an ordered simd is either orphaned,
2793 	     or ctx->stmt must be a simd.  The latter case was already
2794 	     handled earlier.  */
2795 if (ctx != NULL)
2797 error_at (gimple_location (stmt),
2798 "%<ordered%> %<simd%> must be closely nested inside "
2799 "%<simd%> region");
2800 return false;
2803 for (; ctx != NULL; ctx = ctx->outer)
2804 switch (gimple_code (ctx->stmt))
2806 case GIMPLE_OMP_CRITICAL:
2807 case GIMPLE_OMP_TASK:
2808 case GIMPLE_OMP_ORDERED:
2809 ordered_in_taskloop:
2810 error_at (gimple_location (stmt),
2811 "%<ordered%> region may not be closely nested inside "
2812 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2813 "%<taskloop%> region");
2814 return false;
2815 case GIMPLE_OMP_FOR:
2816 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2817 goto ordered_in_taskloop;
2818 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2819 OMP_CLAUSE_ORDERED) == NULL)
2821 error_at (gimple_location (stmt),
2822 "%<ordered%> region must be closely nested inside "
2823 "a loop region with an %<ordered%> clause");
2824 return false;
2826 return true;
2827 case GIMPLE_OMP_TARGET:
2828 if (gimple_omp_target_kind (ctx->stmt)
2829 != GF_OMP_TARGET_KIND_REGION)
2830 break;
2831 /* FALLTHRU */
2832 case GIMPLE_OMP_PARALLEL:
2833 case GIMPLE_OMP_TEAMS:
2834 error_at (gimple_location (stmt),
2835 "%<ordered%> region must be closely nested inside "
2836 "a loop region with an %<ordered%> clause");
2837 return false;
2838 default:
2839 break;
2841 break;
2842 case GIMPLE_OMP_CRITICAL:
2844 tree this_stmt_name
2845 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2846 for (; ctx != NULL; ctx = ctx->outer)
2847 if (gomp_critical *other_crit
2848 = dyn_cast <gomp_critical *> (ctx->stmt))
2849 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2851 error_at (gimple_location (stmt),
2852 "%<critical%> region may not be nested inside "
2853 "a %<critical%> region with the same name");
2854 return false;
2857 break;
2858 case GIMPLE_OMP_TEAMS:
2859 if (ctx == NULL
2860 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2861 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2863 error_at (gimple_location (stmt),
2864 "%<teams%> construct not closely nested inside of "
2865 "%<target%> construct");
2866 return false;
2868 break;
2869 case GIMPLE_OMP_TARGET:
2870 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2871 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2872 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2873 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2875 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2876 error_at (OMP_CLAUSE_LOCATION (c),
2877 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2878 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2879 return false;
2881 if (is_gimple_omp_offloaded (stmt)
2882 && oacc_get_fn_attrib (cfun->decl) != NULL)
2884 error_at (gimple_location (stmt),
2885 "OpenACC region inside of OpenACC routine, nested "
2886 "parallelism not supported yet");
2887 return false;
2889 for (; ctx != NULL; ctx = ctx->outer)
2891 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2893 if (is_gimple_omp (stmt)
2894 && is_gimple_omp_oacc (stmt)
2895 && is_gimple_omp (ctx->stmt))
2897 error_at (gimple_location (stmt),
2898 "OpenACC construct inside of non-OpenACC region");
2899 return false;
2901 continue;
2904 const char *stmt_name, *ctx_stmt_name;
2905 switch (gimple_omp_target_kind (stmt))
2907 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2908 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2909 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2910 case GF_OMP_TARGET_KIND_ENTER_DATA:
2911 stmt_name = "target enter data"; break;
2912 case GF_OMP_TARGET_KIND_EXIT_DATA:
2913 stmt_name = "target exit data"; break;
2914 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2915 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2916 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2917 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2918 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2919 stmt_name = "enter/exit data"; break;
2920 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2921 break;
2922 default: gcc_unreachable ();
2924 switch (gimple_omp_target_kind (ctx->stmt))
2926 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2927 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2928 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2929 ctx_stmt_name = "parallel"; break;
2930 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2931 ctx_stmt_name = "kernels"; break;
2932 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2933 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2934 ctx_stmt_name = "host_data"; break;
2935 default: gcc_unreachable ();
2938 /* OpenACC/OpenMP mismatch? */
2939 if (is_gimple_omp_oacc (stmt)
2940 != is_gimple_omp_oacc (ctx->stmt))
2942 error_at (gimple_location (stmt),
2943 "%s %qs construct inside of %s %qs region",
2944 (is_gimple_omp_oacc (stmt)
2945 ? "OpenACC" : "OpenMP"), stmt_name,
2946 (is_gimple_omp_oacc (ctx->stmt)
2947 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2948 return false;
2950 if (is_gimple_omp_offloaded (ctx->stmt))
2952 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2953 if (is_gimple_omp_oacc (ctx->stmt))
2955 error_at (gimple_location (stmt),
2956 "%qs construct inside of %qs region",
2957 stmt_name, ctx_stmt_name);
2958 return false;
2960 else
2962 warning_at (gimple_location (stmt), 0,
2963 "%qs construct inside of %qs region",
2964 stmt_name, ctx_stmt_name);
2968 break;
2969 default:
2970 break;
2972 return true;
2976 /* Helper function for scan_omp.
2978 Callback for walk_tree or operators in walk_gimple_stmt used to
2979 scan for OMP directives in TP. */
2981 static tree
2982 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
2984 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2985 omp_context *ctx = (omp_context *) wi->info;
2986 tree t = *tp;
2988 switch (TREE_CODE (t))
2990 case VAR_DECL:
2991 case PARM_DECL:
2992 case LABEL_DECL:
2993 case RESULT_DECL:
2994 if (ctx)
2996 tree repl = remap_decl (t, &ctx->cb);
2997 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
2998 *tp = repl;
3000 break;
3002 default:
3003 if (ctx && TYPE_P (t))
3004 *tp = remap_type (t, &ctx->cb);
3005 else if (!DECL_P (t))
3007 *walk_subtrees = 1;
3008 if (ctx)
3010 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3011 if (tem != TREE_TYPE (t))
3013 if (TREE_CODE (t) == INTEGER_CST)
3014 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3015 else
3016 TREE_TYPE (t) = tem;
3020 break;
3023 return NULL_TREE;
3026 /* Return true if FNDECL is a setjmp or a longjmp. */
3028 static bool
3029 setjmp_or_longjmp_p (const_tree fndecl)
3031 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3032 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3033 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3034 return true;
3036 tree declname = DECL_NAME (fndecl);
3037 if (!declname)
3038 return false;
3039 const char *name = IDENTIFIER_POINTER (declname);
3040 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3044 /* Helper function for scan_omp.
3046 Callback for walk_gimple_stmt used to scan for OMP directives in
3047 the current statement in GSI. */
3049 static tree
3050 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3051 struct walk_stmt_info *wi)
3053 gimple *stmt = gsi_stmt (*gsi);
3054 omp_context *ctx = (omp_context *) wi->info;
3056 if (gimple_has_location (stmt))
3057 input_location = gimple_location (stmt);
3059 /* Check the nesting restrictions. */
3060 bool remove = false;
3061 if (is_gimple_omp (stmt))
3062 remove = !check_omp_nesting_restrictions (stmt, ctx);
3063 else if (is_gimple_call (stmt))
3065 tree fndecl = gimple_call_fndecl (stmt);
3066 if (fndecl)
3068 if (setjmp_or_longjmp_p (fndecl)
3069 && ctx
3070 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3071 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3073 remove = true;
3074 error_at (gimple_location (stmt),
3075 "setjmp/longjmp inside simd construct");
3077 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3078 switch (DECL_FUNCTION_CODE (fndecl))
3080 case BUILT_IN_GOMP_BARRIER:
3081 case BUILT_IN_GOMP_CANCEL:
3082 case BUILT_IN_GOMP_CANCELLATION_POINT:
3083 case BUILT_IN_GOMP_TASKYIELD:
3084 case BUILT_IN_GOMP_TASKWAIT:
3085 case BUILT_IN_GOMP_TASKGROUP_START:
3086 case BUILT_IN_GOMP_TASKGROUP_END:
3087 remove = !check_omp_nesting_restrictions (stmt, ctx);
3088 break;
3089 default:
3090 break;
3094 if (remove)
3096 stmt = gimple_build_nop ();
3097 gsi_replace (gsi, stmt, false);
3100 *handled_ops_p = true;
3102 switch (gimple_code (stmt))
3104 case GIMPLE_OMP_PARALLEL:
3105 taskreg_nesting_level++;
3106 scan_omp_parallel (gsi, ctx);
3107 taskreg_nesting_level--;
3108 break;
3110 case GIMPLE_OMP_TASK:
3111 taskreg_nesting_level++;
3112 scan_omp_task (gsi, ctx);
3113 taskreg_nesting_level--;
3114 break;
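    /* A simd loop in a context that might be offloaded to a SIMT device
       (e.g. a GPU) is duplicated by scan_omp_simd so that both a SIMT and
       a regular SIMD lowering are available; which one runs is decided at
       run time.  */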
3116 case GIMPLE_OMP_FOR:
3117 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3118 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3119 && omp_maybe_offloaded_ctx (ctx)
3120 && omp_max_simt_vf ())
3121 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3122 else
3123 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3124 break;
3126 case GIMPLE_OMP_SECTIONS:
3127 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3128 break;
3130 case GIMPLE_OMP_SINGLE:
3131 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3132 break;
3134 case GIMPLE_OMP_SECTION:
3135 case GIMPLE_OMP_MASTER:
3136 case GIMPLE_OMP_TASKGROUP:
3137 case GIMPLE_OMP_ORDERED:
3138 case GIMPLE_OMP_CRITICAL:
3139 case GIMPLE_OMP_GRID_BODY:
3140 ctx = new_omp_context (stmt, ctx);
3141 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3142 break;
3144 case GIMPLE_OMP_TARGET:
3145 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3146 break;
3148 case GIMPLE_OMP_TEAMS:
3149 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3150 break;
3152 case GIMPLE_BIND:
3154 tree var;
3156 *handled_ops_p = false;
3157 if (ctx)
3158 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3159 var ;
3160 var = DECL_CHAIN (var))
3161 insert_decl_map (&ctx->cb, var, var);
3163 break;
3164 default:
3165 *handled_ops_p = false;
3166 break;
3169 return NULL_TREE;
3173 /* Scan all the statements starting at the current statement. CTX
3174 contains context information about the OMP directives and
3175 clauses found during the scan. */
3177 static void
3178 scan_omp (gimple_seq *body_p, omp_context *ctx)
3180 location_t saved_location;
3181 struct walk_stmt_info wi;
3183 memset (&wi, 0, sizeof (wi));
3184 wi.info = ctx;
3185 wi.want_locations = true;
3187 saved_location = input_location;
3188 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3189 input_location = saved_location;
3192 /* Re-gimplification and code generation routines. */
3194 /* If a context was created for STMT when it was scanned, return it. */
3196 static omp_context *
3197 maybe_lookup_ctx (gimple *stmt)
3199 splay_tree_node n;
3200 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3201 return n ? (omp_context *) n->value : NULL;
3205 /* Find the mapping for DECL in CTX or the immediately enclosing
3206 context that has a mapping for DECL.
3208 If CTX is a nested parallel directive, we may have to use the decl
3209 mappings created in CTX's parent context. Suppose that we have the
3210    following parallel nesting (variable UIDs shown for clarity):
3212 iD.1562 = 0;
3213 #omp parallel shared(iD.1562) -> outer parallel
3214 iD.1562 = iD.1562 + 1;
3216 #omp parallel shared (iD.1562) -> inner parallel
3217 iD.1562 = iD.1562 - 1;
3219 Each parallel structure will create a distinct .omp_data_s structure
3220 for copying iD.1562 in/out of the directive:
3222 outer parallel .omp_data_s.1.i -> iD.1562
3223 inner parallel .omp_data_s.2.i -> iD.1562
3225 A shared variable mapping will produce a copy-out operation before
3226 the parallel directive and a copy-in operation after it. So, in
3227 this case we would have:
3229 iD.1562 = 0;
3230 .omp_data_o.1.i = iD.1562;
3231 #omp parallel shared(iD.1562) -> outer parallel
3232 .omp_data_i.1 = &.omp_data_o.1
3233 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3235 .omp_data_o.2.i = iD.1562; -> **
3236 #omp parallel shared(iD.1562) -> inner parallel
3237 .omp_data_i.2 = &.omp_data_o.2
3238 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3241 ** This is a problem. The symbol iD.1562 cannot be referenced
3242 inside the body of the outer parallel region. But since we are
3243 emitting this copy operation while expanding the inner parallel
3244 directive, we need to access the CTX structure of the outer
3245 parallel directive to get the correct mapping:
3247 .omp_data_o.2.i = .omp_data_i.1->i
3249 Since there may be other workshare or parallel directives enclosing
3250 the parallel directive, it may be necessary to walk up the context
3251 parent chain. This is not a problem in general because nested
3252 parallelism happens only rarely. */
3254 static tree
3255 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3257 tree t;
3258 omp_context *up;
3260 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3261 t = maybe_lookup_decl (decl, up);
3263 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3265 return t ? t : decl;
3269 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3270 in outer contexts. */
3272 static tree
3273 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3275 tree t = NULL;
3276 omp_context *up;
3278 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3279 t = maybe_lookup_decl (decl, up);
3281 return t ? t : decl;
3285 /* Construct the initialization value for reduction operation OP. */
3287 tree
3288 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3290 switch (op)
3292 case PLUS_EXPR:
3293 case MINUS_EXPR:
3294 case BIT_IOR_EXPR:
3295 case BIT_XOR_EXPR:
3296 case TRUTH_OR_EXPR:
3297 case TRUTH_ORIF_EXPR:
3298 case TRUTH_XOR_EXPR:
3299 case NE_EXPR:
3300 return build_zero_cst (type);
3302 case MULT_EXPR:
3303 case TRUTH_AND_EXPR:
3304 case TRUTH_ANDIF_EXPR:
3305 case EQ_EXPR:
3306 return fold_convert_loc (loc, type, integer_one_node);
3308 case BIT_AND_EXPR:
3309 return fold_convert_loc (loc, type, integer_minus_one_node);
3311 case MAX_EXPR:
3312 if (SCALAR_FLOAT_TYPE_P (type))
3314 REAL_VALUE_TYPE max, min;
3315 if (HONOR_INFINITIES (type))
3317 real_inf (&max);
3318 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3320 else
3321 real_maxval (&min, 1, TYPE_MODE (type));
3322 return build_real (type, min);
3324 else if (POINTER_TYPE_P (type))
3326 wide_int min
3327 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3328 return wide_int_to_tree (type, min);
3330 else
3332 gcc_assert (INTEGRAL_TYPE_P (type));
3333 return TYPE_MIN_VALUE (type);
3336 case MIN_EXPR:
3337 if (SCALAR_FLOAT_TYPE_P (type))
3339 REAL_VALUE_TYPE max;
3340 if (HONOR_INFINITIES (type))
3341 real_inf (&max);
3342 else
3343 real_maxval (&max, 0, TYPE_MODE (type));
3344 return build_real (type, max);
3346 else if (POINTER_TYPE_P (type))
3348 wide_int max
3349 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3350 return wide_int_to_tree (type, max);
3352 else
3354 gcc_assert (INTEGRAL_TYPE_P (type));
3355 return TYPE_MAX_VALUE (type);
3358 default:
3359 gcc_unreachable ();
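/* For example: reduction(+:x) and reduction(|:x) initialize the private
   copy to 0; reduction(*:x) and reduction(&&:x) to 1; reduction(&:x) to
   ~0 (all bits set); and reduction(max:x) to the minimum value of x's
   type, i.e. -inf for floating-point types when infinities are honored.  */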
3363 /* Construct the initialization value for reduction CLAUSE. */
3365 tree
3366 omp_reduction_init (tree clause, tree type)
3368 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3369 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3372 /* Return alignment to be assumed for var in CLAUSE, which should be
3373 OMP_CLAUSE_ALIGNED. */
3375 static tree
3376 omp_clause_aligned_alignment (tree clause)
3378 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3379 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3381   /* Otherwise return an implementation-defined alignment.  */
3382 unsigned int al = 1;
3383 opt_scalar_mode mode_iter;
3384 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3385 if (vs)
3386 vs = 1 << floor_log2 (vs);
3387 static enum mode_class classes[]
3388 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3389 for (int i = 0; i < 4; i += 2)
3390 /* The for loop above dictates that we only walk through scalar classes. */
3391 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3393 scalar_mode mode = mode_iter.require ();
3394 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3395 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3396 continue;
3397 while (vs
3398 && GET_MODE_SIZE (vmode) < vs
3399 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3400 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3402 tree type = lang_hooks.types.type_for_mode (mode, 1);
3403 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3404 continue;
3405 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3406 / GET_MODE_SIZE (mode));
3407 if (TYPE_MODE (type) != vmode)
3408 continue;
3409 if (TYPE_ALIGN_UNIT (type) > al)
3410 al = TYPE_ALIGN_UNIT (type);
3412 return build_int_cst (integer_type_node, al);
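/* As an illustration only: on a target whose preferred SIMD mode for
   SFmode is a 256-bit vector mode, the loop above would settle on al = 32,
   so an aligned clause without an explicit alignment would assume 32
   bytes.  The actual value is entirely target-defined.  */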
3416 /* This structure is part of the interface between lower_rec_simd_input_clauses
3417 and lower_rec_input_clauses. */
3419 struct omplow_simd_context {
3420 tree idx;
3421 tree lane;
3422 vec<tree, va_heap> simt_eargs;
3423 gimple_seq simt_dlist;
3424 int max_vf;
3425 bool is_simt;
3428 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3429 privatization. */
3431 static bool
3432 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3433 omplow_simd_context *sctx, tree &ivar, tree &lvar)
3435 if (sctx->max_vf == 0)
3437 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3438 if (sctx->max_vf > 1)
3440 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3441 OMP_CLAUSE_SAFELEN);
3442 if (c
3443 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3444 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3445 sctx->max_vf = 1;
3446 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3447 sctx->max_vf) == -1)
3448 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3450 if (sctx->max_vf > 1)
3452 sctx->idx = create_tmp_var (unsigned_type_node);
3453 sctx->lane = create_tmp_var (unsigned_type_node);
3456 if (sctx->max_vf == 1)
3457 return false;
3459 if (sctx->is_simt)
3461 if (is_gimple_reg (new_var))
3463 ivar = lvar = new_var;
3464 return true;
3466 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3467 ivar = lvar = create_tmp_var (type);
3468 TREE_ADDRESSABLE (ivar) = 1;
3469 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3470 NULL, DECL_ATTRIBUTES (ivar));
3471 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3472 tree clobber = build_constructor (type, NULL);
3473 TREE_THIS_VOLATILE (clobber) = 1;
3474 gimple *g = gimple_build_assign (ivar, clobber);
3475 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3477 else
3479 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3480 tree avar = create_tmp_var_raw (atype);
3481 if (TREE_ADDRESSABLE (new_var))
3482 TREE_ADDRESSABLE (avar) = 1;
3483 DECL_ATTRIBUTES (avar)
3484 = tree_cons (get_identifier ("omp simd array"), NULL,
3485 DECL_ATTRIBUTES (avar));
3486 gimple_add_tmp_var (avar);
3487 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3488 NULL_TREE, NULL_TREE);
3489 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3490 NULL_TREE, NULL_TREE);
3492 if (DECL_P (new_var))
3494 SET_DECL_VALUE_EXPR (new_var, lvar);
3495 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3497 return true;
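/* E.g. in a non-SIMT simd loop with max_vf == 4, a privatized variable x
   is backed by an "omp simd array" D.tmp[4] (D.tmp being a hypothetical
   temporary): IVAR is D.tmp[sctx->idx], the per-iteration copy, and LVAR
   is D.tmp[sctx->lane], installed as x's DECL_VALUE_EXPR.  */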
3500 /* Helper function of lower_rec_input_clauses. For a reference
3501 in simd reduction, add an underlying variable it will reference. */
3503 static void
3504 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3506 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3507 if (TREE_CONSTANT (z))
3509 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3510 get_name (new_vard));
3511 gimple_add_tmp_var (z);
3512 TREE_ADDRESSABLE (z) = 1;
3513 z = build_fold_addr_expr_loc (loc, z);
3514 gimplify_assign (new_vard, z, ilist);
3518 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3519 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3520 private variables. Initialization statements go in ILIST, while calls
3521 to destructors go in DLIST. */
3523 static void
3524 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3525 omp_context *ctx, struct omp_for_data *fd)
3527 tree c, dtor, copyin_seq, x, ptr;
3528 bool copyin_by_ref = false;
3529 bool lastprivate_firstprivate = false;
3530 bool reduction_omp_orig_ref = false;
3531 int pass;
3532 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3533 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3534 omplow_simd_context sctx = omplow_simd_context ();
3535 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3536 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3537 gimple_seq llist[3] = { };
3539 copyin_seq = NULL;
3540 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3542 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3543 with data sharing clauses referencing variable sized vars. That
3544 is unnecessarily hard to support and very unlikely to result in
3545 vectorized code anyway. */
3546 if (is_simd)
3547 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3548 switch (OMP_CLAUSE_CODE (c))
3550 case OMP_CLAUSE_LINEAR:
3551 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3552 sctx.max_vf = 1;
3553 /* FALLTHRU */
3554 case OMP_CLAUSE_PRIVATE:
3555 case OMP_CLAUSE_FIRSTPRIVATE:
3556 case OMP_CLAUSE_LASTPRIVATE:
3557 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3558 sctx.max_vf = 1;
3559 break;
3560 case OMP_CLAUSE_REDUCTION:
3561 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3562 || is_variable_sized (OMP_CLAUSE_DECL (c)))
3563 sctx.max_vf = 1;
3564 break;
3565 default:
3566 continue;
3569 /* Add a placeholder for simduid. */
3570 if (sctx.is_simt && sctx.max_vf != 1)
3571 sctx.simt_eargs.safe_push (NULL_TREE);
3573 /* Do all the fixed sized types in the first pass, and the variable sized
3574 types in the second pass. This makes sure that the scalar arguments to
3575 the variable sized types are processed before we use them in the
3576 variable sized operations. */
3577 for (pass = 0; pass < 2; ++pass)
3579 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3581 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3582 tree var, new_var;
3583 bool by_ref;
3584 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3586 switch (c_kind)
3588 case OMP_CLAUSE_PRIVATE:
3589 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3590 continue;
3591 break;
3592 case OMP_CLAUSE_SHARED:
3593 	    /* Ignore shared directives in a teams construct.  */
3594 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3595 continue;
3596 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3598 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3599 || is_global_var (OMP_CLAUSE_DECL (c)));
3600 continue;
3602 case OMP_CLAUSE_FIRSTPRIVATE:
3603 case OMP_CLAUSE_COPYIN:
3604 break;
3605 case OMP_CLAUSE_LINEAR:
3606 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3607 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3608 lastprivate_firstprivate = true;
3609 break;
3610 case OMP_CLAUSE_REDUCTION:
3611 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3612 reduction_omp_orig_ref = true;
3613 break;
3614 case OMP_CLAUSE__LOOPTEMP_:
3615 /* Handle _looptemp_ clauses only on parallel/task. */
3616 if (fd)
3617 continue;
3618 break;
3619 case OMP_CLAUSE_LASTPRIVATE:
3620 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3622 lastprivate_firstprivate = true;
3623 if (pass != 0 || is_taskloop_ctx (ctx))
3624 continue;
3626 	      /* Even without a corresponding firstprivate, if the
3627 		 decl is a Fortran allocatable it needs an outer var
3628 		 reference.  */
3629 else if (pass == 0
3630 && lang_hooks.decls.omp_private_outer_ref
3631 (OMP_CLAUSE_DECL (c)))
3632 lastprivate_firstprivate = true;
3633 break;
3634 case OMP_CLAUSE_ALIGNED:
3635 if (pass == 0)
3636 continue;
3637 var = OMP_CLAUSE_DECL (c);
3638 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3639 && !is_global_var (var))
3641 new_var = maybe_lookup_decl (var, ctx);
3642 if (new_var == NULL_TREE)
3643 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3644 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3645 tree alarg = omp_clause_aligned_alignment (c);
3646 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3647 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3648 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3649 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3650 gimplify_and_add (x, ilist);
3652 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3653 && is_global_var (var))
3655 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3656 new_var = lookup_decl (var, ctx);
3657 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3658 t = build_fold_addr_expr_loc (clause_loc, t);
3659 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3660 tree alarg = omp_clause_aligned_alignment (c);
3661 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3662 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3663 t = fold_convert_loc (clause_loc, ptype, t);
3664 x = create_tmp_var (ptype);
3665 t = build2 (MODIFY_EXPR, ptype, x, t);
3666 gimplify_and_add (t, ilist);
3667 t = build_simple_mem_ref_loc (clause_loc, x);
3668 SET_DECL_VALUE_EXPR (new_var, t);
3669 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3671 continue;
3672 default:
3673 continue;
3676 new_var = var = OMP_CLAUSE_DECL (c);
3677 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3679 var = TREE_OPERAND (var, 0);
3680 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3681 var = TREE_OPERAND (var, 0);
3682 if (TREE_CODE (var) == INDIRECT_REF
3683 || TREE_CODE (var) == ADDR_EXPR)
3684 var = TREE_OPERAND (var, 0);
3685 if (is_variable_sized (var))
3687 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3688 var = DECL_VALUE_EXPR (var);
3689 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3690 var = TREE_OPERAND (var, 0);
3691 gcc_assert (DECL_P (var));
3693 new_var = var;
3695 if (c_kind != OMP_CLAUSE_COPYIN)
3696 new_var = lookup_decl (var, ctx);
3698 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3700 if (pass != 0)
3701 continue;
3703 /* C/C++ array section reductions. */
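	    /* E.g. for reduction(+:p[2:n]) the OMP_CLAUSE_DECL is a MEM_REF
	       whose second operand carries the bias; the code below
	       allocates a private array for the section and initializes
	       each element in an emitted loop.  */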
3704 else if (c_kind == OMP_CLAUSE_REDUCTION
3705 && var != OMP_CLAUSE_DECL (c))
3707 if (pass == 0)
3708 continue;
3710 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3711 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3712 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3714 tree b = TREE_OPERAND (orig_var, 1);
3715 b = maybe_lookup_decl (b, ctx);
3716 if (b == NULL)
3718 b = TREE_OPERAND (orig_var, 1);
3719 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3721 if (integer_zerop (bias))
3722 bias = b;
3723 else
3725 bias = fold_convert_loc (clause_loc,
3726 TREE_TYPE (b), bias);
3727 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3728 TREE_TYPE (b), b, bias);
3730 orig_var = TREE_OPERAND (orig_var, 0);
3732 if (TREE_CODE (orig_var) == INDIRECT_REF
3733 || TREE_CODE (orig_var) == ADDR_EXPR)
3734 orig_var = TREE_OPERAND (orig_var, 0);
3735 tree d = OMP_CLAUSE_DECL (c);
3736 tree type = TREE_TYPE (d);
3737 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3738 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3739 const char *name = get_name (orig_var);
3740 if (TREE_CONSTANT (v))
3742 x = create_tmp_var_raw (type, name);
3743 gimple_add_tmp_var (x);
3744 TREE_ADDRESSABLE (x) = 1;
3745 x = build_fold_addr_expr_loc (clause_loc, x);
3747 else
3749 tree atmp
3750 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3751 tree t = maybe_lookup_decl (v, ctx);
3752 if (t)
3753 v = t;
3754 else
3755 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3756 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3757 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3758 TREE_TYPE (v), v,
3759 build_int_cst (TREE_TYPE (v), 1));
3760 t = fold_build2_loc (clause_loc, MULT_EXPR,
3761 TREE_TYPE (v), t,
3762 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3763 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3764 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3767 tree ptype = build_pointer_type (TREE_TYPE (type));
3768 x = fold_convert_loc (clause_loc, ptype, x);
3769 tree y = create_tmp_var (ptype, name);
3770 gimplify_assign (y, x, ilist);
3771 x = y;
3772 tree yb = y;
3774 if (!integer_zerop (bias))
3776 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3777 bias);
3778 		      yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3779 					     x);
3780 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3781 pointer_sized_int_node, yb, bias);
3782 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3783 yb = create_tmp_var (ptype, name);
3784 gimplify_assign (yb, x, ilist);
3785 x = yb;
3788 d = TREE_OPERAND (d, 0);
3789 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3790 d = TREE_OPERAND (d, 0);
3791 if (TREE_CODE (d) == ADDR_EXPR)
3793 if (orig_var != var)
3795 gcc_assert (is_variable_sized (orig_var));
3796 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3797 					    x);
3798 gimplify_assign (new_var, x, ilist);
3799 tree new_orig_var = lookup_decl (orig_var, ctx);
3800 tree t = build_fold_indirect_ref (new_var);
3801 DECL_IGNORED_P (new_var) = 0;
3802 		  TREE_THIS_NOTRAP (t) = 1;
3803 SET_DECL_VALUE_EXPR (new_orig_var, t);
3804 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3806 else
3808 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3809 build_int_cst (ptype, 0));
3810 SET_DECL_VALUE_EXPR (new_var, x);
3811 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3814 else
3816 gcc_assert (orig_var == var);
3817 if (TREE_CODE (d) == INDIRECT_REF)
3819 x = create_tmp_var (ptype, name);
3820 TREE_ADDRESSABLE (x) = 1;
3821 gimplify_assign (x, yb, ilist);
3822 x = build_fold_addr_expr_loc (clause_loc, x);
3824 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3825 gimplify_assign (new_var, x, ilist);
3827 tree y1 = create_tmp_var (ptype, NULL);
3828 gimplify_assign (y1, y, ilist);
3829 tree i2 = NULL_TREE, y2 = NULL_TREE;
3830 tree body2 = NULL_TREE, end2 = NULL_TREE;
3831 tree y3 = NULL_TREE, y4 = NULL_TREE;
3832 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3834 y2 = create_tmp_var (ptype, NULL);
3835 gimplify_assign (y2, y, ilist);
3836 tree ref = build_outer_var_ref (var, ctx);
3837 		  /* For references, build_outer_var_ref already performs this.  */
3838 if (TREE_CODE (d) == INDIRECT_REF)
3839 gcc_assert (omp_is_reference (var));
3840 else if (TREE_CODE (d) == ADDR_EXPR)
3841 ref = build_fold_addr_expr (ref);
3842 else if (omp_is_reference (var))
3843 ref = build_fold_addr_expr (ref);
3844 ref = fold_convert_loc (clause_loc, ptype, ref);
3845 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3846 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3848 y3 = create_tmp_var (ptype, NULL);
3849 gimplify_assign (y3, unshare_expr (ref), ilist);
3851 if (is_simd)
3853 y4 = create_tmp_var (ptype, NULL);
3854 gimplify_assign (y4, ref, dlist);
3857 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3858 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3859 tree body = create_artificial_label (UNKNOWN_LOCATION);
3860 tree end = create_artificial_label (UNKNOWN_LOCATION);
3861 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3862 if (y2)
3864 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3865 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3866 body2 = create_artificial_label (UNKNOWN_LOCATION);
3867 end2 = create_artificial_label (UNKNOWN_LOCATION);
3868 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3870 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3872 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3873 tree decl_placeholder
3874 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3875 SET_DECL_VALUE_EXPR (decl_placeholder,
3876 build_simple_mem_ref (y1));
3877 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3878 SET_DECL_VALUE_EXPR (placeholder,
3879 y3 ? build_simple_mem_ref (y3)
3880 : error_mark_node);
3881 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3882 x = lang_hooks.decls.omp_clause_default_ctor
3883 (c, build_simple_mem_ref (y1),
3884 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3885 if (x)
3886 gimplify_and_add (x, ilist);
3887 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3889 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3890 lower_omp (&tseq, ctx);
3891 gimple_seq_add_seq (ilist, tseq);
3893 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3894 if (is_simd)
3896 SET_DECL_VALUE_EXPR (decl_placeholder,
3897 build_simple_mem_ref (y2));
3898 SET_DECL_VALUE_EXPR (placeholder,
3899 build_simple_mem_ref (y4));
3900 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3901 lower_omp (&tseq, ctx);
3902 gimple_seq_add_seq (dlist, tseq);
3903 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3905 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3906 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3907 x = lang_hooks.decls.omp_clause_dtor
3908 (c, build_simple_mem_ref (y2));
3909 if (x)
3911 gimple_seq tseq = NULL;
3912 dtor = x;
3913 gimplify_stmt (&dtor, &tseq);
3914 gimple_seq_add_seq (dlist, tseq);
3917 else
3919 x = omp_reduction_init (c, TREE_TYPE (type));
3920 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3922 /* reduction(-:var) sums up the partial results, so it
3923 acts identically to reduction(+:var). */
3924 if (code == MINUS_EXPR)
3925 code = PLUS_EXPR;
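/* Hypothetical example: for reduction (-:x) each thread's private
   copy still starts from the identity 0 and the partial results are
   combined with +, because x - a - b == x - (a + b).  */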
3927 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3928 if (is_simd)
3930 x = build2 (code, TREE_TYPE (type),
3931 build_simple_mem_ref (y4),
3932 build_simple_mem_ref (y2));
3933 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3936 gimple *g
3937 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3938 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3939 gimple_seq_add_stmt (ilist, g);
3940 if (y3)
3942 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3943 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3944 gimple_seq_add_stmt (ilist, g);
3946 g = gimple_build_assign (i, PLUS_EXPR, i,
3947 build_int_cst (TREE_TYPE (i), 1));
3948 gimple_seq_add_stmt (ilist, g);
3949 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3950 gimple_seq_add_stmt (ilist, g);
3951 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3952 if (y2)
3954 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3955 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3956 gimple_seq_add_stmt (dlist, g);
3957 if (y4)
3959 g = gimple_build_assign
3960 (y4, POINTER_PLUS_EXPR, y4,
3961 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3962 gimple_seq_add_stmt (dlist, g);
3964 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3965 build_int_cst (TREE_TYPE (i2), 1));
3966 gimple_seq_add_stmt (dlist, g);
3967 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3968 gimple_seq_add_stmt (dlist, g);
3969 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3971 continue;
3973 else if (is_variable_sized (var))
3975 /* For variable sized types, we need to allocate the
3976 actual storage here. Call alloca and store the
3977 result in the pointer decl that we created elsewhere. */
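/* Illustrative sketch, not part of the original source (names
   invented): for a VLA such as "int a[n]" the statements emitted
   below amount to

       void *tmp = __builtin_alloca_with_align (n * sizeof (int),
						align);
       a.ptr = tmp;

   where a.ptr stands for the pointer replacement created earlier
   during scanning.  */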
3978 if (pass == 0)
3979 continue;
3981 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3983 gcall *stmt;
3984 tree tmp, atmp;
3986 ptr = DECL_VALUE_EXPR (new_var);
3987 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3988 ptr = TREE_OPERAND (ptr, 0);
3989 gcc_assert (DECL_P (ptr));
3990 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3992 /* void *tmp = __builtin_alloca_with_align (size, align); */
3993 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3994 stmt = gimple_build_call (atmp, 2, x,
3995 size_int (DECL_ALIGN (var)));
3996 tmp = create_tmp_var_raw (ptr_type_node);
3997 gimple_add_tmp_var (tmp);
3998 gimple_call_set_lhs (stmt, tmp);
4000 gimple_seq_add_stmt (ilist, stmt);
4002 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4003 gimplify_assign (ptr, x, ilist);
4006 else if (omp_is_reference (var))
4008 /* For references that are being privatized for Fortran,
4009 allocate new backing storage for the new pointer
4010 variable. This allows us to avoid changing all the
4011 code that expects a pointer to something that expects
4012 a direct variable. */
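/* Illustrative sketch (names invented): a Fortran dummy argument
   INTEGER :: X arrives as a pointer; privatizing it allocates fresh
   backing storage while keeping the pointer shape,

       x.priv = __builtin_alloca_with_align (sizeof (int), align);

   so all code that dereferences X continues to work unchanged.  */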
4013 if (pass == 0)
4014 continue;
4016 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4017 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4019 x = build_receiver_ref (var, false, ctx);
4020 x = build_fold_addr_expr_loc (clause_loc, x);
4022 else if (TREE_CONSTANT (x))
4024 /* For reduction in SIMD loop, defer adding the
4025 initialization of the reference, because if we decide
4026 to use a SIMD array for it, the initialization could cause
4027 an expansion ICE. */
4028 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4029 x = NULL_TREE;
4030 else
4032 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4033 get_name (var));
4034 gimple_add_tmp_var (x);
4035 TREE_ADDRESSABLE (x) = 1;
4036 x = build_fold_addr_expr_loc (clause_loc, x);
4039 else
4041 tree atmp
4042 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4043 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4044 tree al = size_int (TYPE_ALIGN (rtype));
4045 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4048 if (x)
4050 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4051 gimplify_assign (new_var, x, ilist);
4054 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4056 else if (c_kind == OMP_CLAUSE_REDUCTION
4057 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4059 if (pass == 0)
4060 continue;
4062 else if (pass != 0)
4063 continue;
4065 switch (OMP_CLAUSE_CODE (c))
4067 case OMP_CLAUSE_SHARED:
4068 /* Ignore shared directives in teams construct. */
4069 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4070 continue;
4071 /* Shared global vars are just accessed directly. */
4072 if (is_global_var (new_var))
4073 break;
4074 /* For taskloop firstprivate/lastprivate, represented
4075 as firstprivate and shared clause on the task, new_var
4076 is the firstprivate var. */
4077 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4078 break;
4079 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4080 needs to be delayed until after fixup_child_record_type so
4081 that we get the correct type during the dereference. */
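/* Illustrative sketch (field names invented): after this, uses of a
   shared variable X in the outlined body read through the receiver
   argument, roughly

       X  -->  .omp_data_i->X        when stored by value
       X  -->  *.omp_data_i->X       when passed by reference

   via the DECL_VALUE_EXPR mechanism rather than textual rewriting.  */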
4082 by_ref = use_pointer_for_field (var, ctx);
4083 x = build_receiver_ref (var, by_ref, ctx);
4084 SET_DECL_VALUE_EXPR (new_var, x);
4085 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4087 /* ??? If VAR is not passed by reference, and the variable
4088 hasn't been initialized yet, then we'll get a warning for
4089 the store into the omp_data_s structure. Ideally, we'd be
4090 able to notice this and not store anything at all, but
4091 we're generating code too early. Suppress the warning. */
4092 if (!by_ref)
4093 TREE_NO_WARNING (var) = 1;
4094 break;
4096 case OMP_CLAUSE_LASTPRIVATE:
4097 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4098 break;
4099 /* FALLTHRU */
4101 case OMP_CLAUSE_PRIVATE:
4102 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4103 x = build_outer_var_ref (var, ctx);
4104 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4106 if (is_task_ctx (ctx))
4107 x = build_receiver_ref (var, false, ctx);
4108 else
4109 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4111 else
4112 x = NULL;
4113 do_private:
4114 tree nx;
4115 nx = lang_hooks.decls.omp_clause_default_ctor
4116 (c, unshare_expr (new_var), x);
4117 if (is_simd)
4119 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4120 if ((TREE_ADDRESSABLE (new_var) || nx || y
4121 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4122 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4123 ivar, lvar))
4125 if (nx)
4126 x = lang_hooks.decls.omp_clause_default_ctor
4127 (c, unshare_expr (ivar), x);
4128 if (nx && x)
4129 gimplify_and_add (x, &llist[0]);
4130 if (y)
4132 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4133 if (y)
4135 gimple_seq tseq = NULL;
4137 dtor = y;
4138 gimplify_stmt (&dtor, &tseq);
4139 gimple_seq_add_seq (&llist[1], tseq);
4142 break;
4145 if (nx)
4146 gimplify_and_add (nx, ilist);
4147 /* FALLTHRU */
4149 do_dtor:
4150 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4151 if (x)
4153 gimple_seq tseq = NULL;
4155 dtor = x;
4156 gimplify_stmt (&dtor, &tseq);
4157 gimple_seq_add_seq (dlist, tseq);
4159 break;
4161 case OMP_CLAUSE_LINEAR:
4162 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4163 goto do_firstprivate;
4164 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4165 x = NULL;
4166 else
4167 x = build_outer_var_ref (var, ctx);
4168 goto do_private;
4170 case OMP_CLAUSE_FIRSTPRIVATE:
4171 if (is_task_ctx (ctx))
4173 if (omp_is_reference (var) || is_variable_sized (var))
4174 goto do_dtor;
4175 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4176 ctx))
4177 || use_pointer_for_field (var, NULL))
4179 x = build_receiver_ref (var, false, ctx);
4180 SET_DECL_VALUE_EXPR (new_var, x);
4181 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4182 goto do_dtor;
4185 do_firstprivate:
4186 x = build_outer_var_ref (var, ctx);
4187 if (is_simd)
4189 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4190 && gimple_omp_for_combined_into_p (ctx->stmt))
4192 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4193 tree stept = TREE_TYPE (t);
4194 tree ct = omp_find_clause (clauses,
4195 OMP_CLAUSE__LOOPTEMP_);
4196 gcc_assert (ct);
4197 tree l = OMP_CLAUSE_DECL (ct);
4198 tree n1 = fd->loop.n1;
4199 tree step = fd->loop.step;
4200 tree itype = TREE_TYPE (l);
4201 if (POINTER_TYPE_P (itype))
4202 itype = signed_type_for (itype);
4203 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4204 if (TYPE_UNSIGNED (itype)
4205 && fd->loop.cond_code == GT_EXPR)
4206 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4207 fold_build1 (NEGATE_EXPR, itype, l),
4208 fold_build1 (NEGATE_EXPR,
4209 itype, step));
4210 else
4211 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4212 t = fold_build2 (MULT_EXPR, stept,
4213 fold_convert (stept, l), t);
4215 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4217 x = lang_hooks.decls.omp_clause_linear_ctor
4218 (c, new_var, x, t);
4219 gimplify_and_add (x, ilist);
4220 goto do_dtor;
4223 if (POINTER_TYPE_P (TREE_TYPE (x)))
4224 x = fold_build2 (POINTER_PLUS_EXPR,
4225 TREE_TYPE (x), x, t);
4226 else
4227 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4230 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4231 || TREE_ADDRESSABLE (new_var))
4232 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4233 ivar, lvar))
4235 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4237 tree iv = create_tmp_var (TREE_TYPE (new_var));
4238 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4239 gimplify_and_add (x, ilist);
4240 gimple_stmt_iterator gsi
4241 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4242 gassign *g
4243 = gimple_build_assign (unshare_expr (lvar), iv);
4244 gsi_insert_before_without_update (&gsi, g,
4245 GSI_SAME_STMT);
4246 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4247 enum tree_code code = PLUS_EXPR;
4248 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4249 code = POINTER_PLUS_EXPR;
4250 g = gimple_build_assign (iv, code, iv, t);
4251 gsi_insert_before_without_update (&gsi, g,
4252 GSI_SAME_STMT);
4253 break;
4255 x = lang_hooks.decls.omp_clause_copy_ctor
4256 (c, unshare_expr (ivar), x);
4257 gimplify_and_add (x, &llist[0]);
4258 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4259 if (x)
4261 gimple_seq tseq = NULL;
4263 dtor = x;
4264 gimplify_stmt (&dtor, &tseq);
4265 gimple_seq_add_seq (&llist[1], tseq);
4267 break;
4270 x = lang_hooks.decls.omp_clause_copy_ctor
4271 (c, unshare_expr (new_var), x);
4272 gimplify_and_add (x, ilist);
4273 goto do_dtor;
4275 case OMP_CLAUSE__LOOPTEMP_:
4276 gcc_assert (is_taskreg_ctx (ctx));
4277 x = build_outer_var_ref (var, ctx);
4278 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4279 gimplify_and_add (x, ilist);
4280 break;
4282 case OMP_CLAUSE_COPYIN:
4283 by_ref = use_pointer_for_field (var, NULL);
4284 x = build_receiver_ref (var, by_ref, ctx);
4285 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4286 append_to_statement_list (x, &copyin_seq);
4287 copyin_by_ref |= by_ref;
4288 break;
4290 case OMP_CLAUSE_REDUCTION:
4291 /* OpenACC reductions are initialized using the
4292 GOACC_REDUCTION internal function. */
4293 if (is_gimple_omp_oacc (ctx->stmt))
4294 break;
4295 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4297 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4298 gimple *tseq;
4299 x = build_outer_var_ref (var, ctx);
4301 if (omp_is_reference (var)
4302 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4303 TREE_TYPE (x)))
4304 x = build_fold_addr_expr_loc (clause_loc, x);
4305 SET_DECL_VALUE_EXPR (placeholder, x);
4306 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4307 tree new_vard = new_var;
4308 if (omp_is_reference (var))
4310 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4311 new_vard = TREE_OPERAND (new_var, 0);
4312 gcc_assert (DECL_P (new_vard));
4314 if (is_simd
4315 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4316 ivar, lvar))
4318 if (new_vard == new_var)
4320 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4321 SET_DECL_VALUE_EXPR (new_var, ivar);
4323 else
4325 SET_DECL_VALUE_EXPR (new_vard,
4326 build_fold_addr_expr (ivar));
4327 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4329 x = lang_hooks.decls.omp_clause_default_ctor
4330 (c, unshare_expr (ivar),
4331 build_outer_var_ref (var, ctx));
4332 if (x)
4333 gimplify_and_add (x, &llist[0]);
4334 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4336 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4337 lower_omp (&tseq, ctx);
4338 gimple_seq_add_seq (&llist[0], tseq);
4340 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4341 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4342 lower_omp (&tseq, ctx);
4343 gimple_seq_add_seq (&llist[1], tseq);
4344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4345 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4346 if (new_vard == new_var)
4347 SET_DECL_VALUE_EXPR (new_var, lvar);
4348 else
4349 SET_DECL_VALUE_EXPR (new_vard,
4350 build_fold_addr_expr (lvar));
4351 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4352 if (x)
4354 tseq = NULL;
4355 dtor = x;
4356 gimplify_stmt (&dtor, &tseq);
4357 gimple_seq_add_seq (&llist[1], tseq);
4359 break;
4361 /* If this is a reference to a constant-size reduction var
4362 with placeholder, we haven't emitted the initializer
4363 for it because it is undesirable if SIMD arrays are used.
4364 But if they aren't used, we need to emit the deferred
4365 initialization now. */
4366 else if (omp_is_reference (var) && is_simd)
4367 handle_simd_reference (clause_loc, new_vard, ilist);
4368 x = lang_hooks.decls.omp_clause_default_ctor
4369 (c, unshare_expr (new_var),
4370 build_outer_var_ref (var, ctx));
4371 if (x)
4372 gimplify_and_add (x, ilist);
4373 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4375 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4376 lower_omp (&tseq, ctx);
4377 gimple_seq_add_seq (ilist, tseq);
4379 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4380 if (is_simd)
4382 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4383 lower_omp (&tseq, ctx);
4384 gimple_seq_add_seq (dlist, tseq);
4385 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4387 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4388 goto do_dtor;
4390 else
4392 x = omp_reduction_init (c, TREE_TYPE (new_var));
4393 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4394 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4396 /* reduction(-:var) sums up the partial results, so it
4397 acts identically to reduction(+:var). */
4398 if (code == MINUS_EXPR)
4399 code = PLUS_EXPR;
4401 tree new_vard = new_var;
4402 if (is_simd && omp_is_reference (var))
4404 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4405 new_vard = TREE_OPERAND (new_var, 0);
4406 gcc_assert (DECL_P (new_vard));
4408 if (is_simd
4409 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4410 ivar, lvar))
4412 tree ref = build_outer_var_ref (var, ctx);
4414 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4416 if (sctx.is_simt)
4418 if (!simt_lane)
4419 simt_lane = create_tmp_var (unsigned_type_node);
4420 x = build_call_expr_internal_loc
4421 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4422 TREE_TYPE (ivar), 2, ivar, simt_lane);
4423 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4424 gimplify_assign (ivar, x, &llist[2]);
4426 x = build2 (code, TREE_TYPE (ref), ref, ivar);
4427 ref = build_outer_var_ref (var, ctx);
4428 gimplify_assign (ref, x, &llist[1]);
4430 if (new_vard != new_var)
4432 SET_DECL_VALUE_EXPR (new_vard,
4433 build_fold_addr_expr (lvar));
4434 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4437 else
4439 if (omp_is_reference (var) && is_simd)
4440 handle_simd_reference (clause_loc, new_vard, ilist);
4441 gimplify_assign (new_var, x, ilist);
4442 if (is_simd)
4444 tree ref = build_outer_var_ref (var, ctx);
4446 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4447 ref = build_outer_var_ref (var, ctx);
4448 gimplify_assign (ref, x, dlist);
4452 break;
4454 default:
4455 gcc_unreachable ();
4460 if (sctx.max_vf == 1)
4461 sctx.is_simt = false;
4463 if (sctx.lane || sctx.is_simt)
4465 uid = create_tmp_var (ptr_type_node, "simduid");
4466 /* Don't want uninit warnings on simduid; it is always uninitialized,
4467 but we use it not for the value but only for the DECL_UID. */
4468 TREE_NO_WARNING (uid) = 1;
4469 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4470 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4471 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4472 gimple_omp_for_set_clauses (ctx->stmt, c);
4474 /* Emit calls denoting privatized variables and initializing a pointer to
4475 the structure that holds private variables as fields; these are resolved after the ompdevlow pass. */
4476 if (sctx.is_simt)
4478 sctx.simt_eargs[0] = uid;
4479 gimple *g
4480 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4481 gimple_call_set_lhs (g, uid);
4482 gimple_seq_add_stmt (ilist, g);
4483 sctx.simt_eargs.release ();
4485 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4486 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4487 gimple_call_set_lhs (g, simtrec);
4488 gimple_seq_add_stmt (ilist, g);
4490 if (sctx.lane)
4492 gimple *g
4493 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4494 gimple_call_set_lhs (g, sctx.lane);
4495 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4496 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4497 g = gimple_build_assign (sctx.lane, INTEGER_CST,
4498 build_int_cst (unsigned_type_node, 0));
4499 gimple_seq_add_stmt (ilist, g);
4500 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4501 if (llist[2])
4503 tree simt_vf = create_tmp_var (unsigned_type_node);
4504 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4505 gimple_call_set_lhs (g, simt_vf);
4506 gimple_seq_add_stmt (dlist, g);
4508 tree t = build_int_cst (unsigned_type_node, 1);
4509 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4510 gimple_seq_add_stmt (dlist, g);
4512 t = build_int_cst (unsigned_type_node, 0);
4513 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4514 gimple_seq_add_stmt (dlist, g);
4516 tree body = create_artificial_label (UNKNOWN_LOCATION);
4517 tree header = create_artificial_label (UNKNOWN_LOCATION);
4518 tree end = create_artificial_label (UNKNOWN_LOCATION);
4519 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4520 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4522 gimple_seq_add_seq (dlist, llist[2]);
4524 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4525 gimple_seq_add_stmt (dlist, g);
4527 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4528 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4529 gimple_seq_add_stmt (dlist, g);
4531 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4533 for (int i = 0; i < 2; i++)
4534 if (llist[i])
4536 tree vf = create_tmp_var (unsigned_type_node);
4537 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4538 gimple_call_set_lhs (g, vf);
4539 gimple_seq *seq = i == 0 ? ilist : dlist;
4540 gimple_seq_add_stmt (seq, g);
4541 tree t = build_int_cst (unsigned_type_node, 0);
4542 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4543 gimple_seq_add_stmt (seq, g);
4544 tree body = create_artificial_label (UNKNOWN_LOCATION);
4545 tree header = create_artificial_label (UNKNOWN_LOCATION);
4546 tree end = create_artificial_label (UNKNOWN_LOCATION);
4547 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4548 gimple_seq_add_stmt (seq, gimple_build_label (body));
4549 gimple_seq_add_seq (seq, llist[i]);
4550 t = build_int_cst (unsigned_type_node, 1);
4551 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4552 gimple_seq_add_stmt (seq, g);
4553 gimple_seq_add_stmt (seq, gimple_build_label (header));
4554 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4555 gimple_seq_add_stmt (seq, g);
4556 gimple_seq_add_stmt (seq, gimple_build_label (end));
4559 if (sctx.is_simt)
4561 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4562 gimple *g
4563 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4564 gimple_seq_add_stmt (dlist, g);
4567 /* The copyin sequence is not to be executed by the main thread, since
4568 that would result in self-copies. Perhaps not visible to scalars,
4569 but it certainly is to C++ operator=. */
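/* Illustrative shape of the guard built below (hypothetical
   threadprivate variable TP):

       if (__builtin_omp_get_thread_num () != 0)
	 TP = the master thread's copy of TP;

   thread 0 owns the master copies, so it skips the assignment.  */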
4570 if (copyin_seq)
4572 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4573 0);
4574 x = build2 (NE_EXPR, boolean_type_node, x,
4575 build_int_cst (TREE_TYPE (x), 0));
4576 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4577 gimplify_and_add (x, ilist);
4580 /* If any copyin variable is passed by reference, we must ensure the
4581 master thread doesn't modify it before it is copied over in all
4582 threads. Similarly for variables in both firstprivate and
4583 lastprivate clauses we need to ensure the lastprivate copying
4584 happens after firstprivate copying in all threads. And similarly
4585 for UDRs if initializer expression refers to omp_orig. */
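/* Hypothetical example of the firstprivate/lastprivate case:

     #pragma omp parallel for firstprivate (x) lastprivate (x)

   every thread must read the original X for its firstprivate copy
   before the thread executing the last iteration stores back into
   it, which is what the barrier below enforces.  */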
4586 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4588 /* Don't add any barrier for #pragma omp simd or
4589 #pragma omp distribute. */
4590 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4591 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4592 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4595 /* If max_vf is non-zero, then we can use only a vectorization factor
4596 up to the max_vf we chose. So stick it into the safelen clause. */
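/* Hypothetical clause values: if the user wrote safelen (64) but the
   privatized SIMD arrays limited sctx.max_vf to 16, the clause added
   below makes the vectorizer see safelen (16), so it never exceeds
   the arrays' extent.  */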
4597 if (sctx.max_vf)
4599 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4600 OMP_CLAUSE_SAFELEN);
4601 if (c == NULL_TREE
4602 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4603 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4604 sctx.max_vf) == 1))
4606 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4607 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4608 sctx.max_vf);
4609 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4610 gimple_omp_for_set_clauses (ctx->stmt, c);
4616 /* Generate code to implement the LASTPRIVATE clauses. This is used for
4617 both parallel and workshare constructs. PREDICATE may be NULL if it's
4618 always true. */
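/* Illustrative shape of the generated sequence (names invented):

       if (PREDICATE)
	 {
	   x_orig = x_priv;      for each lastprivate X
	 }

   where PREDICATE typically tests that this thread ran the
   sequentially last iteration; it is omitted when always true.  */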
4620 static void
4621 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4622 omp_context *ctx)
4624 tree x, c, label = NULL, orig_clauses = clauses;
4625 bool par_clauses = false;
4626 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4628 /* Early exit if there are no lastprivate or linear clauses. */
4629 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4630 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4631 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4632 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4633 break;
4634 if (clauses == NULL)
4636 /* If this was a workshare clause, see if it had been combined
4637 with its parallel. In that case, look for the clauses on the
4638 parallel statement itself. */
4639 if (is_parallel_ctx (ctx))
4640 return;
4642 ctx = ctx->outer;
4643 if (ctx == NULL || !is_parallel_ctx (ctx))
4644 return;
4646 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4647 OMP_CLAUSE_LASTPRIVATE);
4648 if (clauses == NULL)
4649 return;
4650 par_clauses = true;
4653 bool maybe_simt = false;
4654 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4655 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4657 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4658 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4659 if (simduid)
4660 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4663 if (predicate)
4665 gcond *stmt;
4666 tree label_true, arm1, arm2;
4667 enum tree_code pred_code = TREE_CODE (predicate);
4669 label = create_artificial_label (UNKNOWN_LOCATION);
4670 label_true = create_artificial_label (UNKNOWN_LOCATION);
4671 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4673 arm1 = TREE_OPERAND (predicate, 0);
4674 arm2 = TREE_OPERAND (predicate, 1);
4675 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4676 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4678 else
4680 arm1 = predicate;
4681 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4682 arm2 = boolean_false_node;
4683 pred_code = NE_EXPR;
4685 if (maybe_simt)
4687 c = build2 (pred_code, boolean_type_node, arm1, arm2);
4688 c = fold_convert (integer_type_node, c);
4689 simtcond = create_tmp_var (integer_type_node);
4690 gimplify_assign (simtcond, c, stmt_list);
4691 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4692 1, simtcond);
4693 c = create_tmp_var (integer_type_node);
4694 gimple_call_set_lhs (g, c);
4695 gimple_seq_add_stmt (stmt_list, g);
4696 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4697 label_true, label);
4699 else
4700 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4701 gimple_seq_add_stmt (stmt_list, stmt);
4702 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4705 for (c = clauses; c ;)
4707 tree var, new_var;
4708 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4710 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4711 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4712 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4714 var = OMP_CLAUSE_DECL (c);
4715 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4716 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4717 && is_taskloop_ctx (ctx))
4719 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4720 new_var = lookup_decl (var, ctx->outer);
4722 else
4724 new_var = lookup_decl (var, ctx);
4725 /* Avoid uninitialized warnings for lastprivate and
4726 for linear iterators. */
4727 if (predicate
4728 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4729 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4730 TREE_NO_WARNING (new_var) = 1;
4733 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4735 tree val = DECL_VALUE_EXPR (new_var);
4736 if (TREE_CODE (val) == ARRAY_REF
4737 && VAR_P (TREE_OPERAND (val, 0))
4738 && lookup_attribute ("omp simd array",
4739 DECL_ATTRIBUTES (TREE_OPERAND (val,
4740 0))))
4742 if (lastlane == NULL)
4744 lastlane = create_tmp_var (unsigned_type_node);
4745 gcall *g
4746 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4747 2, simduid,
4748 TREE_OPERAND (val, 1));
4749 gimple_call_set_lhs (g, lastlane);
4750 gimple_seq_add_stmt (stmt_list, g);
4752 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4753 TREE_OPERAND (val, 0), lastlane,
4754 NULL_TREE, NULL_TREE);
4757 else if (maybe_simt)
4759 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4760 ? DECL_VALUE_EXPR (new_var)
4761 : new_var);
4762 if (simtlast == NULL)
4764 simtlast = create_tmp_var (unsigned_type_node);
4765 gcall *g = gimple_build_call_internal
4766 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4767 gimple_call_set_lhs (g, simtlast);
4768 gimple_seq_add_stmt (stmt_list, g);
4770 x = build_call_expr_internal_loc
4771 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4772 TREE_TYPE (val), 2, val, simtlast);
4773 new_var = unshare_expr (new_var);
4774 gimplify_assign (new_var, x, stmt_list);
4775 new_var = unshare_expr (new_var);
4778 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4779 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4781 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4782 gimple_seq_add_seq (stmt_list,
4783 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4784 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4786 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4787 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4789 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4790 gimple_seq_add_seq (stmt_list,
4791 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4792 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4795 x = NULL_TREE;
4796 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4797 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4799 gcc_checking_assert (is_taskloop_ctx (ctx));
4800 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4801 ctx->outer->outer);
4802 if (is_global_var (ovar))
4803 x = ovar;
4805 if (!x)
4806 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4807 if (omp_is_reference (var))
4808 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4809 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4810 gimplify_and_add (x, stmt_list);
4812 c = OMP_CLAUSE_CHAIN (c);
4813 if (c == NULL && !par_clauses)
4815 /* If this was a workshare clause, see if it had been combined
4816 with its parallel. In that case, continue looking for the
4817 clauses also on the parallel statement itself. */
4818 if (is_parallel_ctx (ctx))
4819 break;
4821 ctx = ctx->outer;
4822 if (ctx == NULL || !is_parallel_ctx (ctx))
4823 break;
4825 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4826 OMP_CLAUSE_LASTPRIVATE);
4827 par_clauses = true;
4831 if (label)
4832 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4835 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4836 (which might be a placeholder). INNER is true if this is an inner
4837 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4838 join markers. Generate the before-loop forking sequence in
4839 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4840 general form of these sequences is
4842 GOACC_REDUCTION_SETUP
4843 GOACC_FORK
4844 GOACC_REDUCTION_INIT
4846 GOACC_REDUCTION_FINI
4847 GOACC_JOIN
4848 GOACC_REDUCTION_TEARDOWN. */
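/* Illustrative pairing (temporaries named as in the code below): for
   "#pragma acc loop gang reduction (+:sum)" the generated calls are
   roughly

       v1  = GOACC_REDUCTION (SETUP,    ref_to_res, sum, level, +, off);
       GOACC_FORK
       v2  = GOACC_REDUCTION (INIT,     ref_to_res, v1,  level, +, off);
       ... loop body ...
       v3  = GOACC_REDUCTION (FINI,     ref_to_res, v2,  level, +, off);
       GOACC_JOIN
       sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3,  level, +, off);
*/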
4850 static void
4851 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4852 gcall *fork, gcall *join, gimple_seq *fork_seq,
4853 gimple_seq *join_seq, omp_context *ctx)
4855 gimple_seq before_fork = NULL;
4856 gimple_seq after_fork = NULL;
4857 gimple_seq before_join = NULL;
4858 gimple_seq after_join = NULL;
4859 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4860 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4861 unsigned offset = 0;
4863 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4864 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4866 tree orig = OMP_CLAUSE_DECL (c);
4867 tree var = maybe_lookup_decl (orig, ctx);
4868 tree ref_to_res = NULL_TREE;
4869 tree incoming, outgoing, v1, v2, v3;
4870 bool is_private = false;
4872 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4873 if (rcode == MINUS_EXPR)
4874 rcode = PLUS_EXPR;
4875 else if (rcode == TRUTH_ANDIF_EXPR)
4876 rcode = BIT_AND_EXPR;
4877 else if (rcode == TRUTH_ORIF_EXPR)
4878 rcode = BIT_IOR_EXPR;
4879 tree op = build_int_cst (unsigned_type_node, rcode);
4881 if (!var)
4882 var = orig;
4884 incoming = outgoing = var;
4886 if (!inner)
4888 /* See if an outer construct also reduces this variable. */
4889 omp_context *outer = ctx;
4891 while (omp_context *probe = outer->outer)
4893 enum gimple_code type = gimple_code (probe->stmt);
4894 tree cls;
4896 switch (type)
4898 case GIMPLE_OMP_FOR:
4899 cls = gimple_omp_for_clauses (probe->stmt);
4900 break;
4902 case GIMPLE_OMP_TARGET:
4903 if (gimple_omp_target_kind (probe->stmt)
4904 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4905 goto do_lookup;
4907 cls = gimple_omp_target_clauses (probe->stmt);
4908 break;
4910 default:
4911 goto do_lookup;
4914 outer = probe;
4915 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4916 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4917 && orig == OMP_CLAUSE_DECL (cls))
4919 incoming = outgoing = lookup_decl (orig, probe);
4920 goto has_outer_reduction;
4922 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4923 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4924 && orig == OMP_CLAUSE_DECL (cls))
4926 is_private = true;
4927 goto do_lookup;
4931 do_lookup:
4932 /* This is the outermost construct with this reduction,
4933 see if there's a mapping for it. */
4934 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4935 && maybe_lookup_field (orig, outer) && !is_private)
4937 ref_to_res = build_receiver_ref (orig, false, outer);
4938 if (omp_is_reference (orig))
4939 ref_to_res = build_simple_mem_ref (ref_to_res);
4941 tree type = TREE_TYPE (var);
4942 if (POINTER_TYPE_P (type))
4943 type = TREE_TYPE (type);
4945 outgoing = var;
4946 incoming = omp_reduction_init_op (loc, rcode, type);
4948 else
4950 /* Try to look at enclosing contexts for reduction var,
4951 use original if no mapping found. */
4952 tree t = NULL_TREE;
4953 omp_context *c = ctx->outer;
4954 while (c && !t)
4956 t = maybe_lookup_decl (orig, c);
4957 c = c->outer;
4959 incoming = outgoing = (t ? t : orig);
4962 has_outer_reduction:;
4965 if (!ref_to_res)
4966 ref_to_res = integer_zero_node;
4968 if (omp_is_reference (orig))
4970 tree type = TREE_TYPE (var);
4971 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4973 if (!inner)
4975 tree x = create_tmp_var (TREE_TYPE (type), id);
4976 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4979 v1 = create_tmp_var (type, id);
4980 v2 = create_tmp_var (type, id);
4981 v3 = create_tmp_var (type, id);
4983 gimplify_assign (v1, var, fork_seq);
4984 gimplify_assign (v2, var, fork_seq);
4985 gimplify_assign (v3, var, fork_seq);
4987 var = build_simple_mem_ref (var);
4988 v1 = build_simple_mem_ref (v1);
4989 v2 = build_simple_mem_ref (v2);
4990 v3 = build_simple_mem_ref (v3);
4991 outgoing = build_simple_mem_ref (outgoing);
4993 if (!TREE_CONSTANT (incoming))
4994 incoming = build_simple_mem_ref (incoming);
4996 else
4997 v1 = v2 = v3 = var;
4999 /* Determine position in reduction buffer, which may be used
5000 by target. The parser has ensured that this is not a
5001 variable-sized type. */
5002 fixed_size_mode mode
5003 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5004 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5005 offset = (offset + align - 1) & ~(align - 1);
5006 tree off = build_int_cst (sizetype, offset);
5007 offset += GET_MODE_SIZE (mode);
5009 if (!init_code)
5011 init_code = build_int_cst (integer_type_node,
5012 IFN_GOACC_REDUCTION_INIT);
5013 fini_code = build_int_cst (integer_type_node,
5014 IFN_GOACC_REDUCTION_FINI);
5015 setup_code = build_int_cst (integer_type_node,
5016 IFN_GOACC_REDUCTION_SETUP);
5017 teardown_code = build_int_cst (integer_type_node,
5018 IFN_GOACC_REDUCTION_TEARDOWN);
5021 tree setup_call
5022 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5023 TREE_TYPE (var), 6, setup_code,
5024 unshare_expr (ref_to_res),
5025 incoming, level, op, off);
5026 tree init_call
5027 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5028 TREE_TYPE (var), 6, init_code,
5029 unshare_expr (ref_to_res),
5030 v1, level, op, off);
5031 tree fini_call
5032 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5033 TREE_TYPE (var), 6, fini_code,
5034 unshare_expr (ref_to_res),
5035 v2, level, op, off);
5036 tree teardown_call
5037 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5038 TREE_TYPE (var), 6, teardown_code,
5039 ref_to_res, v3, level, op, off);
5041 gimplify_assign (v1, setup_call, &before_fork);
5042 gimplify_assign (v2, init_call, &after_fork);
5043 gimplify_assign (v3, fini_call, &before_join);
5044 gimplify_assign (outgoing, teardown_call, &after_join);
5047 /* Now stitch things together. */
5048 gimple_seq_add_seq (fork_seq, before_fork);
5049 if (fork)
5050 gimple_seq_add_stmt (fork_seq, fork);
5051 gimple_seq_add_seq (fork_seq, after_fork);
5053 gimple_seq_add_seq (join_seq, before_join);
5054 if (join)
5055 gimple_seq_add_stmt (join_seq, join);
5056 gimple_seq_add_seq (join_seq, after_join);
5059 /* Generate code to implement the REDUCTION clauses. */
5061 static void
5062 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5064 gimple_seq sub_seq = NULL;
5065 gimple *stmt;
5066 tree x, c;
5067 int count = 0;
5069 /* OpenACC loop reductions are handled elsewhere. */
5070 if (is_gimple_omp_oacc (ctx->stmt))
5071 return;
5073 /* SIMD reductions are handled in lower_rec_input_clauses. */
5074 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5075 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5076 return;
5078 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5079 update in that case, otherwise use a lock. */
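/* Illustrative sketch: a lone "reduction (+:s)" clause merges with an
   atomic update,

       #pragma omp atomic
       s = s + s_priv;

   whereas several reductions, array sections or UDRs are merged
   between GOMP_atomic_start () and GOMP_atomic_end () instead.  */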
5080 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5081 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5083 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5084 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5086 /* Never use OMP_ATOMIC for array reductions or UDRs. */
5087 count = -1;
5088 break;
5090 count++;
5093 if (count == 0)
5094 return;
5096 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5098 tree var, ref, new_var, orig_var;
5099 enum tree_code code;
5100 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5102 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5103 continue;
5105 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5106 orig_var = var = OMP_CLAUSE_DECL (c);
5107 if (TREE_CODE (var) == MEM_REF)
5109 var = TREE_OPERAND (var, 0);
5110 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5111 var = TREE_OPERAND (var, 0);
5112 if (TREE_CODE (var) == ADDR_EXPR)
5113 var = TREE_OPERAND (var, 0);
5114 else
5116 /* If this is a pointer- or reference-based array
5117 section, the var could be private in the outer
5118 context, e.g. on an orphaned loop construct. Pretend this
5119 is the private variable's outer reference. */
5120 ccode = OMP_CLAUSE_PRIVATE;
5121 if (TREE_CODE (var) == INDIRECT_REF)
5122 var = TREE_OPERAND (var, 0);
5124 orig_var = var;
5125 if (is_variable_sized (var))
5127 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5128 var = DECL_VALUE_EXPR (var);
5129 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5130 var = TREE_OPERAND (var, 0);
5131 gcc_assert (DECL_P (var));
5134 new_var = lookup_decl (var, ctx);
5135 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5136 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5137 ref = build_outer_var_ref (var, ctx, ccode);
5138 code = OMP_CLAUSE_REDUCTION_CODE (c);
5140 /* reduction(-:var) sums up the partial results, so it acts
5141 identically to reduction(+:var). */
5142 if (code == MINUS_EXPR)
5143 code = PLUS_EXPR;
5145 if (count == 1)
5147 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5149 addr = save_expr (addr);
5150 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5151 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5152 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5153 gimplify_and_add (x, stmt_seqp);
5154 return;
5156 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5158 tree d = OMP_CLAUSE_DECL (c);
5159 tree type = TREE_TYPE (d);
5160 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5161 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5162 tree ptype = build_pointer_type (TREE_TYPE (type));
5163 tree bias = TREE_OPERAND (d, 1);
5164 d = TREE_OPERAND (d, 0);
5165 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5167 tree b = TREE_OPERAND (d, 1);
5168 b = maybe_lookup_decl (b, ctx);
5169 if (b == NULL)
5171 b = TREE_OPERAND (d, 1);
5172 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5174 if (integer_zerop (bias))
5175 bias = b;
5176 else
5178 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5179 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5180 TREE_TYPE (b), b, bias);
5182 d = TREE_OPERAND (d, 0);
5184 /* For references, build_outer_var_ref already performs this, so
5185 only new_var needs a dereference. */
5186 if (TREE_CODE (d) == INDIRECT_REF)
5188 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5189 gcc_assert (omp_is_reference (var) && var == orig_var);
5191 else if (TREE_CODE (d) == ADDR_EXPR)
5193 if (orig_var == var)
5195 new_var = build_fold_addr_expr (new_var);
5196 ref = build_fold_addr_expr (ref);
5199 else
5201 gcc_assert (orig_var == var);
5202 if (omp_is_reference (var))
5203 ref = build_fold_addr_expr (ref);
5205 if (DECL_P (v))
5207 tree t = maybe_lookup_decl (v, ctx);
5208 if (t)
5209 v = t;
5210 else
5211 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5212 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5214 if (!integer_zerop (bias))
5216 bias = fold_convert_loc (clause_loc, sizetype, bias);
5217 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5218 TREE_TYPE (new_var), new_var,
5219 unshare_expr (bias));
5220 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5221 TREE_TYPE (ref), ref, bias);
5223 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5224 ref = fold_convert_loc (clause_loc, ptype, ref);
5225 tree m = create_tmp_var (ptype, NULL);
5226 gimplify_assign (m, new_var, stmt_seqp);
5227 new_var = m;
5228 m = create_tmp_var (ptype, NULL);
5229 gimplify_assign (m, ref, stmt_seqp);
5230 ref = m;
5231 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5232 tree body = create_artificial_label (UNKNOWN_LOCATION);
5233 tree end = create_artificial_label (UNKNOWN_LOCATION);
5234 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5235 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5236 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5237 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5239 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5240 tree decl_placeholder
5241 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5242 SET_DECL_VALUE_EXPR (placeholder, out);
5243 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5244 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5245 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5246 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5247 gimple_seq_add_seq (&sub_seq,
5248 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5249 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5250 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5251 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5253 else
5255 x = build2 (code, TREE_TYPE (out), out, priv);
5256 out = unshare_expr (out);
5257 gimplify_assign (out, x, &sub_seq);
5259 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5260 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5261 gimple_seq_add_stmt (&sub_seq, g);
5262 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5263 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5264 gimple_seq_add_stmt (&sub_seq, g);
5265 g = gimple_build_assign (i, PLUS_EXPR, i,
5266 build_int_cst (TREE_TYPE (i), 1));
5267 gimple_seq_add_stmt (&sub_seq, g);
5268 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5269 gimple_seq_add_stmt (&sub_seq, g);
5270 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5272 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5274 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5276 if (omp_is_reference (var)
5277 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5278 TREE_TYPE (ref)))
5279 ref = build_fold_addr_expr_loc (clause_loc, ref);
5280 SET_DECL_VALUE_EXPR (placeholder, ref);
5281 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5282 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5283 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5284 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5285 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5287 else
5289 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5290 ref = build_outer_var_ref (var, ctx);
5291 gimplify_assign (ref, x, &sub_seq);
5295 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5296 0);
5297 gimple_seq_add_stmt (stmt_seqp, stmt);
5299 gimple_seq_add_seq (stmt_seqp, sub_seq);
5301 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5302 0);
5303 gimple_seq_add_stmt (stmt_seqp, stmt);
5307 /* Generate code to implement the COPYPRIVATE clauses. */
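/* Illustrative sketch (field names invented): for
   "#pragma omp single copyprivate (x)" the thread that executed the
   single region stores X (or &X when passed by reference) into the
   broadcast record in SLIST,

       .omp_copy_o.x = x;

   and every other thread copies it back out in RLIST after the
   barrier:

       x = .omp_copy_i.x;
*/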
5309 static void
5310 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5311 omp_context *ctx)
5313 tree c;
5315 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5317 tree var, new_var, ref, x;
5318 bool by_ref;
5319 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5321 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5322 continue;
5324 var = OMP_CLAUSE_DECL (c);
5325 by_ref = use_pointer_for_field (var, NULL);
5327 ref = build_sender_ref (var, ctx);
5328 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5329 if (by_ref)
5331 x = build_fold_addr_expr_loc (clause_loc, new_var);
5332 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5334 gimplify_assign (ref, x, slist);
5336 ref = build_receiver_ref (var, false, ctx);
5337 if (by_ref)
5339 ref = fold_convert_loc (clause_loc,
5340 build_pointer_type (TREE_TYPE (new_var)),
5341 ref);
5342 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5344 if (omp_is_reference (var))
5346 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5347 ref = build_simple_mem_ref_loc (clause_loc, ref);
5348 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5350 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5351 gimplify_and_add (x, rlist);
5356 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5357 and REDUCTION from the sender (aka parent) side. */
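/* Illustrative sketch (field names invented): for
   "#pragma omp task firstprivate (x)" the parent side emits

       .omp_data_o.x = x;           (the do_in direction)

   into ILIST before the runtime call, while lastprivate adds the
   reverse copy to OLIST after the region (the do_out direction).  */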
5359 static void
5360 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5361 omp_context *ctx)
5363 tree c, t;
5364 int ignored_looptemp = 0;
5365 bool is_taskloop = false;
5367 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5368 by GOMP_taskloop. */
5369 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5371 ignored_looptemp = 2;
5372 is_taskloop = true;
5375 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5377 tree val, ref, x, var;
5378 bool by_ref, do_in = false, do_out = false;
5379 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5381 switch (OMP_CLAUSE_CODE (c))
5383 case OMP_CLAUSE_PRIVATE:
5384 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5385 break;
5386 continue;
5387 case OMP_CLAUSE_FIRSTPRIVATE:
5388 case OMP_CLAUSE_COPYIN:
5389 case OMP_CLAUSE_LASTPRIVATE:
5390 case OMP_CLAUSE_REDUCTION:
5391 break;
5392 case OMP_CLAUSE_SHARED:
5393 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5394 break;
5395 continue;
5396 case OMP_CLAUSE__LOOPTEMP_:
5397 if (ignored_looptemp)
5399 ignored_looptemp--;
5400 continue;
5402 break;
5403 default:
5404 continue;
5407 val = OMP_CLAUSE_DECL (c);
5408 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5409 && TREE_CODE (val) == MEM_REF)
5411 val = TREE_OPERAND (val, 0);
5412 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5413 val = TREE_OPERAND (val, 0);
5414 if (TREE_CODE (val) == INDIRECT_REF
5415 || TREE_CODE (val) == ADDR_EXPR)
5416 val = TREE_OPERAND (val, 0);
5417 if (is_variable_sized (val))
5418 continue;
5421 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5422 outer taskloop region. */
5423 omp_context *ctx_for_o = ctx;
5424 if (is_taskloop
5425 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5426 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5427 ctx_for_o = ctx->outer;
5429 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5431 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5432 && is_global_var (var))
5433 continue;
5435 t = omp_member_access_dummy_var (var);
5436 if (t)
5438 var = DECL_VALUE_EXPR (var);
5439 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5440 if (o != t)
5441 var = unshare_and_remap (var, t, o);
5442 else
5443 var = unshare_expr (var);
5446 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5448 /* Handle taskloop firstprivate/lastprivate, where the
5449 lastprivate on GIMPLE_OMP_TASK is represented as
5450 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5451 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5452 x = omp_build_component_ref (ctx->sender_decl, f);
5453 if (use_pointer_for_field (val, ctx))
5454 var = build_fold_addr_expr (var);
5455 gimplify_assign (x, var, ilist);
5456 DECL_ABSTRACT_ORIGIN (f) = NULL;
5457 continue;
5460 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5461 || val == OMP_CLAUSE_DECL (c))
5462 && is_variable_sized (val))
5463 continue;
5464 by_ref = use_pointer_for_field (val, NULL);
5466 switch (OMP_CLAUSE_CODE (c))
5468 case OMP_CLAUSE_FIRSTPRIVATE:
5469 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5470 && !by_ref
5471 && is_task_ctx (ctx))
5472 TREE_NO_WARNING (var) = 1;
5473 do_in = true;
5474 break;
5476 case OMP_CLAUSE_PRIVATE:
5477 case OMP_CLAUSE_COPYIN:
5478 case OMP_CLAUSE__LOOPTEMP_:
5479 do_in = true;
5480 break;
5482 case OMP_CLAUSE_LASTPRIVATE:
5483 if (by_ref || omp_is_reference (val))
5485 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5486 continue;
5487 do_in = true;
5489 else
5491 do_out = true;
5492 if (lang_hooks.decls.omp_private_outer_ref (val))
5493 do_in = true;
5495 break;
5497 case OMP_CLAUSE_REDUCTION:
5498 do_in = true;
5499 if (val == OMP_CLAUSE_DECL (c))
5500 do_out = !(by_ref || omp_is_reference (val));
5501 else
5502 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5503 break;
5505 default:
5506 gcc_unreachable ();
5509 if (do_in)
5511 ref = build_sender_ref (val, ctx);
5512 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5513 gimplify_assign (ref, x, ilist);
5514 if (is_task_ctx (ctx))
5515 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5518 if (do_out)
5520 ref = build_sender_ref (val, ctx);
5521 gimplify_assign (var, ref, olist);
5526 /* Generate code to implement SHARED from the sender (aka parent)
5527 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5528 list things that got automatically shared. */
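/* Illustrative sketch: an implicitly shared X is sent the same way,
   by value or by address depending on use_pointer_for_field,

       .omp_data_o.x = x;      or      .omp_data_o.x = &x;

   with a copy-back in OLIST in the by-value case unless X is
   read-only.  */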
5530 static void
5531 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5533 tree var, ovar, nvar, t, f, x, record_type;
5535 if (ctx->record_type == NULL)
5536 return;
5538 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5539 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5541 ovar = DECL_ABSTRACT_ORIGIN (f);
5542 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5543 continue;
5545 nvar = maybe_lookup_decl (ovar, ctx);
5546 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5547 continue;
5549 /* If CTX is a nested parallel directive, find the immediately
5550 enclosing parallel or workshare construct that contains a
5551 mapping for OVAR. */
5552 var = lookup_decl_in_outer_ctx (ovar, ctx);
5554 t = omp_member_access_dummy_var (var);
5555 if (t)
5557 var = DECL_VALUE_EXPR (var);
5558 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5559 if (o != t)
5560 var = unshare_and_remap (var, t, o);
5561 else
5562 var = unshare_expr (var);
5565 if (use_pointer_for_field (ovar, ctx))
5567 x = build_sender_ref (ovar, ctx);
5568 var = build_fold_addr_expr (var);
5569 gimplify_assign (x, var, ilist);
5571 else
5573 x = build_sender_ref (ovar, ctx);
5574 gimplify_assign (x, var, ilist);
5576 if (!TREE_READONLY (var)
5577 /* We don't need to receive a new reference to a result
5578 or parm decl. In fact we may not store to it as we will
5579 invalidate any pending RSO and generate wrong gimple
5580 during inlining. */
5581 && !((TREE_CODE (var) == RESULT_DECL
5582 || TREE_CODE (var) == PARM_DECL)
5583 && DECL_BY_REFERENCE (var)))
5585 x = build_sender_ref (ovar, ctx);
5586 gimplify_assign (var, x, olist);
5592 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5593 other information that must be processed by the target compiler.
5594 Return the maximum number of dimensions the associated loop might
5595 be partitioned over. */
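/* Hypothetical example: inside an OpenACC parallel region,
   "#pragma acc loop gang vector" produces roughly

       ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, 2,
			   OLF_DIM_GANG | OLF_DIM_VECTOR | OLF_INDEPENDENT);

   i.e. two partitioning levels plus the encoded tag; a gang
   (static:N) chunk size would be appended as a further argument.  */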
5597 static unsigned
5598 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5599 gimple_seq *seq, omp_context *ctx)
5601 unsigned levels = 0;
5602 unsigned tag = 0;
5603 tree gang_static = NULL_TREE;
5604 auto_vec<tree, 5> args;
5606 args.quick_push (build_int_cst
5607 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5608 args.quick_push (ddvar);
5609 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5611 switch (OMP_CLAUSE_CODE (c))
5613 case OMP_CLAUSE_GANG:
5614 tag |= OLF_DIM_GANG;
5615 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5616 /* static:* is represented by -1, and we can ignore it, as
5617 scheduling is always static. */
5618 if (gang_static && integer_minus_onep (gang_static))
5619 gang_static = NULL_TREE;
5620 levels++;
5621 break;
5623 case OMP_CLAUSE_WORKER:
5624 tag |= OLF_DIM_WORKER;
5625 levels++;
5626 break;
5628 case OMP_CLAUSE_VECTOR:
5629 tag |= OLF_DIM_VECTOR;
5630 levels++;
5631 break;
5633 case OMP_CLAUSE_SEQ:
5634 tag |= OLF_SEQ;
5635 break;
5637 case OMP_CLAUSE_AUTO:
5638 tag |= OLF_AUTO;
5639 break;
5641 case OMP_CLAUSE_INDEPENDENT:
5642 tag |= OLF_INDEPENDENT;
5643 break;
5645 case OMP_CLAUSE_TILE:
5646 tag |= OLF_TILE;
5647 break;
5649 default:
5650 continue;
5654 if (gang_static)
5656 if (DECL_P (gang_static))
5657 gang_static = build_outer_var_ref (gang_static, ctx);
5658 tag |= OLF_GANG_STATIC;
5661 /* In a parallel region, loops are implicitly INDEPENDENT. */
5662 omp_context *tgt = enclosing_target_ctx (ctx);
5663 if (!tgt || is_oacc_parallel (tgt))
5664 tag |= OLF_INDEPENDENT;
5666 if (tag & OLF_TILE)
5667 /* Tiling could use all 3 levels. */
5668 levels = 3;
5669 else
5671 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5672 Ensure at least one level, or 2 for possible auto
5673 partitioning. */
5674 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5675 << OLF_DIM_BASE) | OLF_SEQ));
5677 if (levels < 1u + maybe_auto)
5678 levels = 1u + maybe_auto;
5681 args.quick_push (build_int_cst (integer_type_node, levels));
5682 args.quick_push (build_int_cst (integer_type_node, tag));
5683 if (gang_static)
5684 args.quick_push (gang_static);
5686 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5687 gimple_set_location (call, loc);
5688 gimple_set_lhs (call, ddvar);
5689 gimple_seq_add_stmt (seq, call);
5691 return levels;
5694 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5695 partitioning level of the enclosed region. */
5697 static void
5698 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5699 tree tofollow, gimple_seq *seq)
5701 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5702 : IFN_UNIQUE_OACC_TAIL_MARK);
5703 tree marker = build_int_cst (integer_type_node, marker_kind);
5704 int nargs = 2 + (tofollow != NULL_TREE);
5705 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5706 marker, ddvar, tofollow);
5707 gimple_set_location (call, loc);
5708 gimple_set_lhs (call, ddvar);
5709 gimple_seq_add_stmt (seq, call);
5712 /* Generate the before and after OpenACC loop sequences. CLAUSES are
5713 the loop clauses, from which we extract reductions. Initialize
5714 HEAD and TAIL. */
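/* Illustrative shape for two partitioning levels: each iteration of
   the loop below appends one level's forking code to HEAD and
   prepends its joining code to TAIL, so the result nests as

       HEAD:  fork (outer)  fork (inner)
       ... loop body ...
       TAIL:  join (inner)  join (outer)

   with loop markers delimiting each level.  */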
5716 static void
5717 lower_oacc_head_tail (location_t loc, tree clauses,
5718 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5720 bool inner = false;
5721 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5722 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5724 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5725 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5726 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5728 gcc_assert (count);
5729 for (unsigned done = 1; count; count--, done++)
5731 gimple_seq fork_seq = NULL;
5732 gimple_seq join_seq = NULL;
5734 tree place = build_int_cst (integer_type_node, -1);
5735 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5736 fork_kind, ddvar, place);
5737 gimple_set_location (fork, loc);
5738 gimple_set_lhs (fork, ddvar);
5740 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5741 join_kind, ddvar, place);
5742 gimple_set_location (join, loc);
5743 gimple_set_lhs (join, ddvar);
5745 /* Mark the beginning of this level sequence. */
5746 if (inner)
5747 lower_oacc_loop_marker (loc, ddvar, true,
5748 build_int_cst (integer_type_node, count),
5749 &fork_seq);
5750 lower_oacc_loop_marker (loc, ddvar, false,
5751 build_int_cst (integer_type_node, done),
5752 &join_seq);
5754 lower_oacc_reductions (loc, clauses, place, inner,
5755 fork, join, &fork_seq, &join_seq, ctx);
5757 /* Append this level to head. */
5758 gimple_seq_add_seq (head, fork_seq);
5759 /* Prepend it to tail. */
5760 gimple_seq_add_seq (&join_seq, *tail);
5761 *tail = join_seq;
5763 inner = true;
5766 /* Mark the end of the sequence. */
5767 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5768 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5771 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5772 catch handler and return it. This prevents programs from violating the
5773 structured block semantics with throws. */
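/* Schematically (an illustration, not literal dump output), BODY becomes

	try
	  {
	    BODY;
	  }
	catch
	  {
	    <<<eh_must_not_throw (terminate)>>>
	  }

   where the handler is the language's EH cleanup action (e.g.
   std::terminate for C++) or __builtin_trap if the language provides
   none.  */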
5775 static gimple_seq
5776 maybe_catch_exception (gimple_seq body)
5778 gimple *g;
5779 tree decl;
5781 if (!flag_exceptions)
5782 return body;
5784 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5785 decl = lang_hooks.eh_protect_cleanup_actions ();
5786 else
5787 decl = builtin_decl_explicit (BUILT_IN_TRAP);
5789 g = gimple_build_eh_must_not_throw (decl);
5790 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5791 GIMPLE_TRY_CATCH);
5793 return gimple_seq_alloc_with_stmt (g);
5797 /* Routines to lower OMP directives into OMP-GIMPLE. */
5799 /* If ctx is a worksharing context inside of a cancellable parallel
5800 region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
5801 and a conditional branch to the parallel's cancel_label to handle
5802 cancellation in the implicit barrier. */
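/* The lowered shape is roughly (illustration only; .retval is a made-up
   name for the temporary created below):

	.retval = GIMPLE_OMP_RETURN;
	if (.retval != 0) goto <cancel_label>; else goto <fallthru>;
	<fallthru>:

   so that a barrier observing cancellation transfers control to the
   enclosing parallel's cancellation point.  */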
5804 static void
5805 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5807 gimple *omp_return = gimple_seq_last_stmt (*body);
5808 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5809 if (gimple_omp_return_nowait_p (omp_return))
5810 return;
5811 if (ctx->outer
5812 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5813 && ctx->outer->cancellable)
5815 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5816 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5817 tree lhs = create_tmp_var (c_bool_type);
5818 gimple_omp_return_set_lhs (omp_return, lhs);
5819 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5820 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5821 fold_convert (c_bool_type,
5822 boolean_false_node),
5823 ctx->outer->cancel_label, fallthru_label);
5824 gimple_seq_add_stmt (body, g);
5825 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5829 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5830 CTX is the enclosing OMP context for the current statement. */
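/* In outline (an illustration, not literal output), the lowered
   sequence built below is

	<ilist: input clause setup>
	GIMPLE_OMP_SECTIONS <clauses, control var .section>
	GIMPLE_OMP_SECTIONS_SWITCH
	  <bind holding each lowered GIMPLE_OMP_SECTION>
	GIMPLE_OMP_CONTINUE (.section, .section)
	<olist: reductions>  <dlist: destructors>
	GIMPLE_OMP_RETURN

   which pass_expand_omp later turns into GOMP_sections_start /
   GOMP_sections_next calls and a switch on .section.  */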
5832 static void
5833 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5835 tree block, control;
5836 gimple_stmt_iterator tgsi;
5837 gomp_sections *stmt;
5838 gimple *t;
5839 gbind *new_stmt, *bind;
5840 gimple_seq ilist, dlist, olist, new_body;
5842 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5844 push_gimplify_context ();
5846 dlist = NULL;
5847 ilist = NULL;
5848 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5849 &ilist, &dlist, ctx, NULL);
5851 new_body = gimple_omp_body (stmt);
5852 gimple_omp_set_body (stmt, NULL);
5853 tgsi = gsi_start (new_body);
5854 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5856 omp_context *sctx;
5857 gimple *sec_start;
5859 sec_start = gsi_stmt (tgsi);
5860 sctx = maybe_lookup_ctx (sec_start);
5861 gcc_assert (sctx);
5863 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5864 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5865 GSI_CONTINUE_LINKING);
5866 gimple_omp_set_body (sec_start, NULL);
5868 if (gsi_one_before_end_p (tgsi))
5870 gimple_seq l = NULL;
5871 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5872 &l, ctx);
5873 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5874 gimple_omp_section_set_last (sec_start);
5877 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5878 GSI_CONTINUE_LINKING);
5881 block = make_node (BLOCK);
5882 bind = gimple_build_bind (NULL, new_body, block);
5884 olist = NULL;
5885 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5887 block = make_node (BLOCK);
5888 new_stmt = gimple_build_bind (NULL, NULL, block);
5889 gsi_replace (gsi_p, new_stmt, true);
5891 pop_gimplify_context (new_stmt);
5892 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5893 BLOCK_VARS (block) = gimple_bind_vars (bind);
5894 if (BLOCK_VARS (block))
5895 TREE_USED (block) = 1;
5897 new_body = NULL;
5898 gimple_seq_add_seq (&new_body, ilist);
5899 gimple_seq_add_stmt (&new_body, stmt);
5900 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5901 gimple_seq_add_stmt (&new_body, bind);
5903 control = create_tmp_var (unsigned_type_node, ".section");
5904 t = gimple_build_omp_continue (control, control);
5905 gimple_omp_sections_set_control (stmt, control);
5906 gimple_seq_add_stmt (&new_body, t);
5908 gimple_seq_add_seq (&new_body, olist);
5909 if (ctx->cancellable)
5910 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5911 gimple_seq_add_seq (&new_body, dlist);
5913 new_body = maybe_catch_exception (new_body);
5915 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5916 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5917 t = gimple_build_omp_return (nowait);
5918 gimple_seq_add_stmt (&new_body, t);
5919 maybe_add_implicit_barrier_cancel (ctx, &new_body);
5921 gimple_bind_set_body (new_stmt, new_body);
5925 /* A subroutine of lower_omp_single. Expand the simple form of
5926 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5928 if (GOMP_single_start ())
5929 BODY;
5930 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5932 FIXME. It may be better to delay expanding the logic of this until
5933 pass_expand_omp. The expanded logic may make the job more difficult
5934 for a synchronization analysis pass.  */
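/* For example (illustration only; 'x' and 'compute' are placeholder
   names), given

	#pragma omp single
	  x = compute ();

   exactly one thread of the team sees GOMP_single_start () return true
   and executes the assignment; the others branch straight to FLABEL.  */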
5936 static void
5937 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5939 location_t loc = gimple_location (single_stmt);
5940 tree tlabel = create_artificial_label (loc);
5941 tree flabel = create_artificial_label (loc);
5942 gimple *call, *cond;
5943 tree lhs, decl;
5945 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5946 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5947 call = gimple_build_call (decl, 0);
5948 gimple_call_set_lhs (call, lhs);
5949 gimple_seq_add_stmt (pre_p, call);
5951 cond = gimple_build_cond (EQ_EXPR, lhs,
5952 fold_convert_loc (loc, TREE_TYPE (lhs),
5953 boolean_true_node),
5954 tlabel, flabel);
5955 gimple_seq_add_stmt (pre_p, cond);
5956 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5957 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5958 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
5962 /* A subroutine of lower_omp_single. Expand the simple form of
5963 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
5965 #pragma omp single copyprivate (a, b, c)
5967 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5970 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5972 BODY;
5973 copyout.a = a;
5974 copyout.b = b;
5975 copyout.c = c;
5976 GOMP_single_copy_end (&copyout);
5978 else
5980 a = copyout_p->a;
5981 b = copyout_p->b;
5982 c = copyout_p->c;
5984 GOMP_barrier ();
5987 FIXME. It may be better to delay expanding the logic of this until
5988 pass_expand_omp. The expanded logic may make the job more difficult
5989 for a synchronization analysis pass.  */
5991 static void
5992 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5993 omp_context *ctx)
5995 tree ptr_type, t, l0, l1, l2, bfn_decl;
5996 gimple_seq copyin_seq;
5997 location_t loc = gimple_location (single_stmt);
5999 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6001 ptr_type = build_pointer_type (ctx->record_type);
6002 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6004 l0 = create_artificial_label (loc);
6005 l1 = create_artificial_label (loc);
6006 l2 = create_artificial_label (loc);
6008 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6009 t = build_call_expr_loc (loc, bfn_decl, 0);
6010 t = fold_convert_loc (loc, ptr_type, t);
6011 gimplify_assign (ctx->receiver_decl, t, pre_p);
6013 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6014 build_int_cst (ptr_type, 0));
6015 t = build3 (COND_EXPR, void_type_node, t,
6016 build_and_jump (&l0), build_and_jump (&l1));
6017 gimplify_and_add (t, pre_p);
6019 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6021 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6023 copyin_seq = NULL;
6024 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6025 &copyin_seq, ctx);
6027 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6028 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6029 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6030 gimplify_and_add (t, pre_p);
6032 t = build_and_jump (&l2);
6033 gimplify_and_add (t, pre_p);
6035 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6037 gimple_seq_add_seq (pre_p, copyin_seq);
6039 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6043 /* Expand code for an OpenMP single directive. */
6045 static void
6046 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6048 tree block;
6049 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6050 gbind *bind;
6051 gimple_seq bind_body, bind_body_tail = NULL, dlist;
6053 push_gimplify_context ();
6055 block = make_node (BLOCK);
6056 bind = gimple_build_bind (NULL, NULL, block);
6057 gsi_replace (gsi_p, bind, true);
6058 bind_body = NULL;
6059 dlist = NULL;
6060 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6061 &bind_body, &dlist, ctx, NULL);
6062 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6064 gimple_seq_add_stmt (&bind_body, single_stmt);
6066 if (ctx->record_type)
6067 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6068 else
6069 lower_omp_single_simple (single_stmt, &bind_body);
6071 gimple_omp_set_body (single_stmt, NULL);
6073 gimple_seq_add_seq (&bind_body, dlist);
6075 bind_body = maybe_catch_exception (bind_body);
6077 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6078 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6079 gimple *g = gimple_build_omp_return (nowait);
6080 gimple_seq_add_stmt (&bind_body_tail, g);
6081 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6082 if (ctx->record_type)
6084 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6085 tree clobber = build_constructor (ctx->record_type, NULL);
6086 TREE_THIS_VOLATILE (clobber) = 1;
6087 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6088 clobber), GSI_SAME_STMT);
6090 gimple_seq_add_seq (&bind_body, bind_body_tail);
6091 gimple_bind_set_body (bind, bind_body);
6093 pop_gimplify_context (bind);
6095 gimple_bind_append_vars (bind, ctx->block_vars);
6096 BLOCK_VARS (block) = ctx->block_vars;
6097 if (BLOCK_VARS (block))
6098 TREE_USED (block) = 1;
6102 /* Expand code for an OpenMP master directive. */
6104 static void
6105 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6107 tree block, lab = NULL, x, bfn_decl;
6108 gimple *stmt = gsi_stmt (*gsi_p);
6109 gbind *bind;
6110 location_t loc = gimple_location (stmt);
6111 gimple_seq tseq;
6113 push_gimplify_context ();
6115 block = make_node (BLOCK);
6116 bind = gimple_build_bind (NULL, NULL, block);
6117 gsi_replace (gsi_p, bind, true);
6118 gimple_bind_add_stmt (bind, stmt);
6120 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6121 x = build_call_expr_loc (loc, bfn_decl, 0);
6122 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6123 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6124 tseq = NULL;
6125 gimplify_and_add (x, &tseq);
6126 gimple_bind_add_seq (bind, tseq);
6128 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6129 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6130 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6131 gimple_omp_set_body (stmt, NULL);
6133 gimple_bind_add_stmt (bind, gimple_build_label (lab));
6135 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6137 pop_gimplify_context (bind);
6139 gimple_bind_append_vars (bind, ctx->block_vars);
6140 BLOCK_VARS (block) = ctx->block_vars;
6144 /* Expand code for an OpenMP taskgroup directive. */
6146 static void
6147 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6149 gimple *stmt = gsi_stmt (*gsi_p);
6150 gcall *x;
6151 gbind *bind;
6152 tree block = make_node (BLOCK);
6154 bind = gimple_build_bind (NULL, NULL, block);
6155 gsi_replace (gsi_p, bind, true);
6156 gimple_bind_add_stmt (bind, stmt);
6158 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6159 0);
6160 gimple_bind_add_stmt (bind, x);
6162 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6163 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6164 gimple_omp_set_body (stmt, NULL);
6166 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6168 gimple_bind_append_vars (bind, ctx->block_vars);
6169 BLOCK_VARS (block) = ctx->block_vars;
6173 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
6175 static void
6176 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6177 omp_context *ctx)
6179 struct omp_for_data fd;
6180 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6181 return;
6183 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6184 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6185 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6186 if (!fd.ordered)
6187 return;
6189 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6190 tree c = gimple_omp_ordered_clauses (ord_stmt);
6191 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6192 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6194 /* Merge depend clauses from multiple adjacent
6195 #pragma omp ordered depend(sink:...) constructs
6196 into one #pragma omp ordered depend(sink:...), so that
6197 we can optimize them together. */
6198 gimple_stmt_iterator gsi = *gsi_p;
6199 gsi_next (&gsi);
6200 while (!gsi_end_p (gsi))
6202 gimple *stmt = gsi_stmt (gsi);
6203 if (is_gimple_debug (stmt)
6204 || gimple_code (stmt) == GIMPLE_NOP)
6206 gsi_next (&gsi);
6207 continue;
6209 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6210 break;
6211 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6212 c = gimple_omp_ordered_clauses (ord_stmt2);
6213 if (c == NULL_TREE
6214 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6215 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6216 break;
6217 while (*list_p)
6218 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6219 *list_p = c;
6220 gsi_remove (&gsi, true);
6224 /* Canonicalize sink dependence clauses into one folded clause if
6225 possible.
6227 The basic algorithm is to create a sink vector whose first
6228 element is the GCD of all the first elements, and whose remaining
6229 elements are the minimum of the subsequent columns.
6231 We ignore dependence vectors whose first element is zero because
6232 such dependencies are known to be executed by the same thread.
6234 We take into account the direction of the loop, so a minimum
6235 becomes a maximum if the loop is iterating forwards. We also
6236 ignore sink clauses where the loop direction is unknown, or where
6237 the offsets are clearly invalid because they are not a multiple
6238 of the loop increment.
6240 For example:
6242 #pragma omp for ordered(2)
6243 for (i=0; i < N; ++i)
6244 for (j=0; j < M; ++j)
6246 #pragma omp ordered \
6247 depend(sink:i-8,j-2) \
6248 depend(sink:i,j-1) \ // Completely ignored because i+0.
6249 depend(sink:i-4,j-3) \
6250 depend(sink:i-6,j-4)
6251 #pragma omp ordered depend(source)
6254 Folded clause is:
6256 depend(sink:-gcd(8,4,6),-min(2,3,4))
6257 -or-
6258 depend(sink:-2,-2)
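/* As a worked check of the example above (illustrative C, not part of
   the pass): with first-column offsets {8, 4, 6} and second-column
   offsets {2, 3, 4},

	static int gcd (int a, int b) { return b ? gcd (b, a % b) : a; }

	int first = gcd (gcd (8, 4), 6);	// 2
	int second = MIN (2, MIN (3, 4));	// 2

   which yields the folded vector depend(sink:-2,-2) quoted above.  */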
6261 /* FIXME: Computing GCD's where the first element is zero is
6262 non-trivial in the presence of collapsed loops. Do this later. */
6263 if (fd.collapse > 1)
6264 return;
6266 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6268 /* wide_int is not a POD so it must be default-constructed. */
6269 for (unsigned i = 0; i != 2 * len - 1; ++i)
6270 new (static_cast<void*>(folded_deps + i)) wide_int ();
6272 tree folded_dep = NULL_TREE;
6273 /* TRUE if the first dimension's offset is negative. */
6274 bool neg_offset_p = false;
6276 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6277 unsigned int i;
6278 while ((c = *list_p) != NULL)
6280 bool remove = false;
6282 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6283 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6284 goto next_ordered_clause;
6286 tree vec;
6287 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6288 vec && TREE_CODE (vec) == TREE_LIST;
6289 vec = TREE_CHAIN (vec), ++i)
6291 gcc_assert (i < len);
6293 /* omp_extract_for_data has canonicalized the condition. */
6294 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6295 || fd.loops[i].cond_code == GT_EXPR);
6296 bool forward = fd.loops[i].cond_code == LT_EXPR;
6297 bool maybe_lexically_later = true;
6299 /* While the committee makes up its mind, bail if we have any
6300 non-constant steps. */
6301 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6302 goto lower_omp_ordered_ret;
6304 tree itype = TREE_TYPE (TREE_VALUE (vec));
6305 if (POINTER_TYPE_P (itype))
6306 itype = sizetype;
6307 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6308 TYPE_PRECISION (itype),
6309 TYPE_SIGN (itype));
6311 /* Ignore invalid offsets that are not multiples of the step. */
6312 if (!wi::multiple_of_p (wi::abs (offset),
6313 wi::abs (wi::to_wide (fd.loops[i].step)),
6314 UNSIGNED))
6316 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6317 "ignoring sink clause with offset that is not "
6318 "a multiple of the loop step");
6319 remove = true;
6320 goto next_ordered_clause;
6323 /* Calculate the first dimension. The first dimension of
6324 the folded dependency vector is the GCD of the first
6325 elements, while ignoring any first elements whose offset
6326 is 0. */
6327 if (i == 0)
6329 /* Ignore dependence vectors whose first dimension is 0. */
6330 if (offset == 0)
6332 remove = true;
6333 goto next_ordered_clause;
6335 else
6337 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6339 error_at (OMP_CLAUSE_LOCATION (c),
6340 "first offset must be in opposite direction "
6341 "of loop iterations");
6342 goto lower_omp_ordered_ret;
6344 if (forward)
6345 offset = -offset;
6346 neg_offset_p = forward;
6347 /* Initialize the first time around. */
6348 if (folded_dep == NULL_TREE)
6350 folded_dep = c;
6351 folded_deps[0] = offset;
6353 else
6354 folded_deps[0] = wi::gcd (folded_deps[0],
6355 offset, UNSIGNED);
6358 /* Calculate minimum for the remaining dimensions. */
6359 else
6361 folded_deps[len + i - 1] = offset;
6362 if (folded_dep == c)
6363 folded_deps[i] = offset;
6364 else if (maybe_lexically_later
6365 && !wi::eq_p (folded_deps[i], offset))
6367 if (forward ^ wi::gts_p (folded_deps[i], offset))
6369 unsigned int j;
6370 folded_dep = c;
6371 for (j = 1; j <= i; j++)
6372 folded_deps[j] = folded_deps[len + j - 1];
6374 else
6375 maybe_lexically_later = false;
6379 gcc_assert (i == len);
6381 remove = true;
6383 next_ordered_clause:
6384 if (remove)
6385 *list_p = OMP_CLAUSE_CHAIN (c);
6386 else
6387 list_p = &OMP_CLAUSE_CHAIN (c);
6390 if (folded_dep)
6392 if (neg_offset_p)
6393 folded_deps[0] = -folded_deps[0];
6395 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6396 if (POINTER_TYPE_P (itype))
6397 itype = sizetype;
6399 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6400 = wide_int_to_tree (itype, folded_deps[0]);
6401 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6402 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6405 lower_omp_ordered_ret:
6407 /* Ordered without clauses is #pragma omp ordered threads, whereas we
6408 want a nop instead if we have removed all the clauses.  */
6409 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6410 gsi_replace (gsi_p, gimple_build_nop (), true);
6414 /* Expand code for an OpenMP ordered directive. */
6416 static void
6417 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6419 tree block;
6420 gimple *stmt = gsi_stmt (*gsi_p), *g;
6421 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6422 gcall *x;
6423 gbind *bind;
6424 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6425 OMP_CLAUSE_SIMD);
6426 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6427 loop. */
6428 bool maybe_simt
6429 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6430 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6431 OMP_CLAUSE_THREADS);
6433 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6434 OMP_CLAUSE_DEPEND))
6436 /* FIXME: This needs to be moved to the expansion, to verify various
6437 conditions that are only testable on a cfg with dominators computed;
6438 also, all the depend clauses to be merged might still need to be
6439 available for the runtime checks.  */
6440 if (0)
6441 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6442 return;
6445 push_gimplify_context ();
6447 block = make_node (BLOCK);
6448 bind = gimple_build_bind (NULL, NULL, block);
6449 gsi_replace (gsi_p, bind, true);
6450 gimple_bind_add_stmt (bind, stmt);
6452 if (simd)
6454 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6455 build_int_cst (NULL_TREE, threads));
6456 cfun->has_simduid_loops = true;
6458 else
6459 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6460 0);
6461 gimple_bind_add_stmt (bind, x);
6463 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6464 if (maybe_simt)
6466 counter = create_tmp_var (integer_type_node);
6467 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6468 gimple_call_set_lhs (g, counter);
6469 gimple_bind_add_stmt (bind, g);
6471 body = create_artificial_label (UNKNOWN_LOCATION);
6472 test = create_artificial_label (UNKNOWN_LOCATION);
6473 gimple_bind_add_stmt (bind, gimple_build_label (body));
6475 tree simt_pred = create_tmp_var (integer_type_node);
6476 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6477 gimple_call_set_lhs (g, simt_pred);
6478 gimple_bind_add_stmt (bind, g);
6480 tree t = create_artificial_label (UNKNOWN_LOCATION);
6481 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6482 gimple_bind_add_stmt (bind, g);
6484 gimple_bind_add_stmt (bind, gimple_build_label (t));
6486 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6487 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6488 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6489 gimple_omp_set_body (stmt, NULL);
6491 if (maybe_simt)
6493 gimple_bind_add_stmt (bind, gimple_build_label (test));
6494 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6495 gimple_bind_add_stmt (bind, g);
6497 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6498 tree nonneg = create_tmp_var (integer_type_node);
6499 gimple_seq tseq = NULL;
6500 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6501 gimple_bind_add_seq (bind, tseq);
6503 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6504 gimple_call_set_lhs (g, nonneg);
6505 gimple_bind_add_stmt (bind, g);
6507 tree end = create_artificial_label (UNKNOWN_LOCATION);
6508 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6509 gimple_bind_add_stmt (bind, g);
6511 gimple_bind_add_stmt (bind, gimple_build_label (end));
6513 if (simd)
6514 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6515 build_int_cst (NULL_TREE, threads));
6516 else
6517 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6518 0);
6519 gimple_bind_add_stmt (bind, x);
6521 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6523 pop_gimplify_context (bind);
6525 gimple_bind_append_vars (bind, ctx->block_vars);
6526 BLOCK_VARS (block) = gimple_bind_vars (bind);
6530 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6531 substitution of a couple of function calls.  But in the NAMED case,
6532 it requires that languages coordinate a symbol name.  It is therefore
6533 best put here in common code. */
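/* E.g. (an illustration, not literal output) a named construct

	#pragma omp critical (foo)
	  body ();

   lowers to calls bracketing the body:

	GOMP_critical_name_start (&.gomp_critical_user_foo);
	body ();
	GOMP_critical_name_end (&.gomp_critical_user_foo);

   with .gomp_critical_user_foo created below as a common symbol, so all
   translation units using the name 'foo' share one lock.  */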
6535 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6537 static void
6538 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6540 tree block;
6541 tree name, lock, unlock;
6542 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6543 gbind *bind;
6544 location_t loc = gimple_location (stmt);
6545 gimple_seq tbody;
6547 name = gimple_omp_critical_name (stmt);
6548 if (name)
6550 tree decl;
6552 if (!critical_name_mutexes)
6553 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6555 tree *n = critical_name_mutexes->get (name);
6556 if (n == NULL)
6558 char *new_str;
6560 decl = create_tmp_var_raw (ptr_type_node);
6562 new_str = ACONCAT ((".gomp_critical_user_",
6563 IDENTIFIER_POINTER (name), NULL));
6564 DECL_NAME (decl) = get_identifier (new_str);
6565 TREE_PUBLIC (decl) = 1;
6566 TREE_STATIC (decl) = 1;
6567 DECL_COMMON (decl) = 1;
6568 DECL_ARTIFICIAL (decl) = 1;
6569 DECL_IGNORED_P (decl) = 1;
6571 varpool_node::finalize_decl (decl);
6573 critical_name_mutexes->put (name, decl);
6575 else
6576 decl = *n;
6578 /* If '#pragma omp critical' is inside offloaded region or
6579 inside function marked as offloadable, the symbol must be
6580 marked as offloadable too. */
6581 omp_context *octx;
6582 if (cgraph_node::get (current_function_decl)->offloadable)
6583 varpool_node::get_create (decl)->offloadable = 1;
6584 else
6585 for (octx = ctx->outer; octx; octx = octx->outer)
6586 if (is_gimple_omp_offloaded (octx->stmt))
6588 varpool_node::get_create (decl)->offloadable = 1;
6589 break;
6592 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6593 lock = build_call_expr_loc (loc, lock, 1,
6594 build_fold_addr_expr_loc (loc, decl));
6596 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6597 unlock = build_call_expr_loc (loc, unlock, 1,
6598 build_fold_addr_expr_loc (loc, decl));
6600 else
6602 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6603 lock = build_call_expr_loc (loc, lock, 0);
6605 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6606 unlock = build_call_expr_loc (loc, unlock, 0);
6609 push_gimplify_context ();
6611 block = make_node (BLOCK);
6612 bind = gimple_build_bind (NULL, NULL, block);
6613 gsi_replace (gsi_p, bind, true);
6614 gimple_bind_add_stmt (bind, stmt);
6616 tbody = gimple_bind_body (bind);
6617 gimplify_and_add (lock, &tbody);
6618 gimple_bind_set_body (bind, tbody);
6620 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6621 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6622 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6623 gimple_omp_set_body (stmt, NULL);
6625 tbody = gimple_bind_body (bind);
6626 gimplify_and_add (unlock, &tbody);
6627 gimple_bind_set_body (bind, tbody);
6629 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6631 pop_gimplify_context (bind);
6632 gimple_bind_append_vars (bind, ctx->block_vars);
6633 BLOCK_VARS (block) = gimple_bind_vars (bind);
6636 /* A subroutine of lower_omp_for. Generate code to emit the predicate
6637 for a lastprivate clause. Given a loop control predicate of (V
6638 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6639 is appended to *DLIST, iterator initialization is appended to
6640 *BODY_P. */
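/* For instance (illustration only; 'x', 'f', 'i' and 'n' are
   placeholder names), given

	#pragma omp for lastprivate (x)
	for (i = 0; i < n; i++)
	  x = f (i);

   the lastprivate copy-out is gated on !(i < n), i.e. i >= n, which the
   unit-step check below strengthens to i == n so that VRP can deduce
   the iterator's final value.  */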
6642 static void
6643 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6644 gimple_seq *dlist, struct omp_context *ctx)
6646 tree clauses, cond, vinit;
6647 enum tree_code cond_code;
6648 gimple_seq stmts;
6650 cond_code = fd->loop.cond_code;
6651 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6653 /* When possible, use a strict equality expression. This can let VRP
6654 type optimizations deduce the value and remove a copy. */
6655 if (tree_fits_shwi_p (fd->loop.step))
6657 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6658 if (step == 1 || step == -1)
6659 cond_code = EQ_EXPR;
6662 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6663 || gimple_omp_for_grid_phony (fd->for_stmt))
6664 cond = omp_grid_lastprivate_predicate (fd);
6665 else
6667 tree n2 = fd->loop.n2;
6668 if (fd->collapse > 1
6669 && TREE_CODE (n2) != INTEGER_CST
6670 && gimple_omp_for_combined_into_p (fd->for_stmt))
6672 struct omp_context *taskreg_ctx = NULL;
6673 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6675 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6676 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6677 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6679 if (gimple_omp_for_combined_into_p (gfor))
6681 gcc_assert (ctx->outer->outer
6682 && is_parallel_ctx (ctx->outer->outer));
6683 taskreg_ctx = ctx->outer->outer;
6685 else
6687 struct omp_for_data outer_fd;
6688 omp_extract_for_data (gfor, &outer_fd, NULL);
6689 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6692 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6693 taskreg_ctx = ctx->outer->outer;
6695 else if (is_taskreg_ctx (ctx->outer))
6696 taskreg_ctx = ctx->outer;
6697 if (taskreg_ctx)
6699 int i;
6700 tree taskreg_clauses
6701 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6702 tree innerc = omp_find_clause (taskreg_clauses,
6703 OMP_CLAUSE__LOOPTEMP_);
6704 gcc_assert (innerc);
6705 for (i = 0; i < fd->collapse; i++)
6707 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6708 OMP_CLAUSE__LOOPTEMP_);
6709 gcc_assert (innerc);
6711 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6712 OMP_CLAUSE__LOOPTEMP_);
6713 if (innerc)
6714 n2 = fold_convert (TREE_TYPE (n2),
6715 lookup_decl (OMP_CLAUSE_DECL (innerc),
6716 taskreg_ctx));
6719 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6722 clauses = gimple_omp_for_clauses (fd->for_stmt);
6723 stmts = NULL;
6724 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6725 if (!gimple_seq_empty_p (stmts))
6727 gimple_seq_add_seq (&stmts, *dlist);
6728 *dlist = stmts;
6730 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6731 vinit = fd->loop.n1;
6732 if (cond_code == EQ_EXPR
6733 && tree_fits_shwi_p (fd->loop.n2)
6734 && ! integer_zerop (fd->loop.n2))
6735 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6736 else
6737 vinit = unshare_expr (vinit);
6739 /* Initialize the iterator variable, so that threads that don't execute
6740 any iterations don't execute the lastprivate clauses by accident. */
6741 gimplify_assign (fd->loop.v, vinit, body_p);
6746 /* Lower code for an OMP loop directive. */
6748 static void
6749 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6751 tree *rhs_p, block;
6752 struct omp_for_data fd, *fdp = NULL;
6753 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6754 gbind *new_stmt;
6755 gimple_seq omp_for_body, body, dlist;
6756 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6757 size_t i;
6759 push_gimplify_context ();
6761 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6763 block = make_node (BLOCK);
6764 new_stmt = gimple_build_bind (NULL, NULL, block);
6765 /* Replace at gsi right away, so that 'stmt' is no longer a member
6766 of a sequence, as we're going to add it to a different
6767 one below.  */
6768 gsi_replace (gsi_p, new_stmt, true);
6770 /* Move the declarations of temporaries in the loop body out before
6771 we make the body go away.  */
6772 omp_for_body = gimple_omp_body (stmt);
6773 if (!gimple_seq_empty_p (omp_for_body)
6774 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6776 gbind *inner_bind
6777 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6778 tree vars = gimple_bind_vars (inner_bind);
6779 gimple_bind_append_vars (new_stmt, vars);
6780 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6781 keep them on the inner_bind and its block.  */
6782 gimple_bind_set_vars (inner_bind, NULL_TREE);
6783 if (gimple_bind_block (inner_bind))
6784 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6787 if (gimple_omp_for_combined_into_p (stmt))
6789 omp_extract_for_data (stmt, &fd, NULL);
6790 fdp = &fd;
6792 /* We need two temporaries with fd.loop.v type (istart/iend)
6793 and then (fd.collapse - 1) temporaries with the same
6794 type for count2 ... countN-1 vars if not constant. */
6795 size_t count = 2;
6796 tree type = fd.iter_type;
6797 if (fd.collapse > 1
6798 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6799 count += fd.collapse - 1;
6800 bool taskreg_for
6801 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6802 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6803 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6804 tree simtc = NULL;
6805 tree clauses = *pc;
6806 if (taskreg_for)
6807 outerc
6808 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6809 OMP_CLAUSE__LOOPTEMP_);
6810 if (ctx->simt_stmt)
6811 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6812 OMP_CLAUSE__LOOPTEMP_);
6813 for (i = 0; i < count; i++)
6815 tree temp;
6816 if (taskreg_for)
6818 gcc_assert (outerc);
6819 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6820 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6821 OMP_CLAUSE__LOOPTEMP_);
6823 else
6825 /* If there are 2 adjacent SIMD stmts, one with _simt_
6826 clause, another without, make sure they have the same
6827 decls in _looptemp_ clauses, because the outer stmt
6828 they are combined into will look up just one inner_stmt. */
6829 if (ctx->simt_stmt)
6830 temp = OMP_CLAUSE_DECL (simtc);
6831 else
6832 temp = create_tmp_var (type);
6833 insert_decl_map (&ctx->outer->cb, temp, temp);
6835 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6836 OMP_CLAUSE_DECL (*pc) = temp;
6837 pc = &OMP_CLAUSE_CHAIN (*pc);
6838 if (ctx->simt_stmt)
6839 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6840 OMP_CLAUSE__LOOPTEMP_);
6842 *pc = clauses;
6845 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6846 dlist = NULL;
6847 body = NULL;
6848 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6849 fdp);
6850 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6852 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6854 /* Lower the header expressions. At this point, we can assume that
6855 the header is of the form:
6857 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6859 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6860 using the .omp_data_s mapping, if needed. */
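/* E.g. (illustration only; 'p->n' is a placeholder expression) a
   header such as

	#pragma omp for
	for (i = 0; i < p->n; i++)

   has p->n evaluated into a formal temporary here, so the
   GIMPLE_OMP_FOR header itself only refers to invariants and
   temporaries.  */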
6861 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6863 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6864 if (!is_gimple_min_invariant (*rhs_p))
6865 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6866 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6867 recompute_tree_invariant_for_addr_expr (*rhs_p);
6869 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6870 if (!is_gimple_min_invariant (*rhs_p))
6871 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6872 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6873 recompute_tree_invariant_for_addr_expr (*rhs_p);
6875 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6876 if (!is_gimple_min_invariant (*rhs_p))
6877 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6880 /* Once lowered, extract the bounds and clauses. */
6881 omp_extract_for_data (stmt, &fd, NULL);
6883 if (is_gimple_omp_oacc (ctx->stmt)
6884 && !ctx_in_oacc_kernels_region (ctx))
6885 lower_oacc_head_tail (gimple_location (stmt),
6886 gimple_omp_for_clauses (stmt),
6887 &oacc_head, &oacc_tail, ctx);
6889 /* Add OpenACC partitioning and reduction markers just before the loop. */
6890 if (oacc_head)
6891 gimple_seq_add_seq (&body, oacc_head);
6893 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6895 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6896 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6897 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6898 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6900 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6901 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6902 OMP_CLAUSE_LINEAR_STEP (c)
6903 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6904 ctx);
6907 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6908 && gimple_omp_for_grid_phony (stmt));
6909 if (!phony_loop)
6910 gimple_seq_add_stmt (&body, stmt);
6911 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6913 if (!phony_loop)
6914 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6915 fd.loop.v));
6917 /* After the loop, add exit clauses. */
6918 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6920 if (ctx->cancellable)
6921 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6923 gimple_seq_add_seq (&body, dlist);
6925 body = maybe_catch_exception (body);
6927 if (!phony_loop)
6929 /* Region exit marker goes at the end of the loop body. */
6930 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6931 maybe_add_implicit_barrier_cancel (ctx, &body);
6934 /* Add OpenACC joining and reduction markers just after the loop. */
6935 if (oacc_tail)
6936 gimple_seq_add_seq (&body, oacc_tail);
6938 pop_gimplify_context (new_stmt);
6940 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6941 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6942 if (BLOCK_VARS (block))
6943 TREE_USED (block) = 1;
6945 gimple_bind_set_body (new_stmt, body);
6946 gimple_omp_set_body (stmt, NULL);
6947 gimple_omp_for_set_pre_body (stmt, NULL);
6950 /* Callback for walk_stmts. Check if the current statement only contains
6951 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
6953 static tree
6954 check_combined_parallel (gimple_stmt_iterator *gsi_p,
6955 bool *handled_ops_p,
6956 struct walk_stmt_info *wi)
6958 int *info = (int *) wi->info;
6959 gimple *stmt = gsi_stmt (*gsi_p);
6961 *handled_ops_p = true;
6962 switch (gimple_code (stmt))
6964 WALK_SUBSTMTS;
6966 case GIMPLE_DEBUG:
6967 break;
6968 case GIMPLE_OMP_FOR:
6969 case GIMPLE_OMP_SECTIONS:
6970 *info = *info == 0 ? 1 : -1;
6971 break;
6972 default:
6973 *info = -1;
6974 break;
6976 return NULL;
6979 struct omp_taskcopy_context
6981 /* This field must be at the beginning, as we do "inheritance": Some
6982 callback functions for tree-inline.c (e.g., omp_copy_decl)
6983 receive a copy_body_data pointer that is up-casted to an
6984 omp_context pointer. */
6985 copy_body_data cb;
6986 omp_context *ctx;
6989 static tree
6990 task_copyfn_copy_decl (tree var, copy_body_data *cb)
6992 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6994 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6995 return create_tmp_var (TREE_TYPE (var));
6997 return var;
7000 static tree
7001 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7003 tree name, new_fields = NULL, type, f;
7005 type = lang_hooks.types.make_type (RECORD_TYPE);
7006 name = DECL_NAME (TYPE_NAME (orig_type));
7007 name = build_decl (gimple_location (tcctx->ctx->stmt),
7008 TYPE_DECL, name, type);
7009 TYPE_NAME (type) = name;
7011 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7013 tree new_f = copy_node (f);
7014 DECL_CONTEXT (new_f) = type;
7015 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7016 TREE_CHAIN (new_f) = new_fields;
7017 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7018 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7019 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7020 &tcctx->cb, NULL);
7021 new_fields = new_f;
7022 tcctx->cb.decl_map->put (f, new_f);
7024 TYPE_FIELDS (type) = nreverse (new_fields);
7025 layout_type (type);
7026 return type;
7029 /* Create the task copyfn: it copy-constructs firstprivate data from the sender record into the task's own record.  */
7031 static void
7032 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7034 struct function *child_cfun;
7035 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7036 tree record_type, srecord_type, bind, list;
7037 bool record_needs_remap = false, srecord_needs_remap = false;
7038 splay_tree_node n;
7039 struct omp_taskcopy_context tcctx;
7040 location_t loc = gimple_location (task_stmt);
7042 child_fn = gimple_omp_task_copy_fn (task_stmt);
7043 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7044 gcc_assert (child_cfun->cfg == NULL);
7045 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7047 /* Reset DECL_CONTEXT on function arguments. */
7048 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7049 DECL_CONTEXT (t) = child_fn;
7051 /* Populate the function. */
7052 push_gimplify_context ();
7053 push_cfun (child_cfun);
7055 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7056 TREE_SIDE_EFFECTS (bind) = 1;
7057 list = NULL;
7058 DECL_SAVED_TREE (child_fn) = bind;
7059 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7061 /* Remap src and dst argument types if needed. */
7062 record_type = ctx->record_type;
7063 srecord_type = ctx->srecord_type;
7064 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7065 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7067 record_needs_remap = true;
7068 break;
7070 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7071 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7073 srecord_needs_remap = true;
7074 break;
7077 if (record_needs_remap || srecord_needs_remap)
7079 memset (&tcctx, '\0', sizeof (tcctx));
7080 tcctx.cb.src_fn = ctx->cb.src_fn;
7081 tcctx.cb.dst_fn = child_fn;
7082 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7083 gcc_checking_assert (tcctx.cb.src_node);
7084 tcctx.cb.dst_node = tcctx.cb.src_node;
7085 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7086 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7087 tcctx.cb.eh_lp_nr = 0;
7088 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7089 tcctx.cb.decl_map = new hash_map<tree, tree>;
7090 tcctx.ctx = ctx;
7092 if (record_needs_remap)
7093 record_type = task_copyfn_remap_type (&tcctx, record_type);
7094 if (srecord_needs_remap)
7095 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7097 else
7098 tcctx.cb.decl_map = NULL;
7100 arg = DECL_ARGUMENTS (child_fn);
7101 TREE_TYPE (arg) = build_pointer_type (record_type);
7102 sarg = DECL_CHAIN (arg);
7103 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7105 /* First pass: initialize temporaries used in record_type and srecord_type
7106 sizes and field offsets. */
7107 if (tcctx.cb.decl_map)
7108 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7109 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7111 tree *p;
7113 decl = OMP_CLAUSE_DECL (c);
7114 p = tcctx.cb.decl_map->get (decl);
7115 if (p == NULL)
7116 continue;
7117 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7118 sf = (tree) n->value;
7119 sf = *tcctx.cb.decl_map->get (sf);
7120 src = build_simple_mem_ref_loc (loc, sarg);
7121 src = omp_build_component_ref (src, sf);
7122 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7123 append_to_statement_list (t, &list);
7126 /* Second pass: copy shared var pointers and copy construct non-VLA
7127 firstprivate vars. */
7128 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7129 switch (OMP_CLAUSE_CODE (c))
7131 splay_tree_key key;
7132 case OMP_CLAUSE_SHARED:
7133 decl = OMP_CLAUSE_DECL (c);
7134 key = (splay_tree_key) decl;
7135 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7136 key = (splay_tree_key) &DECL_UID (decl);
7137 n = splay_tree_lookup (ctx->field_map, key);
7138 if (n == NULL)
7139 break;
7140 f = (tree) n->value;
7141 if (tcctx.cb.decl_map)
7142 f = *tcctx.cb.decl_map->get (f);
7143 n = splay_tree_lookup (ctx->sfield_map, key);
7144 sf = (tree) n->value;
7145 if (tcctx.cb.decl_map)
7146 sf = *tcctx.cb.decl_map->get (sf);
7147 src = build_simple_mem_ref_loc (loc, sarg);
7148 src = omp_build_component_ref (src, sf);
7149 dst = build_simple_mem_ref_loc (loc, arg);
7150 dst = omp_build_component_ref (dst, f);
7151 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7152 append_to_statement_list (t, &list);
7153 break;
7154 case OMP_CLAUSE_FIRSTPRIVATE:
7155 decl = OMP_CLAUSE_DECL (c);
7156 if (is_variable_sized (decl))
7157 break;
7158 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7159 if (n == NULL)
7160 break;
7161 f = (tree) n->value;
7162 if (tcctx.cb.decl_map)
7163 f = *tcctx.cb.decl_map->get (f);
7164 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7165 if (n != NULL)
7167 sf = (tree) n->value;
7168 if (tcctx.cb.decl_map)
7169 sf = *tcctx.cb.decl_map->get (sf);
7170 src = build_simple_mem_ref_loc (loc, sarg);
7171 src = omp_build_component_ref (src, sf);
7172 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7173 src = build_simple_mem_ref_loc (loc, src);
7175 else
7176 src = decl;
7177 dst = build_simple_mem_ref_loc (loc, arg);
7178 dst = omp_build_component_ref (dst, f);
7179 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7180 append_to_statement_list (t, &list);
7181 break;
7182 case OMP_CLAUSE_PRIVATE:
7183 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7184 break;
7185 decl = OMP_CLAUSE_DECL (c);
7186 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7187 f = (tree) n->value;
7188 if (tcctx.cb.decl_map)
7189 f = *tcctx.cb.decl_map->get (f);
7190 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7191 if (n != NULL)
7193 sf = (tree) n->value;
7194 if (tcctx.cb.decl_map)
7195 sf = *tcctx.cb.decl_map->get (sf);
7196 src = build_simple_mem_ref_loc (loc, sarg);
7197 src = omp_build_component_ref (src, sf);
7198 if (use_pointer_for_field (decl, NULL))
7199 src = build_simple_mem_ref_loc (loc, src);
7201 else
7202 src = decl;
7203 dst = build_simple_mem_ref_loc (loc, arg);
7204 dst = omp_build_component_ref (dst, f);
7205 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7206 append_to_statement_list (t, &list);
7207 break;
7208 default:
7209 break;
7212 /* Last pass: handle VLA firstprivates. */
7213 if (tcctx.cb.decl_map)
7214 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7215 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7217 tree ind, ptr, df;
7219 decl = OMP_CLAUSE_DECL (c);
7220 if (!is_variable_sized (decl))
7221 continue;
7222 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7223 if (n == NULL)
7224 continue;
7225 f = (tree) n->value;
7226 f = *tcctx.cb.decl_map->get (f);
7227 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7228 ind = DECL_VALUE_EXPR (decl);
7229 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7230 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7231 n = splay_tree_lookup (ctx->sfield_map,
7232 (splay_tree_key) TREE_OPERAND (ind, 0));
7233 sf = (tree) n->value;
7234 sf = *tcctx.cb.decl_map->get (sf);
7235 src = build_simple_mem_ref_loc (loc, sarg);
7236 src = omp_build_component_ref (src, sf);
7237 src = build_simple_mem_ref_loc (loc, src);
7238 dst = build_simple_mem_ref_loc (loc, arg);
7239 dst = omp_build_component_ref (dst, f);
7240 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7241 append_to_statement_list (t, &list);
7242 n = splay_tree_lookup (ctx->field_map,
7243 (splay_tree_key) TREE_OPERAND (ind, 0));
7244 df = (tree) n->value;
7245 df = *tcctx.cb.decl_map->get (df);
7246 ptr = build_simple_mem_ref_loc (loc, arg);
7247 ptr = omp_build_component_ref (ptr, df);
7248 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7249 build_fold_addr_expr_loc (loc, dst));
7250 append_to_statement_list (t, &list);
7253 t = build1 (RETURN_EXPR, void_type_node, NULL);
7254 append_to_statement_list (t, &list);
7256 if (tcctx.cb.decl_map)
7257 delete tcctx.cb.decl_map;
7258 pop_gimplify_context (NULL);
7259 BIND_EXPR_BODY (bind) = list;
7260 pop_cfun ();
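/* Lower the depend clauses in *PCLAUSES into the address array passed
   to the runtime: element 0 holds the total number of depend addresses,
   element 1 the number of out/inout ones, followed first by the
   out/inout addresses and then by the in addresses.  The array setup is
   emitted to *ISEQ and a clobber of the array to *OSEQ.  */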
7263 static void
7264 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7266 tree c, clauses;
7267 gimple *g;
7268 size_t n_in = 0, n_out = 0, idx = 2, i;
7270 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7271 gcc_assert (clauses);
7272 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7273 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7274 switch (OMP_CLAUSE_DEPEND_KIND (c))
7276 case OMP_CLAUSE_DEPEND_IN:
7277 n_in++;
7278 break;
7279 case OMP_CLAUSE_DEPEND_OUT:
7280 case OMP_CLAUSE_DEPEND_INOUT:
7281 n_out++;
7282 break;
7283 case OMP_CLAUSE_DEPEND_SOURCE:
7284 case OMP_CLAUSE_DEPEND_SINK:
7285 /* FALLTHRU */
7286 default:
7287 gcc_unreachable ();
7289 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7290 tree array = create_tmp_var (type);
7291 TREE_ADDRESSABLE (array) = 1;
7292 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7293 NULL_TREE);
7294 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7295 gimple_seq_add_stmt (iseq, g);
7296 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7297 NULL_TREE);
7298 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7299 gimple_seq_add_stmt (iseq, g);
7300 for (i = 0; i < 2; i++)
7302 if ((i ? n_in : n_out) == 0)
7303 continue;
7304 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7305 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7306 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7308 tree t = OMP_CLAUSE_DECL (c);
7309 t = fold_convert (ptr_type_node, t);
7310 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7311 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7312 NULL_TREE, NULL_TREE);
7313 g = gimple_build_assign (r, t);
7314 gimple_seq_add_stmt (iseq, g);
7317 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7318 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7319 OMP_CLAUSE_CHAIN (c) = *pclauses;
7320 *pclauses = c;
7321 tree clobber = build_constructor (type, NULL);
7322 TREE_THIS_VOLATILE (clobber) = 1;
7323 g = gimple_build_assign (array, clobber);
7324 gimple_seq_add_stmt (oseq, g);
7327 /* Lower the OpenMP parallel or task directive in the current statement
7328 in GSI_P. CTX holds context information for the directive. */
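/* In outline (an illustration with placeholder names), for

	#pragma omp parallel shared (a)
	  use (&a);

   the lowering builds a sender record in the parent and a matching
   receiver in the child function:

	parent:	.omp_data_o.a = &a;	// or by value, depending on
					// use_pointer_for_field
		GIMPLE_OMP_PARALLEL [child_fn] <.omp_data_o>
	child:	.omp_data_i = &.omp_data_o;
		use (.omp_data_i->a);

   .omp_data_o and .omp_data_i are the sender_decl and receiver_decl
   used below; pass_expand_omp finally outlines the child and passes
   &.omp_data_o to GOMP_parallel.  */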
7330 static void
7331 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7333 tree clauses;
7334 tree child_fn, t;
7335 gimple *stmt = gsi_stmt (*gsi_p);
7336 gbind *par_bind, *bind, *dep_bind = NULL;
7337 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7338 location_t loc = gimple_location (stmt);
7340 clauses = gimple_omp_taskreg_clauses (stmt);
7341 par_bind
7342 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7343 par_body = gimple_bind_body (par_bind);
7344 child_fn = ctx->cb.dst_fn;
7345 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7346 && !gimple_omp_parallel_combined_p (stmt))
7348 struct walk_stmt_info wi;
7349 int ws_num = 0;
7351 memset (&wi, 0, sizeof (wi));
7352 wi.info = &ws_num;
7353 wi.val_only = true;
7354 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7355 if (ws_num == 1)
7356 gimple_omp_parallel_set_combined_p (stmt, true);
7358 gimple_seq dep_ilist = NULL;
7359 gimple_seq dep_olist = NULL;
7360 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7361 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7363 push_gimplify_context ();
7364 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7365 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7366 &dep_ilist, &dep_olist);
7369 if (ctx->srecord_type)
7370 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7372 push_gimplify_context ();
7374 par_olist = NULL;
7375 par_ilist = NULL;
7376 par_rlist = NULL;
7377 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7378 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7379 if (phony_construct && ctx->record_type)
7381 gcc_checking_assert (!ctx->receiver_decl);
7382 ctx->receiver_decl = create_tmp_var
7383 (build_reference_type (ctx->record_type), ".omp_rec");
7385 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7386 lower_omp (&par_body, ctx);
7387 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7388 lower_reduction_clauses (clauses, &par_rlist, ctx);
7390 /* Declare all the variables created by mapping and the variables
7391 declared in the scope of the parallel body. */
7392 record_vars_into (ctx->block_vars, child_fn);
7393 record_vars_into (gimple_bind_vars (par_bind), child_fn);
7395 if (ctx->record_type)
7397 ctx->sender_decl
7398 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7399 : ctx->record_type, ".omp_data_o");
7400 DECL_NAMELESS (ctx->sender_decl) = 1;
7401 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7402 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7405 olist = NULL;
7406 ilist = NULL;
7407 lower_send_clauses (clauses, &ilist, &olist, ctx);
7408 lower_send_shared_vars (&ilist, &olist, ctx);
7410 if (ctx->record_type)
7412 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7413 TREE_THIS_VOLATILE (clobber) = 1;
7414 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7415 clobber));
7418 /* Once all the expansions are done, sequence all the different
7419 fragments inside gimple_omp_body. */
7421 new_body = NULL;
7423 if (ctx->record_type)
7425 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7426 /* fixup_child_record_type might have changed receiver_decl's type. */
7427 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7428 gimple_seq_add_stmt (&new_body,
7429 gimple_build_assign (ctx->receiver_decl, t));
7432 gimple_seq_add_seq (&new_body, par_ilist);
7433 gimple_seq_add_seq (&new_body, par_body);
7434 gimple_seq_add_seq (&new_body, par_rlist);
7435 if (ctx->cancellable)
7436 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7437 gimple_seq_add_seq (&new_body, par_olist);
7438 new_body = maybe_catch_exception (new_body);
7439 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7440 gimple_seq_add_stmt (&new_body,
7441 gimple_build_omp_continue (integer_zero_node,
7442 integer_zero_node));
7443 if (!phony_construct)
7445 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7446 gimple_omp_set_body (stmt, new_body);
7449 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7450 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7451 gimple_bind_add_seq (bind, ilist);
7452 if (!phony_construct)
7453 gimple_bind_add_stmt (bind, stmt);
7454 else
7455 gimple_bind_add_seq (bind, new_body);
7456 gimple_bind_add_seq (bind, olist);
7458 pop_gimplify_context (NULL);
7460 if (dep_bind)
7462 gimple_bind_add_seq (dep_bind, dep_ilist);
7463 gimple_bind_add_stmt (dep_bind, bind);
7464 gimple_bind_add_seq (dep_bind, dep_olist);
7465 pop_gimplify_context (dep_bind);
7469 /* Lower the GIMPLE_OMP_TARGET in the current statement
7470 in GSI_P. CTX holds context information for the directive. */
7472 static void
7473 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7475 tree clauses;
7476 tree child_fn, t, c;
7477 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7478 gbind *tgt_bind, *bind, *dep_bind = NULL;
7479 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7480 location_t loc = gimple_location (stmt);
7481 bool offloaded, data_region;
7482 unsigned int map_cnt = 0;
7484 offloaded = is_gimple_omp_offloaded (stmt);
7485 switch (gimple_omp_target_kind (stmt))
7487 case GF_OMP_TARGET_KIND_REGION:
7488 case GF_OMP_TARGET_KIND_UPDATE:
7489 case GF_OMP_TARGET_KIND_ENTER_DATA:
7490 case GF_OMP_TARGET_KIND_EXIT_DATA:
7491 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7492 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7493 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7494 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7495 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7496 data_region = false;
7497 break;
7498 case GF_OMP_TARGET_KIND_DATA:
7499 case GF_OMP_TARGET_KIND_OACC_DATA:
7500 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7501 data_region = true;
7502 break;
7503 default:
7504 gcc_unreachable ();
7507 clauses = gimple_omp_target_clauses (stmt);
7509 gimple_seq dep_ilist = NULL;
7510 gimple_seq dep_olist = NULL;
7511 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7513 push_gimplify_context ();
7514 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7515 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7516 &dep_ilist, &dep_olist);
7519 tgt_bind = NULL;
7520 tgt_body = NULL;
7521 if (offloaded)
7523 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7524 tgt_body = gimple_bind_body (tgt_bind);
7526 else if (data_region)
7527 tgt_body = gimple_omp_body (stmt);
7528 child_fn = ctx->cb.dst_fn;
7530 push_gimplify_context ();
7531 fplist = NULL;
7533 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7534 switch (OMP_CLAUSE_CODE (c))
7536 tree var, x;
7538 default:
7539 break;
7540 case OMP_CLAUSE_MAP:
7541 #if CHECKING_P
7542 /* First check what we're prepared to handle in the following. */
7543 switch (OMP_CLAUSE_MAP_KIND (c))
7545 case GOMP_MAP_ALLOC:
7546 case GOMP_MAP_TO:
7547 case GOMP_MAP_FROM:
7548 case GOMP_MAP_TOFROM:
7549 case GOMP_MAP_POINTER:
7550 case GOMP_MAP_TO_PSET:
7551 case GOMP_MAP_DELETE:
7552 case GOMP_MAP_RELEASE:
7553 case GOMP_MAP_ALWAYS_TO:
7554 case GOMP_MAP_ALWAYS_FROM:
7555 case GOMP_MAP_ALWAYS_TOFROM:
7556 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7557 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7558 case GOMP_MAP_STRUCT:
7559 case GOMP_MAP_ALWAYS_POINTER:
7560 break;
7561 case GOMP_MAP_FORCE_ALLOC:
7562 case GOMP_MAP_FORCE_TO:
7563 case GOMP_MAP_FORCE_FROM:
7564 case GOMP_MAP_FORCE_TOFROM:
7565 case GOMP_MAP_FORCE_PRESENT:
7566 case GOMP_MAP_FORCE_DEVICEPTR:
7567 case GOMP_MAP_DEVICE_RESIDENT:
7568 case GOMP_MAP_LINK:
7569 gcc_assert (is_gimple_omp_oacc (stmt));
7570 break;
7571 default:
7572 gcc_unreachable ();
7574 #endif
7575 /* FALLTHRU */
7576 case OMP_CLAUSE_TO:
7577 case OMP_CLAUSE_FROM:
7578 oacc_firstprivate:
7579 var = OMP_CLAUSE_DECL (c);
7580 if (!DECL_P (var))
7582 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7583 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7584 && (OMP_CLAUSE_MAP_KIND (c)
7585 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7586 map_cnt++;
7587 continue;
7590 if (DECL_SIZE (var)
7591 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7593 tree var2 = DECL_VALUE_EXPR (var);
7594 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7595 var2 = TREE_OPERAND (var2, 0);
7596 gcc_assert (DECL_P (var2));
7597 var = var2;
7600 if (offloaded
7601 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7602 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7603 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7605 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7607 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7608 && varpool_node::get_create (var)->offloadable)
7609 continue;
7611 tree type = build_pointer_type (TREE_TYPE (var));
7612 tree new_var = lookup_decl (var, ctx);
7613 x = create_tmp_var_raw (type, get_name (new_var));
7614 gimple_add_tmp_var (x);
7615 x = build_simple_mem_ref (x);
7616 SET_DECL_VALUE_EXPR (new_var, x);
7617 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7619 continue;
7622 if (!maybe_lookup_field (var, ctx))
7623 continue;
7625 /* Don't remap oacc parallel reduction variables, because the
7626 intermediate result must be local to each gang. */
7627 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7628 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7630 x = build_receiver_ref (var, true, ctx);
7631 tree new_var = lookup_decl (var, ctx);
7633 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7634 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7635 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7636 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7637 x = build_simple_mem_ref (x);
7638 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7640 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7641 if (omp_is_reference (new_var))
7643 /* Create a local object to hold the instance
7644 value. */
7645 tree type = TREE_TYPE (TREE_TYPE (new_var));
7646 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7647 tree inst = create_tmp_var (type, id);
7648 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7649 x = build_fold_addr_expr (inst);
7651 gimplify_assign (new_var, x, &fplist);
7653 else if (DECL_P (new_var))
7655 SET_DECL_VALUE_EXPR (new_var, x);
7656 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7658 else
7659 gcc_unreachable ();
7661 map_cnt++;
7662 break;
7664 case OMP_CLAUSE_FIRSTPRIVATE:
7665 if (is_oacc_parallel (ctx))
7666 goto oacc_firstprivate;
7667 map_cnt++;
7668 var = OMP_CLAUSE_DECL (c);
7669 if (!omp_is_reference (var)
7670 && !is_gimple_reg_type (TREE_TYPE (var)))
7672 tree new_var = lookup_decl (var, ctx);
7673 if (is_variable_sized (var))
7675 tree pvar = DECL_VALUE_EXPR (var);
7676 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7677 pvar = TREE_OPERAND (pvar, 0);
7678 gcc_assert (DECL_P (pvar));
7679 tree new_pvar = lookup_decl (pvar, ctx);
7680 x = build_fold_indirect_ref (new_pvar);
7681 TREE_THIS_NOTRAP (x) = 1;
7683 else
7684 x = build_receiver_ref (var, true, ctx);
7685 SET_DECL_VALUE_EXPR (new_var, x);
7686 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7688 break;
7690 case OMP_CLAUSE_PRIVATE:
7691 if (is_gimple_omp_oacc (ctx->stmt))
7692 break;
7693 var = OMP_CLAUSE_DECL (c);
7694 if (is_variable_sized (var))
7696 tree new_var = lookup_decl (var, ctx);
7697 tree pvar = DECL_VALUE_EXPR (var);
7698 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7699 pvar = TREE_OPERAND (pvar, 0);
7700 gcc_assert (DECL_P (pvar));
7701 tree new_pvar = lookup_decl (pvar, ctx);
7702 x = build_fold_indirect_ref (new_pvar);
7703 TREE_THIS_NOTRAP (x) = 1;
7704 SET_DECL_VALUE_EXPR (new_var, x);
7705 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7707 break;
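/* Editor's sketch of the recurring DECL_VALUE_EXPR pattern above (also
   used for USE_DEVICE_PTR/IS_DEVICE_PTR below): a variable-sized object
   such as

     int n = ...;
     int a[n];

   is already accessed on the host through an INDIRECT_REF of an
   artificial pointer recorded as "a"'s DECL_VALUE_EXPR.  Looking up the
   remapped pointer in CTX and rebuilding the indirection makes every use
   of "a" inside the region go through the region-local pointer instead.  */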
7709 case OMP_CLAUSE_USE_DEVICE_PTR:
7710 case OMP_CLAUSE_IS_DEVICE_PTR:
7711 var = OMP_CLAUSE_DECL (c);
7712 map_cnt++;
7713 if (is_variable_sized (var))
7715 tree new_var = lookup_decl (var, ctx);
7716 tree pvar = DECL_VALUE_EXPR (var);
7717 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7718 pvar = TREE_OPERAND (pvar, 0);
7719 gcc_assert (DECL_P (pvar));
7720 tree new_pvar = lookup_decl (pvar, ctx);
7721 x = build_fold_indirect_ref (new_pvar);
7722 TREE_THIS_NOTRAP (x) = 1;
7723 SET_DECL_VALUE_EXPR (new_var, x);
7724 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7726 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7728 tree new_var = lookup_decl (var, ctx);
7729 tree type = build_pointer_type (TREE_TYPE (var));
7730 x = create_tmp_var_raw (type, get_name (new_var));
7731 gimple_add_tmp_var (x);
7732 x = build_simple_mem_ref (x);
7733 SET_DECL_VALUE_EXPR (new_var, x);
7734 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7736 else
7738 tree new_var = lookup_decl (var, ctx);
7739 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7740 gimple_add_tmp_var (x);
7741 SET_DECL_VALUE_EXPR (new_var, x);
7742 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7744 break;
7747 if (offloaded)
7749 target_nesting_level++;
7750 lower_omp (&tgt_body, ctx);
7751 target_nesting_level--;
7753 else if (data_region)
7754 lower_omp (&tgt_body, ctx);
7756 if (offloaded)
7758 /* Declare all the variables created by mapping and the variables
7759 declared in the scope of the target body. */
7760 record_vars_into (ctx->block_vars, child_fn);
7761 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7764 olist = NULL;
7765 ilist = NULL;
7766 if (ctx->record_type)
7768 ctx->sender_decl
7769 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7770 DECL_NAMELESS (ctx->sender_decl) = 1;
7771 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7772 t = make_tree_vec (3);
7773 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7774 TREE_VEC_ELT (t, 1)
7775 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7776 ".omp_data_sizes");
7777 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7778 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7779 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7780 tree tkind_type = short_unsigned_type_node;
7781 int talign_shift = 8;
7782 TREE_VEC_ELT (t, 2)
7783 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7784 ".omp_data_kinds");
7785 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7786 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7787 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7788 gimple_omp_target_set_data_arg (stmt, t);
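/* Editor's sketch: the TREE_VEC built above bundles the three arrays
   handed to the runtime.  For, say,

     #pragma omp target map (tofrom: a[0:n])

   .omp_data_arr holds the host address of the mapped section,
   .omp_data_sizes its length in bytes, and .omp_data_kinds the encoded
   map kind (here GOMP_MAP_TOFROM) plus alignment, one element per mapped
   item, filled in by the loop that follows.  */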
7790 vec<constructor_elt, va_gc> *vsize;
7791 vec<constructor_elt, va_gc> *vkind;
7792 vec_alloc (vsize, map_cnt);
7793 vec_alloc (vkind, map_cnt);
7794 unsigned int map_idx = 0;
7796 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7797 switch (OMP_CLAUSE_CODE (c))
7799 tree ovar, nc, s, purpose, var, x, type;
7800 unsigned int talign;
7802 default:
7803 break;
7805 case OMP_CLAUSE_MAP:
7806 case OMP_CLAUSE_TO:
7807 case OMP_CLAUSE_FROM:
7808 oacc_firstprivate_map:
7809 nc = c;
7810 ovar = OMP_CLAUSE_DECL (c);
7811 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7812 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7813 || (OMP_CLAUSE_MAP_KIND (c)
7814 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7815 break;
7816 if (!DECL_P (ovar))
7818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7819 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7821 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7822 == get_base_address (ovar));
7823 nc = OMP_CLAUSE_CHAIN (c);
7824 ovar = OMP_CLAUSE_DECL (nc);
7826 else
7828 tree x = build_sender_ref (ovar, ctx);
7829 tree v
7830 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7831 gimplify_assign (x, v, &ilist);
7832 nc = NULL_TREE;
7835 else
7837 if (DECL_SIZE (ovar)
7838 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7840 tree ovar2 = DECL_VALUE_EXPR (ovar);
7841 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7842 ovar2 = TREE_OPERAND (ovar2, 0);
7843 gcc_assert (DECL_P (ovar2));
7844 ovar = ovar2;
7846 if (!maybe_lookup_field (ovar, ctx))
7847 continue;
7850 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7851 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7852 talign = DECL_ALIGN_UNIT (ovar);
7853 if (nc)
7855 var = lookup_decl_in_outer_ctx (ovar, ctx);
7856 x = build_sender_ref (ovar, ctx);
7858 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7859 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7860 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7861 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7863 gcc_assert (offloaded);
7864 tree avar
7865 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7866 mark_addressable (avar);
7867 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7868 talign = DECL_ALIGN_UNIT (avar);
7869 avar = build_fold_addr_expr (avar);
7870 gimplify_assign (x, avar, &ilist);
7872 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7874 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7875 if (!omp_is_reference (var))
7877 if (is_gimple_reg (var)
7878 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7879 TREE_NO_WARNING (var) = 1;
7880 var = build_fold_addr_expr (var);
7882 else
7883 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7884 gimplify_assign (x, var, &ilist);
7886 else if (is_gimple_reg (var))
7888 gcc_assert (offloaded);
7889 tree avar = create_tmp_var (TREE_TYPE (var));
7890 mark_addressable (avar);
7891 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7892 if (GOMP_MAP_COPY_TO_P (map_kind)
7893 || map_kind == GOMP_MAP_POINTER
7894 || map_kind == GOMP_MAP_TO_PSET
7895 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7897 /* If we need to initialize a temporary
7898 with VAR because it is not addressable, and
7899 the variable hasn't been initialized yet, then
7900 we'll get a warning for the store to avar.
7901 Don't warn in that case; the mapping might
7902 be implicit. */
7903 TREE_NO_WARNING (var) = 1;
7904 gimplify_assign (avar, var, &ilist);
7906 avar = build_fold_addr_expr (avar);
7907 gimplify_assign (x, avar, &ilist);
7908 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7909 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7910 && !TYPE_READONLY (TREE_TYPE (var)))
7912 x = unshare_expr (x);
7913 x = build_simple_mem_ref (x);
7914 gimplify_assign (var, x, &olist);
7917 else
7919 var = build_fold_addr_expr (var);
7920 gimplify_assign (x, var, &ilist);
7923 s = NULL_TREE;
7924 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7926 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7927 s = TREE_TYPE (ovar);
7928 if (TREE_CODE (s) == REFERENCE_TYPE)
7929 s = TREE_TYPE (s);
7930 s = TYPE_SIZE_UNIT (s);
7932 else
7933 s = OMP_CLAUSE_SIZE (c);
7934 if (s == NULL_TREE)
7935 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7936 s = fold_convert (size_type_node, s);
7937 purpose = size_int (map_idx++);
7938 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7939 if (TREE_CODE (s) != INTEGER_CST)
7940 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
7942 unsigned HOST_WIDE_INT tkind, tkind_zero;
7943 switch (OMP_CLAUSE_CODE (c))
7945 case OMP_CLAUSE_MAP:
7946 tkind = OMP_CLAUSE_MAP_KIND (c);
7947 tkind_zero = tkind;
7948 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7949 switch (tkind)
7951 case GOMP_MAP_ALLOC:
7952 case GOMP_MAP_TO:
7953 case GOMP_MAP_FROM:
7954 case GOMP_MAP_TOFROM:
7955 case GOMP_MAP_ALWAYS_TO:
7956 case GOMP_MAP_ALWAYS_FROM:
7957 case GOMP_MAP_ALWAYS_TOFROM:
7958 case GOMP_MAP_RELEASE:
7959 case GOMP_MAP_FORCE_TO:
7960 case GOMP_MAP_FORCE_FROM:
7961 case GOMP_MAP_FORCE_TOFROM:
7962 case GOMP_MAP_FORCE_PRESENT:
7963 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7964 break;
7965 case GOMP_MAP_DELETE:
7966 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7967 default:
7968 break;
7970 if (tkind_zero != tkind)
7972 if (integer_zerop (s))
7973 tkind = tkind_zero;
7974 else if (integer_nonzerop (s))
7975 tkind_zero = tkind;
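/* Editor's example: this matters for maybe-zero-length array sections
   such as

     #pragma omp target map (tofrom: p[0:n])

   where n may be zero at run time.  If the size folds to a constant the
   kind is chosen here; otherwise both encodings are kept and one is
   selected at run time by the COND_EXPR built below.  */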
7977 break;
7978 case OMP_CLAUSE_FIRSTPRIVATE:
7979 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7980 tkind = GOMP_MAP_TO;
7981 tkind_zero = tkind;
7982 break;
7983 case OMP_CLAUSE_TO:
7984 tkind = GOMP_MAP_TO;
7985 tkind_zero = tkind;
7986 break;
7987 case OMP_CLAUSE_FROM:
7988 tkind = GOMP_MAP_FROM;
7989 tkind_zero = tkind;
7990 break;
7991 default:
7992 gcc_unreachable ();
7994 gcc_checking_assert (tkind
7995 < (HOST_WIDE_INT_C (1U) << talign_shift));
7996 gcc_checking_assert (tkind_zero
7997 < (HOST_WIDE_INT_C (1U) << talign_shift));
7998 talign = ceil_log2 (talign);
7999 tkind |= talign << talign_shift;
8000 tkind_zero |= talign << talign_shift;
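/* Editor's worked example of the encoding above: mapping an 8-byte
   aligned object tofrom gives talign = ceil_log2 (8) = 3, so the emitted
   short is GOMP_MAP_TOFROM | (3 << 8): the map kind sits in the low
   talign_shift (8) bits, log2 of the alignment in the high bits.  */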
8001 gcc_checking_assert (tkind
8002 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8003 gcc_checking_assert (tkind_zero
8004 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8005 if (tkind == tkind_zero)
8006 x = build_int_cstu (tkind_type, tkind);
8007 else
8009 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8010 x = build3 (COND_EXPR, tkind_type,
8011 fold_build2 (EQ_EXPR, boolean_type_node,
8012 unshare_expr (s), size_zero_node),
8013 build_int_cstu (tkind_type, tkind_zero),
8014 build_int_cstu (tkind_type, tkind));
8016 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8017 if (nc && nc != c)
8018 c = nc;
8019 break;
8021 case OMP_CLAUSE_FIRSTPRIVATE:
8022 if (is_oacc_parallel (ctx))
8023 goto oacc_firstprivate_map;
8024 ovar = OMP_CLAUSE_DECL (c);
8025 if (omp_is_reference (ovar))
8026 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8027 else
8028 talign = DECL_ALIGN_UNIT (ovar);
8029 var = lookup_decl_in_outer_ctx (ovar, ctx);
8030 x = build_sender_ref (ovar, ctx);
8031 tkind = GOMP_MAP_FIRSTPRIVATE;
8032 type = TREE_TYPE (ovar);
8033 if (omp_is_reference (ovar))
8034 type = TREE_TYPE (type);
8035 if ((INTEGRAL_TYPE_P (type)
8036 && TYPE_PRECISION (type) <= POINTER_SIZE)
8037 || TREE_CODE (type) == POINTER_TYPE)
8039 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8040 tree t = var;
8041 if (omp_is_reference (var))
8042 t = build_simple_mem_ref (var);
8043 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8044 TREE_NO_WARNING (var) = 1;
8045 if (TREE_CODE (type) != POINTER_TYPE)
8046 t = fold_convert (pointer_sized_int_node, t);
8047 t = fold_convert (TREE_TYPE (x), t);
8048 gimplify_assign (x, t, &ilist);
8050 else if (omp_is_reference (var))
8051 gimplify_assign (x, var, &ilist);
8052 else if (is_gimple_reg (var))
8054 tree avar = create_tmp_var (TREE_TYPE (var));
8055 mark_addressable (avar);
8056 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8057 TREE_NO_WARNING (var) = 1;
8058 gimplify_assign (avar, var, &ilist);
8059 avar = build_fold_addr_expr (avar);
8060 gimplify_assign (x, avar, &ilist);
8062 else
8064 var = build_fold_addr_expr (var);
8065 gimplify_assign (x, var, &ilist);
8067 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8068 s = size_int (0);
8069 else if (omp_is_reference (ovar))
8070 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8071 else
8072 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8073 s = fold_convert (size_type_node, s);
8074 purpose = size_int (map_idx++);
8075 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8076 if (TREE_CODE (s) != INTEGER_CST)
8077 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8079 gcc_checking_assert (tkind
8080 < (HOST_WIDE_INT_C (1U) << talign_shift));
8081 talign = ceil_log2 (talign);
8082 tkind |= talign << talign_shift;
8083 gcc_checking_assert (tkind
8084 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8085 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8086 build_int_cstu (tkind_type, tkind));
8087 break;
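/* Editor's note on the case above: GOMP_MAP_FIRSTPRIVATE_INT is the
   by-value fast path.  For a small scalar such as "firstprivate (x)"
   with int x, the value itself is passed in the pointer slot (via the
   pointer_sized_int_node conversion) and the size entry is 0, so the
   runtime performs no copy at all.  */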
8089 case OMP_CLAUSE_USE_DEVICE_PTR:
8090 case OMP_CLAUSE_IS_DEVICE_PTR:
8091 ovar = OMP_CLAUSE_DECL (c);
8092 var = lookup_decl_in_outer_ctx (ovar, ctx);
8093 x = build_sender_ref (ovar, ctx);
8094 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8095 tkind = GOMP_MAP_USE_DEVICE_PTR;
8096 else
8097 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8098 type = TREE_TYPE (ovar);
8099 if (TREE_CODE (type) == ARRAY_TYPE)
8100 var = build_fold_addr_expr (var);
8101 else
8103 if (omp_is_reference (ovar))
8105 type = TREE_TYPE (type);
8106 if (TREE_CODE (type) != ARRAY_TYPE)
8107 var = build_simple_mem_ref (var);
8108 var = fold_convert (TREE_TYPE (x), var);
8111 gimplify_assign (x, var, &ilist);
8112 s = size_int (0);
8113 purpose = size_int (map_idx++);
8114 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8115 gcc_checking_assert (tkind
8116 < (HOST_WIDE_INT_C (1U) << talign_shift));
8117 gcc_checking_assert (tkind
8118 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8119 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8120 build_int_cstu (tkind_type, tkind));
8121 break;
8124 gcc_assert (map_idx == map_cnt);
8126 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8127 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8128 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8129 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8130 for (int i = 1; i <= 2; i++)
8131 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8133 gimple_seq initlist = NULL;
8134 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8135 TREE_VEC_ELT (t, i)),
8136 &initlist, true, NULL_TREE);
8137 gimple_seq_add_seq (&ilist, initlist);
8139 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8140 NULL);
8141 TREE_THIS_VOLATILE (clobber) = 1;
8142 gimple_seq_add_stmt (&olist,
8143 gimple_build_assign (TREE_VEC_ELT (t, i),
8144 clobber));
8147 tree clobber = build_constructor (ctx->record_type, NULL);
8148 TREE_THIS_VOLATILE (clobber) = 1;
8149 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8150 clobber));
8153 /* Once all the expansions are done, sequence all the different
8154 fragments inside gimple_omp_body. */
8156 new_body = NULL;
8158 if (offloaded
8159 && ctx->record_type)
8161 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8162 /* fixup_child_record_type might have changed receiver_decl's type. */
8163 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8164 gimple_seq_add_stmt (&new_body,
8165 gimple_build_assign (ctx->receiver_decl, t));
8167 gimple_seq_add_seq (&new_body, fplist);
8169 if (offloaded || data_region)
8171 tree prev = NULL_TREE;
8172 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8173 switch (OMP_CLAUSE_CODE (c))
8175 tree var, x;
8176 default:
8177 break;
8178 case OMP_CLAUSE_FIRSTPRIVATE:
8179 if (is_gimple_omp_oacc (ctx->stmt))
8180 break;
8181 var = OMP_CLAUSE_DECL (c);
8182 if (omp_is_reference (var)
8183 || is_gimple_reg_type (TREE_TYPE (var)))
8185 tree new_var = lookup_decl (var, ctx);
8186 tree type;
8187 type = TREE_TYPE (var);
8188 if (omp_is_reference (var))
8189 type = TREE_TYPE (type);
8190 if ((INTEGRAL_TYPE_P (type)
8191 && TYPE_PRECISION (type) <= POINTER_SIZE)
8192 || TREE_CODE (type) == POINTER_TYPE)
8194 x = build_receiver_ref (var, false, ctx);
8195 if (TREE_CODE (type) != POINTER_TYPE)
8196 x = fold_convert (pointer_sized_int_node, x);
8197 x = fold_convert (type, x);
8198 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8199 fb_rvalue);
8200 if (omp_is_reference (var))
8202 tree v = create_tmp_var_raw (type, get_name (var));
8203 gimple_add_tmp_var (v);
8204 TREE_ADDRESSABLE (v) = 1;
8205 gimple_seq_add_stmt (&new_body,
8206 gimple_build_assign (v, x));
8207 x = build_fold_addr_expr (v);
8209 gimple_seq_add_stmt (&new_body,
8210 gimple_build_assign (new_var, x));
8212 else
8214 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8215 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8216 fb_rvalue);
8217 gimple_seq_add_stmt (&new_body,
8218 gimple_build_assign (new_var, x));
8221 else if (is_variable_sized (var))
8223 tree pvar = DECL_VALUE_EXPR (var);
8224 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8225 pvar = TREE_OPERAND (pvar, 0);
8226 gcc_assert (DECL_P (pvar));
8227 tree new_var = lookup_decl (pvar, ctx);
8228 x = build_receiver_ref (var, false, ctx);
8229 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8230 gimple_seq_add_stmt (&new_body,
8231 gimple_build_assign (new_var, x));
8233 break;
8234 case OMP_CLAUSE_PRIVATE:
8235 if (is_gimple_omp_oacc (ctx->stmt))
8236 break;
8237 var = OMP_CLAUSE_DECL (c);
8238 if (omp_is_reference (var))
8240 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8241 tree new_var = lookup_decl (var, ctx);
8242 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8243 if (TREE_CONSTANT (x))
8245 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8246 get_name (var));
8247 gimple_add_tmp_var (x);
8248 TREE_ADDRESSABLE (x) = 1;
8249 x = build_fold_addr_expr_loc (clause_loc, x);
8251 else
8252 break;
8254 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8255 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8256 gimple_seq_add_stmt (&new_body,
8257 gimple_build_assign (new_var, x));
8259 break;
8260 case OMP_CLAUSE_USE_DEVICE_PTR:
8261 case OMP_CLAUSE_IS_DEVICE_PTR:
8262 var = OMP_CLAUSE_DECL (c);
8263 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8264 x = build_sender_ref (var, ctx);
8265 else
8266 x = build_receiver_ref (var, false, ctx);
8267 if (is_variable_sized (var))
8269 tree pvar = DECL_VALUE_EXPR (var);
8270 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8271 pvar = TREE_OPERAND (pvar, 0);
8272 gcc_assert (DECL_P (pvar));
8273 tree new_var = lookup_decl (pvar, ctx);
8274 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8275 gimple_seq_add_stmt (&new_body,
8276 gimple_build_assign (new_var, x));
8278 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8280 tree new_var = lookup_decl (var, ctx);
8281 new_var = DECL_VALUE_EXPR (new_var);
8282 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8283 new_var = TREE_OPERAND (new_var, 0);
8284 gcc_assert (DECL_P (new_var));
8285 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8286 gimple_seq_add_stmt (&new_body,
8287 gimple_build_assign (new_var, x));
8289 else
8291 tree type = TREE_TYPE (var);
8292 tree new_var = lookup_decl (var, ctx);
8293 if (omp_is_reference (var))
8295 type = TREE_TYPE (type);
8296 if (TREE_CODE (type) != ARRAY_TYPE)
8298 tree v = create_tmp_var_raw (type, get_name (var));
8299 gimple_add_tmp_var (v);
8300 TREE_ADDRESSABLE (v) = 1;
8301 x = fold_convert (type, x);
8302 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8303 fb_rvalue);
8304 gimple_seq_add_stmt (&new_body,
8305 gimple_build_assign (v, x));
8306 x = build_fold_addr_expr (v);
8309 new_var = DECL_VALUE_EXPR (new_var);
8310 x = fold_convert (TREE_TYPE (new_var), x);
8311 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8312 gimple_seq_add_stmt (&new_body,
8313 gimple_build_assign (new_var, x));
8315 break;
8317 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8318 so that any firstprivate vars holding an OMP_CLAUSE_SIZE value
8319 have already been handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8320 or references to VLAs. */
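/* Editor's sketch of the second pass: given

     int *p;
     #pragma omp target map (tofrom: p[2:n])

   the section map is followed by a GOMP_MAP_FIRSTPRIVATE_POINTER clause
   for "p" whose OMP_CLAUSE_SIZE records the bias of the section start;
   below, the region-local "p" is initialized from the received section
   address adjusted by the negated bias (the POINTER_PLUS_EXPR on the
   NEGATE_EXPR of the bias).  */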
8321 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8322 switch (OMP_CLAUSE_CODE (c))
8324 tree var;
8325 default:
8326 break;
8327 case OMP_CLAUSE_MAP:
8328 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8329 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8331 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8332 HOST_WIDE_INT offset = 0;
8333 gcc_assert (prev);
8334 var = OMP_CLAUSE_DECL (c);
8335 if (DECL_P (var)
8336 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8337 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8338 ctx))
8339 && varpool_node::get_create (var)->offloadable)
8340 break;
8341 if (TREE_CODE (var) == INDIRECT_REF
8342 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8343 var = TREE_OPERAND (var, 0);
8344 if (TREE_CODE (var) == COMPONENT_REF)
8346 var = get_addr_base_and_unit_offset (var, &offset);
8347 gcc_assert (var != NULL_TREE && DECL_P (var));
8349 else if (DECL_SIZE (var)
8350 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8352 tree var2 = DECL_VALUE_EXPR (var);
8353 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8354 var2 = TREE_OPERAND (var2, 0);
8355 gcc_assert (DECL_P (var2));
8356 var = var2;
8358 tree new_var = lookup_decl (var, ctx), x;
8359 tree type = TREE_TYPE (new_var);
8360 bool is_ref;
8361 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8362 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8363 == COMPONENT_REF))
8365 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8366 is_ref = true;
8367 new_var = build2 (MEM_REF, type,
8368 build_fold_addr_expr (new_var),
8369 build_int_cst (build_pointer_type (type),
8370 offset));
8372 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8374 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8375 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8376 new_var = build2 (MEM_REF, type,
8377 build_fold_addr_expr (new_var),
8378 build_int_cst (build_pointer_type (type),
8379 offset));
8381 else
8382 is_ref = omp_is_reference (var);
8383 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8384 is_ref = false;
8385 bool ref_to_array = false;
8386 if (is_ref)
8388 type = TREE_TYPE (type);
8389 if (TREE_CODE (type) == ARRAY_TYPE)
8391 type = build_pointer_type (type);
8392 ref_to_array = true;
8395 else if (TREE_CODE (type) == ARRAY_TYPE)
8397 tree decl2 = DECL_VALUE_EXPR (new_var);
8398 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8399 decl2 = TREE_OPERAND (decl2, 0);
8400 gcc_assert (DECL_P (decl2));
8401 new_var = decl2;
8402 type = TREE_TYPE (new_var);
8404 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8405 x = fold_convert_loc (clause_loc, type, x);
8406 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8408 tree bias = OMP_CLAUSE_SIZE (c);
8409 if (DECL_P (bias))
8410 bias = lookup_decl (bias, ctx);
8411 bias = fold_convert_loc (clause_loc, sizetype, bias);
8412 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8413 bias);
8414 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8415 TREE_TYPE (x), x, bias);
8417 if (ref_to_array)
8418 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8419 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8420 if (is_ref && !ref_to_array)
8422 tree t = create_tmp_var_raw (type, get_name (var));
8423 gimple_add_tmp_var (t);
8424 TREE_ADDRESSABLE (t) = 1;
8425 gimple_seq_add_stmt (&new_body,
8426 gimple_build_assign (t, x));
8427 x = build_fold_addr_expr_loc (clause_loc, t);
8429 gimple_seq_add_stmt (&new_body,
8430 gimple_build_assign (new_var, x));
8431 prev = NULL_TREE;
8433 else if (OMP_CLAUSE_CHAIN (c)
8434 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8435 == OMP_CLAUSE_MAP
8436 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8437 == GOMP_MAP_FIRSTPRIVATE_POINTER
8438 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8439 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8440 prev = c;
8441 break;
8442 case OMP_CLAUSE_PRIVATE:
8443 var = OMP_CLAUSE_DECL (c);
8444 if (is_variable_sized (var))
8446 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8447 tree new_var = lookup_decl (var, ctx);
8448 tree pvar = DECL_VALUE_EXPR (var);
8449 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8450 pvar = TREE_OPERAND (pvar, 0);
8451 gcc_assert (DECL_P (pvar));
8452 tree new_pvar = lookup_decl (pvar, ctx);
8453 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8454 tree al = size_int (DECL_ALIGN (var));
8455 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8456 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8457 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8458 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8459 gimple_seq_add_stmt (&new_body,
8460 gimple_build_assign (new_pvar, x));
8462 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8464 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8465 tree new_var = lookup_decl (var, ctx);
8466 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8467 if (TREE_CONSTANT (x))
8468 break;
8469 else
8471 tree atmp
8472 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8473 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8474 tree al = size_int (TYPE_ALIGN (rtype));
8475 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8478 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8479 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8480 gimple_seq_add_stmt (&new_body,
8481 gimple_build_assign (new_var, x));
8483 break;
8486 gimple_seq fork_seq = NULL;
8487 gimple_seq join_seq = NULL;
8489 if (is_oacc_parallel (ctx))
8491 /* If there are reductions on the offloaded region itself, treat
8492 them as a dummy GANG loop. */
8493 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8495 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8496 false, NULL, NULL, &fork_seq, &join_seq, ctx);
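/* Editor's example: this handles reductions written on the region
   itself, as in

     #pragma acc parallel reduction (+:sum)

   which has no explicit loop to attach them to; the reduction
   setup/teardown in fork_seq/join_seq brackets the whole body as if it
   were a single GANG-level loop.  */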
8499 gimple_seq_add_seq (&new_body, fork_seq);
8500 gimple_seq_add_seq (&new_body, tgt_body);
8501 gimple_seq_add_seq (&new_body, join_seq);
8503 if (offloaded)
8504 new_body = maybe_catch_exception (new_body);
8506 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8507 gimple_omp_set_body (stmt, new_body);
8510 bind = gimple_build_bind (NULL, NULL,
8511 tgt_bind ? gimple_bind_block (tgt_bind)
8512 : NULL_TREE);
8513 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8514 gimple_bind_add_seq (bind, ilist);
8515 gimple_bind_add_stmt (bind, stmt);
8516 gimple_bind_add_seq (bind, olist);
8518 pop_gimplify_context (NULL);
8520 if (dep_bind)
8522 gimple_bind_add_seq (dep_bind, dep_ilist);
8523 gimple_bind_add_stmt (dep_bind, bind);
8524 gimple_bind_add_seq (dep_bind, dep_olist);
8525 pop_gimplify_context (dep_bind);
8529 /* Expand code for an OpenMP teams directive. */
8531 static void
8532 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8534 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8535 push_gimplify_context ();
8537 tree block = make_node (BLOCK);
8538 gbind *bind = gimple_build_bind (NULL, NULL, block);
8539 gsi_replace (gsi_p, bind, true);
8540 gimple_seq bind_body = NULL;
8541 gimple_seq dlist = NULL;
8542 gimple_seq olist = NULL;
8544 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8545 OMP_CLAUSE_NUM_TEAMS);
8546 if (num_teams == NULL_TREE)
8547 num_teams = build_int_cst (unsigned_type_node, 0);
8548 else
8550 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8551 num_teams = fold_convert (unsigned_type_node, num_teams);
8552 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8554 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8555 OMP_CLAUSE_THREAD_LIMIT);
8556 if (thread_limit == NULL_TREE)
8557 thread_limit = build_int_cst (unsigned_type_node, 0);
8558 else
8560 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8561 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8562 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8563 fb_rvalue);
8566 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8567 &bind_body, &dlist, ctx, NULL);
8568 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8569 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8570 if (!gimple_omp_teams_grid_phony (teams_stmt))
8572 gimple_seq_add_stmt (&bind_body, teams_stmt);
8573 location_t loc = gimple_location (teams_stmt);
8574 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8575 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8576 gimple_set_location (call, loc);
8577 gimple_seq_add_stmt (&bind_body, call);
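/* Editor's sketch: for

     #pragma omp teams num_teams (4) thread_limit (8)

   the statements above amount to

     __builtin_GOMP_teams (4, 8);

   in front of the teams body; the value 0, used when a clause is absent,
   asks the libgomp runtime to pick a default.  */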
8580 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8581 gimple_omp_set_body (teams_stmt, NULL);
8582 gimple_seq_add_seq (&bind_body, olist);
8583 gimple_seq_add_seq (&bind_body, dlist);
8584 if (!gimple_omp_teams_grid_phony (teams_stmt))
8585 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8586 gimple_bind_set_body (bind, bind_body);
8588 pop_gimplify_context (bind);
8590 gimple_bind_append_vars (bind, ctx->block_vars);
8591 BLOCK_VARS (block) = ctx->block_vars;
8592 if (BLOCK_VARS (block))
8593 TREE_USED (block) = 1;
8596 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8598 static void
8599 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8601 gimple *stmt = gsi_stmt (*gsi_p);
8602 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8603 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8604 gimple_build_omp_return (false));
8608 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8609 regimplified. If DATA is non-NULL, lower_omp_1 is being called
8610 outside of an OMP context, but with task_shared_vars set. */
8612 static tree
8613 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8614 void *data)
8616 tree t = *tp;
8618 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8619 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8620 return t;
8622 if (task_shared_vars
8623 && DECL_P (t)
8624 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8625 return t;
8627 /* If a global variable has been privatized, TREE_CONSTANT on
8628 ADDR_EXPR might be wrong. */
8629 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8630 recompute_tree_invariant_for_addr_expr (t);
8632 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8633 return NULL_TREE;
8636 /* Data to be communicated between lower_omp_regimplify_operands and
8637 lower_omp_regimplify_operands_p. */
8639 struct lower_omp_regimplify_operands_data
8641 omp_context *ctx;
8642 vec<tree> *decls;
8645 /* Helper function for lower_omp_regimplify_operands. Find
8646 omp_member_access_dummy_var vars and temporarily adjust their
8647 DECL_VALUE_EXPRs if needed. */
8649 static tree
8650 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8651 void *data)
8653 tree t = omp_member_access_dummy_var (*tp);
8654 if (t)
8656 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8657 lower_omp_regimplify_operands_data *ldata
8658 = (lower_omp_regimplify_operands_data *) wi->info;
8659 tree o = maybe_lookup_decl (t, ldata->ctx);
8660 if (o != t)
8662 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8663 ldata->decls->safe_push (*tp);
8664 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8665 SET_DECL_VALUE_EXPR (*tp, v);
8668 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8669 return NULL_TREE;
8672 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8673 of omp_member_access_dummy_var vars during regimplification. */
8675 static void
8676 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8677 gimple_stmt_iterator *gsi_p)
8679 auto_vec<tree, 10> decls;
8680 if (ctx)
8682 struct walk_stmt_info wi;
8683 memset (&wi, '\0', sizeof (wi));
8684 struct lower_omp_regimplify_operands_data data;
8685 data.ctx = ctx;
8686 data.decls = &decls;
8687 wi.info = &data;
8688 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8690 gimple_regimplify_operands (stmt, gsi_p);
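/* Editor's note: DECLS holds (saved DECL_VALUE_EXPR, var) pairs pushed
   by the walk above; popping them in reverse restores the original
   value-exprs now that regimplification is done.  */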
8691 while (!decls.is_empty ())
8693 tree t = decls.pop ();
8694 tree v = decls.pop ();
8695 SET_DECL_VALUE_EXPR (t, v);
8699 static void
8700 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8702 gimple *stmt = gsi_stmt (*gsi_p);
8703 struct walk_stmt_info wi;
8704 gcall *call_stmt;
8706 if (gimple_has_location (stmt))
8707 input_location = gimple_location (stmt);
8709 if (task_shared_vars)
8710 memset (&wi, '\0', sizeof (wi));
8712 /* If we have issued syntax errors, avoid doing any heavy lifting.
8713 Just replace the OMP directives with a NOP to avoid
8714 confusing RTL expansion. */
8715 if (seen_error () && is_gimple_omp (stmt))
8717 gsi_replace (gsi_p, gimple_build_nop (), true);
8718 return;
8721 switch (gimple_code (stmt))
8723 case GIMPLE_COND:
8725 gcond *cond_stmt = as_a <gcond *> (stmt);
8726 if ((ctx || task_shared_vars)
8727 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8728 lower_omp_regimplify_p,
8729 ctx ? NULL : &wi, NULL)
8730 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8731 lower_omp_regimplify_p,
8732 ctx ? NULL : &wi, NULL)))
8733 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8735 break;
8736 case GIMPLE_CATCH:
8737 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8738 break;
8739 case GIMPLE_EH_FILTER:
8740 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8741 break;
8742 case GIMPLE_TRY:
8743 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8744 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8745 break;
8746 case GIMPLE_TRANSACTION:
8747 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8748 ctx);
8749 break;
8750 case GIMPLE_BIND:
8751 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8752 break;
8753 case GIMPLE_OMP_PARALLEL:
8754 case GIMPLE_OMP_TASK:
8755 ctx = maybe_lookup_ctx (stmt);
8756 gcc_assert (ctx);
8757 if (ctx->cancellable)
8758 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8759 lower_omp_taskreg (gsi_p, ctx);
8760 break;
8761 case GIMPLE_OMP_FOR:
8762 ctx = maybe_lookup_ctx (stmt);
8763 gcc_assert (ctx);
8764 if (ctx->cancellable)
8765 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8766 lower_omp_for (gsi_p, ctx);
8767 break;
8768 case GIMPLE_OMP_SECTIONS:
8769 ctx = maybe_lookup_ctx (stmt);
8770 gcc_assert (ctx);
8771 if (ctx->cancellable)
8772 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8773 lower_omp_sections (gsi_p, ctx);
8774 break;
8775 case GIMPLE_OMP_SINGLE:
8776 ctx = maybe_lookup_ctx (stmt);
8777 gcc_assert (ctx);
8778 lower_omp_single (gsi_p, ctx);
8779 break;
8780 case GIMPLE_OMP_MASTER:
8781 ctx = maybe_lookup_ctx (stmt);
8782 gcc_assert (ctx);
8783 lower_omp_master (gsi_p, ctx);
8784 break;
8785 case GIMPLE_OMP_TASKGROUP:
8786 ctx = maybe_lookup_ctx (stmt);
8787 gcc_assert (ctx);
8788 lower_omp_taskgroup (gsi_p, ctx);
8789 break;
8790 case GIMPLE_OMP_ORDERED:
8791 ctx = maybe_lookup_ctx (stmt);
8792 gcc_assert (ctx);
8793 lower_omp_ordered (gsi_p, ctx);
8794 break;
8795 case GIMPLE_OMP_CRITICAL:
8796 ctx = maybe_lookup_ctx (stmt);
8797 gcc_assert (ctx);
8798 lower_omp_critical (gsi_p, ctx);
8799 break;
8800 case GIMPLE_OMP_ATOMIC_LOAD:
8801 if ((ctx || task_shared_vars)
8802 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8803 as_a <gomp_atomic_load *> (stmt)),
8804 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8805 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8806 break;
8807 case GIMPLE_OMP_TARGET:
8808 ctx = maybe_lookup_ctx (stmt);
8809 gcc_assert (ctx);
8810 lower_omp_target (gsi_p, ctx);
8811 break;
8812 case GIMPLE_OMP_TEAMS:
8813 ctx = maybe_lookup_ctx (stmt);
8814 gcc_assert (ctx);
8815 lower_omp_teams (gsi_p, ctx);
8816 break;
8817 case GIMPLE_OMP_GRID_BODY:
8818 ctx = maybe_lookup_ctx (stmt);
8819 gcc_assert (ctx);
8820 lower_omp_grid_body (gsi_p, ctx);
8821 break;
8822 case GIMPLE_CALL:
8823 tree fndecl;
8824 call_stmt = as_a <gcall *> (stmt);
8825 fndecl = gimple_call_fndecl (call_stmt);
8826 if (fndecl
8827 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8828 switch (DECL_FUNCTION_CODE (fndecl))
8830 case BUILT_IN_GOMP_BARRIER:
8831 if (ctx == NULL)
8832 break;
8833 /* FALLTHRU */
8834 case BUILT_IN_GOMP_CANCEL:
8835 case BUILT_IN_GOMP_CANCELLATION_POINT:
8836 omp_context *cctx;
8837 cctx = ctx;
8838 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8839 cctx = cctx->outer;
8840 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8841 if (!cctx->cancellable)
8843 if (DECL_FUNCTION_CODE (fndecl)
8844 == BUILT_IN_GOMP_CANCELLATION_POINT)
8846 stmt = gimple_build_nop ();
8847 gsi_replace (gsi_p, stmt, false);
8849 break;
8851 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8853 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8854 gimple_call_set_fndecl (call_stmt, fndecl);
8855 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8857 tree lhs;
8858 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8859 gimple_call_set_lhs (call_stmt, lhs);
8860 tree fallthru_label;
8861 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8862 gimple *g;
8863 g = gimple_build_label (fallthru_label);
8864 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8865 g = gimple_build_cond (NE_EXPR, lhs,
8866 fold_convert (TREE_TYPE (lhs),
8867 boolean_false_node),
8868 cctx->cancel_label, fallthru_label);
8869 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8870 break;
8871 default:
8872 break;
8874 /* FALLTHRU */
8875 default:
8876 if ((ctx || task_shared_vars)
8877 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8878 ctx ? NULL : &wi))
8880 /* Just remove clobbers; this should happen only if we have
8881 "privatized" local addressable variables in SIMD regions,
8882 the clobber isn't needed in that case and gimplifying address
8883 of the ARRAY_REF into a pointer and creating MEM_REF based
8884 clobber would create worse code than we get with the clobber
8885 dropped. */
8886 if (gimple_clobber_p (stmt))
8888 gsi_replace (gsi_p, gimple_build_nop (), true);
8889 break;
8891 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8893 break;
8897 static void
8898 lower_omp (gimple_seq *body, omp_context *ctx)
8900 location_t saved_location = input_location;
8901 gimple_stmt_iterator gsi;
8902 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8903 lower_omp_1 (&gsi, ctx);
8904 /* During gimplification, we haven't folded statements inside offloading
8905 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8906 if (target_nesting_level || taskreg_nesting_level)
8907 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8908 fold_stmt (&gsi);
8909 input_location = saved_location;
8912 /* Main entry point. */
8914 static unsigned int
8915 execute_lower_omp (void)
8917 gimple_seq body;
8918 int i;
8919 omp_context *ctx;
8921 /* This pass always runs, to provide PROP_gimple_lomp.
8922 But often, there is nothing to do. */
8923 if (flag_openacc == 0 && flag_openmp == 0
8924 && flag_openmp_simd == 0)
8925 return 0;
8927 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8928 delete_omp_context);
8930 body = gimple_body (current_function_decl);
8932 if (hsa_gen_requested_p ())
8933 omp_grid_gridify_all_targets (&body);
8935 scan_omp (&body, NULL);
8936 gcc_assert (taskreg_nesting_level == 0);
8937 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8938 finish_taskreg_scan (ctx);
8939 taskreg_contexts.release ();
8941 if (all_contexts->root)
8943 if (task_shared_vars)
8944 push_gimplify_context ();
8945 lower_omp (&body, NULL);
8946 if (task_shared_vars)
8947 pop_gimplify_context (NULL);
8950 if (all_contexts)
8952 splay_tree_delete (all_contexts);
8953 all_contexts = NULL;
8955 BITMAP_FREE (task_shared_vars);
8956 return 0;
8959 namespace {
8961 const pass_data pass_data_lower_omp =
8963 GIMPLE_PASS, /* type */
8964 "omplower", /* name */
8965 OPTGROUP_OMP, /* optinfo_flags */
8966 TV_NONE, /* tv_id */
8967 PROP_gimple_any, /* properties_required */
8968 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8969 0, /* properties_destroyed */
8970 0, /* todo_flags_start */
8971 0, /* todo_flags_finish */
8974 class pass_lower_omp : public gimple_opt_pass
8976 public:
8977 pass_lower_omp (gcc::context *ctxt)
8978 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8981 /* opt_pass methods: */
8982 virtual unsigned int execute (function *) { return execute_lower_omp (); }
8984 }; // class pass_lower_omp
8986 } // anon namespace
8988 gimple_opt_pass *
8989 make_pass_lower_omp (gcc::context *ctxt)
8991 return new pass_lower_omp (ctxt);
8994 /* The following is a utility to diagnose structured block violations.
8995 It is not part of the "omplower" pass, as that's invoked too late. It
8996 should be invoked by the respective front ends after gimplification. */
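/* For instance (editor's example), the branch in

     #pragma omp parallel
     {
       goto out;
     }
    out:;

   leaves the structured block and is reported by diagnose_sb_2 below as
   "invalid branch to/from OpenMP structured block".  */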
8998 static splay_tree all_labels;
9000 /* Check for mismatched contexts and generate an error if needed. Return
9001 true if an error is detected. */
9003 static bool
9004 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9005 gimple *branch_ctx, gimple *label_ctx)
9007 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9008 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9010 if (label_ctx == branch_ctx)
9011 return false;
9013 const char* kind = NULL;
9015 if (flag_openacc)
9017 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9018 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9020 gcc_checking_assert (kind == NULL);
9021 kind = "OpenACC";
9024 if (kind == NULL)
9026 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9027 kind = "OpenMP";
9030 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9031 so we could traverse it and issue a correct "exit" or "enter" error
9032 message upon a structured block violation.
9034 We built the context as a list via tree_cons'ing, but there is
9035 no easy counterpart in gimple tuples. It seems like far too much work
9036 for issuing exit/enter error messages. If someone really misses the
9037 distinct error message... patches welcome. */
9039 #if 0
9040 /* Try to avoid confusing the user by producing an error message
9041 with correct "exit" or "enter" verbiage. We prefer "exit"
9042 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9043 if (branch_ctx == NULL)
9044 exit_p = false;
9045 else
9047 while (label_ctx)
9049 if (TREE_VALUE (label_ctx) == branch_ctx)
9051 exit_p = false;
9052 break;
9054 label_ctx = TREE_CHAIN (label_ctx);
9058 if (exit_p)
9059 error ("invalid exit from %s structured block", kind);
9060 else
9061 error ("invalid entry to %s structured block", kind);
9062 #endif
9064 /* If it's obvious we have an invalid entry, be specific about the error. */
9065 if (branch_ctx == NULL)
9066 error ("invalid entry to %s structured block", kind);
9067 else
9069 /* Otherwise, be vague and lazy, but efficient. */
9070 error ("invalid branch to/from %s structured block", kind);
9073 gsi_replace (gsi_p, gimple_build_nop (), false);
9074 return true;
9077 /* Pass 1: Create a minimal tree of structured blocks, and record
9078 where each label is found. */
9080 static tree
9081 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9082 struct walk_stmt_info *wi)
9084 gimple *context = (gimple *) wi->info;
9085 gimple *inner_context;
9086 gimple *stmt = gsi_stmt (*gsi_p);
9088 *handled_ops_p = true;
9090 switch (gimple_code (stmt))
9092 WALK_SUBSTMTS;
9094 case GIMPLE_OMP_PARALLEL:
9095 case GIMPLE_OMP_TASK:
9096 case GIMPLE_OMP_SECTIONS:
9097 case GIMPLE_OMP_SINGLE:
9098 case GIMPLE_OMP_SECTION:
9099 case GIMPLE_OMP_MASTER:
9100 case GIMPLE_OMP_ORDERED:
9101 case GIMPLE_OMP_CRITICAL:
9102 case GIMPLE_OMP_TARGET:
9103 case GIMPLE_OMP_TEAMS:
9104 case GIMPLE_OMP_TASKGROUP:
9105 /* The minimal context here is just the current OMP construct. */
9106 inner_context = stmt;
9107 wi->info = inner_context;
9108 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9109 wi->info = context;
9110 break;
9112 case GIMPLE_OMP_FOR:
9113 inner_context = stmt;
9114 wi->info = inner_context;
9115 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9116 walk them. */
9117 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9118 diagnose_sb_1, NULL, wi);
9119 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9120 wi->info = context;
9121 break;
9123 case GIMPLE_LABEL:
9124 splay_tree_insert (all_labels,
9125 (splay_tree_key) gimple_label_label (
9126 as_a <glabel *> (stmt)),
9127 (splay_tree_value) context);
9128 break;
9130 default:
9131 break;
9134 return NULL_TREE;
9137 /* Pass 2: Check each branch and see if its context differs from that of
9138 the destination label's context. */
9140 static tree
9141 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9142 struct walk_stmt_info *wi)
9144 gimple *context = (gimple *) wi->info;
9145 splay_tree_node n;
9146 gimple *stmt = gsi_stmt (*gsi_p);
9148 *handled_ops_p = true;
9150 switch (gimple_code (stmt))
9152 WALK_SUBSTMTS;
9154 case GIMPLE_OMP_PARALLEL:
9155 case GIMPLE_OMP_TASK:
9156 case GIMPLE_OMP_SECTIONS:
9157 case GIMPLE_OMP_SINGLE:
9158 case GIMPLE_OMP_SECTION:
9159 case GIMPLE_OMP_MASTER:
9160 case GIMPLE_OMP_ORDERED:
9161 case GIMPLE_OMP_CRITICAL:
9162 case GIMPLE_OMP_TARGET:
9163 case GIMPLE_OMP_TEAMS:
9164 case GIMPLE_OMP_TASKGROUP:
9165 wi->info = stmt;
9166 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9167 wi->info = context;
9168 break;
9170 case GIMPLE_OMP_FOR:
9171 wi->info = stmt;
9172 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9173 walk them. */
9174 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9175 diagnose_sb_2, NULL, wi);
9176 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9177 wi->info = context;
9178 break;
9180 case GIMPLE_COND:
9182 gcond *cond_stmt = as_a <gcond *> (stmt);
9183 tree lab = gimple_cond_true_label (cond_stmt);
9184 if (lab)
9186 n = splay_tree_lookup (all_labels,
9187 (splay_tree_key) lab);
9188 diagnose_sb_0 (gsi_p, context,
9189 n ? (gimple *) n->value : NULL);
9191 lab = gimple_cond_false_label (cond_stmt);
9192 if (lab)
9194 n = splay_tree_lookup (all_labels,
9195 (splay_tree_key) lab);
9196 diagnose_sb_0 (gsi_p, context,
9197 n ? (gimple *) n->value : NULL);
9200 break;
9202 case GIMPLE_GOTO:
9204 tree lab = gimple_goto_dest (stmt);
9205 if (TREE_CODE (lab) != LABEL_DECL)
9206 break;
9208 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9209 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9211 break;
9213 case GIMPLE_SWITCH:
9215 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9216 unsigned int i;
9217 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9219 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9220 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9221 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9222 break;
9225 break;
9227 case GIMPLE_RETURN:
9228 diagnose_sb_0 (gsi_p, context, NULL);
9229 break;
9231 default:
9232 break;
9235 return NULL_TREE;
9238 static unsigned int
9239 diagnose_omp_structured_block_errors (void)
9241 struct walk_stmt_info wi;
9242 gimple_seq body = gimple_body (current_function_decl);
9244 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9246 memset (&wi, 0, sizeof (wi));
9247 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9249 memset (&wi, 0, sizeof (wi));
9250 wi.want_locations = true;
9251 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9253 gimple_set_body (current_function_decl, body);
9255 splay_tree_delete (all_labels);
9256 all_labels = NULL;
9258 return 0;
9261 namespace {
9263 const pass_data pass_data_diagnose_omp_blocks =
9265 GIMPLE_PASS, /* type */
9266 "*diagnose_omp_blocks", /* name */
9267 OPTGROUP_OMP, /* optinfo_flags */
9268 TV_NONE, /* tv_id */
9269 PROP_gimple_any, /* properties_required */
9270 0, /* properties_provided */
9271 0, /* properties_destroyed */
9272 0, /* todo_flags_start */
9273 0, /* todo_flags_finish */
9276 class pass_diagnose_omp_blocks : public gimple_opt_pass
9278 public:
9279 pass_diagnose_omp_blocks (gcc::context *ctxt)
9280 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9283 /* opt_pass methods: */
9284 virtual bool gate (function *)
9286 return flag_openacc || flag_openmp || flag_openmp_simd;
9288 virtual unsigned int execute (function *)
9290 return diagnose_omp_structured_block_errors ();
9293 }; // class pass_diagnose_omp_blocks
9295 } // anon namespace
9297 gimple_opt_pass *
9298 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9300 return new pass_diagnose_omp_blocks (ctxt);
9304 #include "gt-omp-low.h"