objc-act.c (synth_module_prologue): Use TREE_NO_WARNING instead of DECL_IN_SYSTEM_HEADER.
[official-gcc.git] / gcc / omp-low.c
blob2f9aedeac39933f5a8b12aa2339fd7b22d32917f
1 /* Lowering pass for OpenMP directives. Converts OpenMP directives
2 into explicit calls to the runtime library (libgomp) and data
3 marshalling to implement data sharing and copying clauses.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "tree-gimple.h"
31 #include "tree-inline.h"
32 #include "langhooks.h"
33 #include "diagnostic.h"
34 #include "tree-flow.h"
35 #include "timevar.h"
36 #include "flags.h"
37 #include "function.h"
38 #include "expr.h"
39 #include "toplev.h"
40 #include "tree-pass.h"
41 #include "ggc.h"
42 #include "except.h"
43 #include "splay-tree.h"
44 #include "optabs.h"
45 #include "cfgloop.h"
47 /* Lowering of OpenMP parallel and workshare constructs proceeds in two
48 phases. The first phase scans the function looking for OMP statements
49 and then for variables that must be replaced to satisfy data sharing
50 clauses. The second phase expands code for the constructs, as well as
51 re-gimplifying things when variables have been replaced with complex
52 expressions.
54 Final code generation is done by pass_expand_omp. The flowgraph is
55 scanned for parallel regions which are then moved to a new
56 function, to be invoked by the thread library. */
58 /* Context structure. Used to store information about each parallel
59 directive in the code. */
61 typedef struct omp_context
63 /* This field must be at the beginning, as we do "inheritance": Some
64 callback functions for tree-inline.c (e.g., omp_copy_decl)
65 receive a copy_body_data pointer that is up-casted to an
66 omp_context pointer. */
67 copy_body_data cb;
69 /* The tree of contexts corresponding to the encountered constructs. */
70 struct omp_context *outer;
71 tree stmt;
73 /* Map variables to fields in a structure that allows communication
74 between sending and receiving threads. */
75 splay_tree field_map;
76 tree record_type;
77 tree sender_decl;
78 tree receiver_decl;
80 /* These are used just by task contexts, if task firstprivate fn is
81 needed. srecord_type is used to communicate from the thread
82 that encountered the task construct to task firstprivate fn,
83 record_type is allocated by GOMP_task, initialized by task firstprivate
84 fn and passed to the task body fn. */
85 splay_tree sfield_map;
86 tree srecord_type;
88 /* A chain of variables to add to the top-level block surrounding the
89 construct. In the case of a parallel, this is in the child function. */
90 tree block_vars;
92 /* What to do with variables with implicitly determined sharing
93 attributes. */
94 enum omp_clause_default_kind default_kind;
96 /* Nesting depth of this context. Used to beautify error messages re
97 invalid gotos. The outermost ctx is depth 1, with depth 0 being
98 reserved for the main body of the function. */
99 int depth;
101 /* True if this parallel directive is nested within another. */
102 bool is_nested;
103 } omp_context;
106 struct omp_for_data_loop
108 tree v, n1, n2, step;
109 enum tree_code cond_code;
112 /* A structure describing the main elements of a parallel loop. */
114 struct omp_for_data
116 struct omp_for_data_loop loop;
117 tree chunk_size, for_stmt;
118 tree pre, iter_type;
119 int collapse;
120 bool have_nowait, have_ordered;
121 enum omp_clause_schedule_kind sched_kind;
122 struct omp_for_data_loop *loops;
126 static splay_tree all_contexts;
127 static int taskreg_nesting_level;
128 struct omp_region *root_omp_region;
129 static bitmap task_shared_vars;
131 static void scan_omp (tree *, omp_context *);
132 static void lower_omp (tree *, omp_context *);
133 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
134 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
136 /* Find an OpenMP clause of type KIND within CLAUSES. */
138 tree
139 find_omp_clause (tree clauses, enum tree_code kind)
141 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
142 if (OMP_CLAUSE_CODE (clauses) == kind)
143 return clauses;
145 return NULL_TREE;
148 /* Return true if CTX is for an omp parallel. */
150 static inline bool
151 is_parallel_ctx (omp_context *ctx)
153 return TREE_CODE (ctx->stmt) == OMP_PARALLEL;
157 /* Return true if CTX is for an omp task. */
159 static inline bool
160 is_task_ctx (omp_context *ctx)
162 return TREE_CODE (ctx->stmt) == OMP_TASK;
166 /* Return true if CTX is for an omp parallel or omp task. */
168 static inline bool
169 is_taskreg_ctx (omp_context *ctx)
171 return TREE_CODE (ctx->stmt) == OMP_PARALLEL
172 || TREE_CODE (ctx->stmt) == OMP_TASK;
176 /* Return true if REGION is a combined parallel+workshare region. */
178 static inline bool
179 is_combined_parallel (struct omp_region *region)
181 return region->is_combined_parallel;
185 /* Extract the header elements of parallel loop FOR_STMT and store
186 them into *FD. */
188 static void
189 extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
190 struct omp_for_data_loop *loops)
192 tree t, var, *collapse_iter, *collapse_count;
193 tree count = NULL_TREE, iter_type = long_integer_type_node;
194 struct omp_for_data_loop *loop;
195 int i;
196 struct omp_for_data_loop dummy_loop;
198 fd->for_stmt = for_stmt;
199 fd->pre = NULL;
200 fd->collapse = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
201 if (fd->collapse > 1)
202 fd->loops = loops;
203 else
204 fd->loops = &fd->loop;
206 fd->have_nowait = fd->have_ordered = false;
207 fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
208 fd->chunk_size = NULL_TREE;
209 collapse_iter = NULL;
210 collapse_count = NULL;
212 for (t = OMP_FOR_CLAUSES (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
213 switch (OMP_CLAUSE_CODE (t))
215 case OMP_CLAUSE_NOWAIT:
216 fd->have_nowait = true;
217 break;
218 case OMP_CLAUSE_ORDERED:
219 fd->have_ordered = true;
220 break;
221 case OMP_CLAUSE_SCHEDULE:
222 fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
223 fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
224 break;
225 case OMP_CLAUSE_COLLAPSE:
226 if (fd->collapse > 1)
228 collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
229 collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
231 default:
232 break;
235 /* FIXME: for now map schedule(auto) to schedule(static).
236 There should be analysis to determine whether all iterations
237 are approximately the same amount of work (then schedule(static)
238 is best) or if it varies (then schedule(dynamic,N) is better). */
239 if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
241 fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
242 gcc_assert (fd->chunk_size == NULL);
244 gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
245 if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
246 gcc_assert (fd->chunk_size == NULL);
247 else if (fd->chunk_size == NULL)
249 /* We only need to compute a default chunk size for ordered
250 static loops and dynamic loops. */
251 if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
252 || fd->have_ordered
253 || fd->collapse > 1)
254 fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
255 ? integer_zero_node : integer_one_node;
258 for (i = 0; i < fd->collapse; i++)
260 if (fd->collapse == 1)
261 loop = &fd->loop;
262 else if (loops != NULL)
263 loop = loops + i;
264 else
265 loop = &dummy_loop;
267 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
268 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
269 loop->v = GIMPLE_STMT_OPERAND (t, 0);
270 gcc_assert (SSA_VAR_P (loop->v));
271 gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
272 || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
273 var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
274 loop->n1 = GIMPLE_STMT_OPERAND (t, 1);
276 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
277 loop->cond_code = TREE_CODE (t);
278 gcc_assert (TREE_OPERAND (t, 0) == var);
279 loop->n2 = TREE_OPERAND (t, 1);
280 switch (loop->cond_code)
282 case LT_EXPR:
283 case GT_EXPR:
284 break;
285 case LE_EXPR:
286 if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
287 loop->n2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
288 loop->n2, size_one_node);
289 else
290 loop->n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
291 build_int_cst (TREE_TYPE (loop->n2), 1));
292 loop->cond_code = LT_EXPR;
293 break;
294 case GE_EXPR:
295 if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
296 loop->n2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
297 loop->n2, size_int (-1));
298 else
299 loop->n2 = fold_build2 (MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
300 build_int_cst (TREE_TYPE (loop->n2), 1));
301 loop->cond_code = GT_EXPR;
302 break;
303 default:
304 gcc_unreachable ();
307 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
308 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
309 gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == var);
310 t = GIMPLE_STMT_OPERAND (t, 1);
311 gcc_assert (TREE_OPERAND (t, 0) == var);
312 switch (TREE_CODE (t))
314 case PLUS_EXPR:
315 case POINTER_PLUS_EXPR:
316 loop->step = TREE_OPERAND (t, 1);
317 break;
318 case MINUS_EXPR:
319 loop->step = TREE_OPERAND (t, 1);
320 loop->step = fold_build1 (NEGATE_EXPR, TREE_TYPE (loop->step),
321 loop->step);
322 break;
323 default:
324 gcc_unreachable ();
327 if (iter_type != long_long_unsigned_type_node)
329 if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
330 iter_type = long_long_unsigned_type_node;
331 else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
332 && TYPE_PRECISION (TREE_TYPE (loop->v))
333 >= TYPE_PRECISION (iter_type))
335 tree n;
337 if (loop->cond_code == LT_EXPR)
338 n = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->v),
339 loop->n2, loop->step);
340 else
341 n = loop->n1;
342 if (TREE_CODE (n) != INTEGER_CST
343 || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
344 iter_type = long_long_unsigned_type_node;
346 else if (TYPE_PRECISION (TREE_TYPE (loop->v))
347 > TYPE_PRECISION (iter_type))
349 tree n1, n2;
351 if (loop->cond_code == LT_EXPR)
353 n1 = loop->n1;
354 n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->v),
355 loop->n2, loop->step);
357 else
359 n1 = fold_build2 (MINUS_EXPR, TREE_TYPE (loop->v),
360 loop->n2, loop->step);
361 n2 = loop->n1;
363 if (TREE_CODE (n1) != INTEGER_CST
364 || TREE_CODE (n2) != INTEGER_CST
365 || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
366 || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
367 iter_type = long_long_unsigned_type_node;
371 if (collapse_count && *collapse_count == NULL)
373 if ((i == 0 || count != NULL_TREE)
374 && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
375 && TREE_CONSTANT (loop->n1)
376 && TREE_CONSTANT (loop->n2)
377 && TREE_CODE (loop->step) == INTEGER_CST)
379 tree itype = TREE_TYPE (loop->v);
381 if (POINTER_TYPE_P (itype))
382 itype
383 = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
384 t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
385 t = fold_build2 (PLUS_EXPR, itype,
386 fold_convert (itype, loop->step), t);
387 t = fold_build2 (PLUS_EXPR, itype, t,
388 fold_convert (itype, loop->n2));
389 t = fold_build2 (MINUS_EXPR, itype, t,
390 fold_convert (itype, loop->n1));
391 if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
392 t = fold_build2 (TRUNC_DIV_EXPR, itype,
393 fold_build1 (NEGATE_EXPR, itype, t),
394 fold_build1 (NEGATE_EXPR, itype,
395 fold_convert (itype,
396 loop->step)));
397 else
398 t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
399 fold_convert (itype, loop->step));
400 t = fold_convert (long_long_unsigned_type_node, t);
401 if (count != NULL_TREE)
402 count = fold_build2 (MULT_EXPR, long_long_unsigned_type_node,
403 count, t);
404 else
405 count = t;
406 if (TREE_CODE (count) != INTEGER_CST)
407 count = NULL_TREE;
409 else
410 count = NULL_TREE;
414 if (count)
416 if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
417 iter_type = long_long_unsigned_type_node;
418 else
419 iter_type = long_integer_type_node;
421 else if (collapse_iter && *collapse_iter != NULL)
422 iter_type = TREE_TYPE (*collapse_iter);
423 fd->iter_type = iter_type;
424 if (collapse_iter && *collapse_iter == NULL)
425 *collapse_iter = create_tmp_var (iter_type, ".iter");
426 if (collapse_count && *collapse_count == NULL)
428 if (count)
429 *collapse_count = fold_convert (iter_type, count);
430 else
431 *collapse_count = create_tmp_var (iter_type, ".count");
434 if (fd->collapse > 1)
436 fd->loop.v = *collapse_iter;
437 fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
438 fd->loop.n2 = *collapse_count;
439 fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
440 fd->loop.cond_code = LT_EXPR;
445 /* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
446 is the immediate dominator of PAR_ENTRY_BB, return true if there
447 are no data dependencies that would prevent expanding the parallel
448 directive at PAR_ENTRY_BB as a combined parallel+workshare region.
450 When expanding a combined parallel+workshare region, the call to
451 the child function may need additional arguments in the case of
452 OMP_FOR regions. In some cases, these arguments are computed out
453 of variables passed in from the parent to the child via 'struct
454 .omp_data_s'. For instance:
456 #pragma omp parallel for schedule (guided, i * 4)
457 for (j ...)
459 Is lowered into:
461 # BLOCK 2 (PAR_ENTRY_BB)
462 .omp_data_o.i = i;
463 #pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)
465 # BLOCK 3 (WS_ENTRY_BB)
466 .omp_data_i = &.omp_data_o;
467 D.1667 = .omp_data_i->i;
468 D.1598 = D.1667 * 4;
469 #pragma omp for schedule (guided, D.1598)
471 When we outline the parallel region, the call to the child function
472 'bar.omp_fn.0' will need the value D.1598 in its argument list, but
473 that value is computed *after* the call site. So, in principle we
474 cannot do the transformation.
476 To see whether the code in WS_ENTRY_BB blocks the combined
477 parallel+workshare call, we collect all the variables used in the
478 OMP_FOR header check whether they appear on the LHS of any
479 statement in WS_ENTRY_BB. If so, then we cannot emit the combined
480 call.
482 FIXME. If we had the SSA form built at this point, we could merely
483 hoist the code in block 3 into block 2 and be done with it. But at
484 this point we don't have dataflow information and though we could
485 hack something up here, it is really not worth the aggravation. */
487 static bool
488 workshare_safe_to_combine_p (basic_block par_entry_bb, basic_block ws_entry_bb)
490 struct omp_for_data fd;
491 tree par_stmt, ws_stmt;
493 par_stmt = last_stmt (par_entry_bb);
494 ws_stmt = last_stmt (ws_entry_bb);
496 if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
497 return true;
499 gcc_assert (TREE_CODE (ws_stmt) == OMP_FOR);
501 extract_omp_for_data (ws_stmt, &fd, NULL);
503 if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
504 return false;
505 if (fd.iter_type != long_integer_type_node)
506 return false;
508 /* FIXME. We give up too easily here. If any of these arguments
509 are not constants, they will likely involve variables that have
510 been mapped into fields of .omp_data_s for sharing with the child
511 function. With appropriate data flow, it would be possible to
512 see through this. */
513 if (!is_gimple_min_invariant (fd.loop.n1)
514 || !is_gimple_min_invariant (fd.loop.n2)
515 || !is_gimple_min_invariant (fd.loop.step)
516 || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
517 return false;
519 return true;
523 /* Collect additional arguments needed to emit a combined
524 parallel+workshare call. WS_STMT is the workshare directive being
525 expanded. */
527 static tree
528 get_ws_args_for (tree ws_stmt)
530 tree t;
532 if (TREE_CODE (ws_stmt) == OMP_FOR)
534 struct omp_for_data fd;
535 tree ws_args;
537 extract_omp_for_data (ws_stmt, &fd, NULL);
539 ws_args = NULL_TREE;
540 if (fd.chunk_size)
542 t = fold_convert (long_integer_type_node, fd.chunk_size);
543 ws_args = tree_cons (NULL, t, ws_args);
546 t = fold_convert (long_integer_type_node, fd.loop.step);
547 ws_args = tree_cons (NULL, t, ws_args);
549 t = fold_convert (long_integer_type_node, fd.loop.n2);
550 ws_args = tree_cons (NULL, t, ws_args);
552 t = fold_convert (long_integer_type_node, fd.loop.n1);
553 ws_args = tree_cons (NULL, t, ws_args);
555 return ws_args;
557 else if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
559 /* Number of sections is equal to the number of edges from the
560 OMP_SECTIONS_SWITCH statement, except for the one to the exit
561 of the sections region. */
562 basic_block bb = single_succ (bb_for_stmt (ws_stmt));
563 t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
564 t = tree_cons (NULL, t, NULL);
565 return t;
568 gcc_unreachable ();
572 /* Discover whether REGION is a combined parallel+workshare region. */
574 static void
575 determine_parallel_type (struct omp_region *region)
577 basic_block par_entry_bb, par_exit_bb;
578 basic_block ws_entry_bb, ws_exit_bb;
580 if (region == NULL || region->inner == NULL
581 || region->exit == NULL || region->inner->exit == NULL
582 || region->inner->cont == NULL)
583 return;
585 /* We only support parallel+for and parallel+sections. */
586 if (region->type != OMP_PARALLEL
587 || (region->inner->type != OMP_FOR
588 && region->inner->type != OMP_SECTIONS))
589 return;
591 /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
592 WS_EXIT_BB -> PAR_EXIT_BB. */
593 par_entry_bb = region->entry;
594 par_exit_bb = region->exit;
595 ws_entry_bb = region->inner->entry;
596 ws_exit_bb = region->inner->exit;
598 if (single_succ (par_entry_bb) == ws_entry_bb
599 && single_succ (ws_exit_bb) == par_exit_bb
600 && workshare_safe_to_combine_p (par_entry_bb, ws_entry_bb)
601 && (OMP_PARALLEL_COMBINED (last_stmt (par_entry_bb))
602 || (last_and_only_stmt (ws_entry_bb)
603 && last_and_only_stmt (par_exit_bb))))
605 tree ws_stmt = last_stmt (ws_entry_bb);
607 if (region->inner->type == OMP_FOR)
609 /* If this is a combined parallel loop, we need to determine
610 whether or not to use the combined library calls. There
611 are two cases where we do not apply the transformation:
612 static loops and any kind of ordered loop. In the first
613 case, we already open code the loop so there is no need
614 to do anything else. In the latter case, the combined
615 parallel loop call would still need extra synchronization
616 to implement ordered semantics, so there would not be any
617 gain in using the combined call. */
618 tree clauses = OMP_FOR_CLAUSES (ws_stmt);
619 tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
620 if (c == NULL
621 || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
622 || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
624 region->is_combined_parallel = false;
625 region->inner->is_combined_parallel = false;
626 return;
630 region->is_combined_parallel = true;
631 region->inner->is_combined_parallel = true;
632 region->ws_args = get_ws_args_for (ws_stmt);
637 /* Return true if EXPR is variable sized. */
639 static inline bool
640 is_variable_sized (const_tree expr)
642 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
645 /* Return true if DECL is a reference type. */
647 static inline bool
648 is_reference (tree decl)
650 return lang_hooks.decls.omp_privatize_by_reference (decl);
653 /* Lookup variables in the decl or field splay trees. The "maybe" form
654 allows for the variable form to not have been entered, otherwise we
655 assert that the variable must have been entered. */
657 static inline tree
658 lookup_decl (tree var, omp_context *ctx)
660 tree *n;
661 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
662 return *n;
665 static inline tree
666 maybe_lookup_decl (const_tree var, omp_context *ctx)
668 tree *n;
669 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
670 return n ? *n : NULL_TREE;
673 static inline tree
674 lookup_field (tree var, omp_context *ctx)
676 splay_tree_node n;
677 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
678 return (tree) n->value;
681 static inline tree
682 lookup_sfield (tree var, omp_context *ctx)
684 splay_tree_node n;
685 n = splay_tree_lookup (ctx->sfield_map
686 ? ctx->sfield_map : ctx->field_map,
687 (splay_tree_key) var);
688 return (tree) n->value;
691 static inline tree
692 maybe_lookup_field (tree var, omp_context *ctx)
694 splay_tree_node n;
695 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
696 return n ? (tree) n->value : NULL_TREE;
699 /* Return true if DECL should be copied by pointer. SHARED_CTX is
700 the parallel context if DECL is to be shared. */
702 static bool
703 use_pointer_for_field (tree decl, omp_context *shared_ctx)
705 if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
706 return true;
708 /* We can only use copy-in/copy-out semantics for shared variables
709 when we know the value is not accessible from an outer scope. */
710 if (shared_ctx)
712 /* ??? Trivially accessible from anywhere. But why would we even
713 be passing an address in this case? Should we simply assert
714 this to be false, or should we have a cleanup pass that removes
715 these from the list of mappings? */
716 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
717 return true;
719 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
720 without analyzing the expression whether or not its location
721 is accessible to anyone else. In the case of nested parallel
722 regions it certainly may be. */
723 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
724 return true;
726 /* Do not use copy-in/copy-out for variables that have their
727 address taken. */
728 if (TREE_ADDRESSABLE (decl))
729 return true;
731 /* Disallow copy-in/out in nested parallel if
732 decl is shared in outer parallel, otherwise
733 each thread could store the shared variable
734 in its own copy-in location, making the
735 variable no longer really shared. */
736 if (!TREE_READONLY (decl) && shared_ctx->is_nested)
738 omp_context *up;
740 for (up = shared_ctx->outer; up; up = up->outer)
741 if (maybe_lookup_decl (decl, up))
742 break;
744 if (up && is_taskreg_ctx (up))
746 tree c;
748 for (c = OMP_TASKREG_CLAUSES (up->stmt);
749 c; c = OMP_CLAUSE_CHAIN (c))
750 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
751 && OMP_CLAUSE_DECL (c) == decl)
752 break;
754 if (c)
755 return true;
759 /* For tasks avoid using copy-in/out, unless they are readonly
760 (in which case just copy-in is used). As tasks can be
761 deferred or executed in different thread, when GOMP_task
762 returns, the task hasn't necessarily terminated. */
763 if (!TREE_READONLY (decl) && is_task_ctx (shared_ctx))
765 tree outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
766 if (is_gimple_reg (outer))
768 /* Taking address of OUTER in lower_send_shared_vars
769 might need regimplification of everything that uses the
770 variable. */
771 if (!task_shared_vars)
772 task_shared_vars = BITMAP_ALLOC (NULL);
773 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
774 TREE_ADDRESSABLE (outer) = 1;
776 return true;
780 return false;
783 /* Create a new VAR_DECL and copy information from VAR to it. */
785 tree
786 copy_var_decl (tree var, tree name, tree type)
788 tree copy = build_decl (VAR_DECL, name, type);
790 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
791 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
792 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
793 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (var);
794 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
795 DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
796 DECL_CONTEXT (copy) = DECL_CONTEXT (var);
797 DECL_SOURCE_LOCATION (copy) = DECL_SOURCE_LOCATION (var);
798 TREE_USED (copy) = 1;
799 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
801 return copy;
804 /* Construct a new automatic decl similar to VAR. */
806 static tree
807 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
809 tree copy = copy_var_decl (var, name, type);
811 DECL_CONTEXT (copy) = current_function_decl;
812 TREE_CHAIN (copy) = ctx->block_vars;
813 ctx->block_vars = copy;
815 return copy;
818 static tree
819 omp_copy_decl_1 (tree var, omp_context *ctx)
821 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
824 /* Build tree nodes to access the field for VAR on the receiver side. */
826 static tree
827 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
829 tree x, field = lookup_field (var, ctx);
831 /* If the receiver record type was remapped in the child function,
832 remap the field into the new record type. */
833 x = maybe_lookup_field (field, ctx);
834 if (x != NULL)
835 field = x;
837 x = build_fold_indirect_ref (ctx->receiver_decl);
838 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL);
839 if (by_ref)
840 x = build_fold_indirect_ref (x);
842 return x;
845 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
846 of a parallel, this is a component reference; for workshare constructs
847 this is some variable. */
849 static tree
850 build_outer_var_ref (tree var, omp_context *ctx)
852 tree x;
854 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
855 x = var;
856 else if (is_variable_sized (var))
858 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
859 x = build_outer_var_ref (x, ctx);
860 x = build_fold_indirect_ref (x);
862 else if (is_taskreg_ctx (ctx))
864 bool by_ref = use_pointer_for_field (var, NULL);
865 x = build_receiver_ref (var, by_ref, ctx);
867 else if (ctx->outer)
868 x = lookup_decl (var, ctx->outer);
869 else if (is_reference (var))
870 /* This can happen with orphaned constructs. If var is reference, it is
871 possible it is shared and as such valid. */
872 x = var;
873 else
874 gcc_unreachable ();
876 if (is_reference (var))
877 x = build_fold_indirect_ref (x);
879 return x;
882 /* Build tree nodes to access the field for VAR on the sender side. */
884 static tree
885 build_sender_ref (tree var, omp_context *ctx)
887 tree field = lookup_sfield (var, ctx);
888 return build3 (COMPONENT_REF, TREE_TYPE (field),
889 ctx->sender_decl, field, NULL);
892 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. */
894 static void
895 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
897 tree field, type, sfield = NULL_TREE;
899 gcc_assert ((mask & 1) == 0
900 || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
901 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
902 || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));
904 type = TREE_TYPE (var);
905 if (by_ref)
906 type = build_pointer_type (type);
907 else if ((mask & 3) == 1 && is_reference (var))
908 type = TREE_TYPE (type);
910 field = build_decl (FIELD_DECL, DECL_NAME (var), type);
912 /* Remember what variable this field was created for. This does have a
913 side effect of making dwarf2out ignore this member, so for helpful
914 debugging we clear it later in delete_omp_context. */
915 DECL_ABSTRACT_ORIGIN (field) = var;
916 if (type == TREE_TYPE (var))
918 DECL_ALIGN (field) = DECL_ALIGN (var);
919 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
920 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
922 else
923 DECL_ALIGN (field) = TYPE_ALIGN (type);
925 if ((mask & 3) == 3)
927 insert_field_into_struct (ctx->record_type, field);
928 if (ctx->srecord_type)
930 sfield = build_decl (FIELD_DECL, DECL_NAME (var), type);
931 DECL_ABSTRACT_ORIGIN (sfield) = var;
932 DECL_ALIGN (sfield) = DECL_ALIGN (field);
933 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
934 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
935 insert_field_into_struct (ctx->srecord_type, sfield);
938 else
940 if (ctx->srecord_type == NULL_TREE)
942 tree t;
944 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
945 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
946 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
948 sfield = build_decl (FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
949 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
950 insert_field_into_struct (ctx->srecord_type, sfield);
951 splay_tree_insert (ctx->sfield_map,
952 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
953 (splay_tree_value) sfield);
956 sfield = field;
957 insert_field_into_struct ((mask & 1) ? ctx->record_type
958 : ctx->srecord_type, field);
961 if (mask & 1)
962 splay_tree_insert (ctx->field_map, (splay_tree_key) var,
963 (splay_tree_value) field);
964 if ((mask & 2) && ctx->sfield_map)
965 splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
966 (splay_tree_value) sfield);
969 static tree
970 install_var_local (tree var, omp_context *ctx)
972 tree new_var = omp_copy_decl_1 (var, ctx);
973 insert_decl_map (&ctx->cb, var, new_var);
974 return new_var;
977 /* Adjust the replacement for DECL in CTX for the new context. This means
978 copying the DECL_VALUE_EXPR, and fixing up the type. */
980 static void
981 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
983 tree new_decl, size;
985 new_decl = lookup_decl (decl, ctx);
987 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
989 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
990 && DECL_HAS_VALUE_EXPR_P (decl))
992 tree ve = DECL_VALUE_EXPR (decl);
993 walk_tree (&ve, copy_body_r, &ctx->cb, NULL);
994 SET_DECL_VALUE_EXPR (new_decl, ve);
995 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
998 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
1000 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
1001 if (size == error_mark_node)
1002 size = TYPE_SIZE (TREE_TYPE (new_decl));
1003 DECL_SIZE (new_decl) = size;
1005 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
1006 if (size == error_mark_node)
1007 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
1008 DECL_SIZE_UNIT (new_decl) = size;
1012 /* The callback for remap_decl. Search all containing contexts for a
1013 mapping of the variable; this avoids having to duplicate the splay
1014 tree ahead of time. We know a mapping doesn't already exist in the
1015 given context. Create new mappings to implement default semantics. */
1017 static tree
1018 omp_copy_decl (tree var, copy_body_data *cb)
1020 omp_context *ctx = (omp_context *) cb;
1021 tree new_var;
1023 if (TREE_CODE (var) == LABEL_DECL)
1025 new_var = create_artificial_label ();
1026 DECL_CONTEXT (new_var) = current_function_decl;
1027 insert_decl_map (&ctx->cb, var, new_var);
1028 return new_var;
1031 while (!is_taskreg_ctx (ctx))
1033 ctx = ctx->outer;
1034 if (ctx == NULL)
1035 return var;
1036 new_var = maybe_lookup_decl (var, ctx);
1037 if (new_var)
1038 return new_var;
1041 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
1042 return var;
1044 return error_mark_node;
1048 /* Return the parallel region associated with STMT. */
1050 /* Debugging dumps for parallel regions. */
1051 void dump_omp_region (FILE *, struct omp_region *, int);
1052 void debug_omp_region (struct omp_region *);
1053 void debug_all_omp_regions (void);
1055 /* Dump the parallel region tree rooted at REGION. */
1057 void
1058 dump_omp_region (FILE *file, struct omp_region *region, int indent)
1060 fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
1061 tree_code_name[region->type]);
1063 if (region->inner)
1064 dump_omp_region (file, region->inner, indent + 4);
1066 if (region->cont)
1068 fprintf (file, "%*sbb %d: OMP_CONTINUE\n", indent, "",
1069 region->cont->index);
1072 if (region->exit)
1073 fprintf (file, "%*sbb %d: OMP_RETURN\n", indent, "",
1074 region->exit->index);
1075 else
1076 fprintf (file, "%*s[no exit marker]\n", indent, "");
1078 if (region->next)
1079 dump_omp_region (file, region->next, indent);
1082 void
1083 debug_omp_region (struct omp_region *region)
1085 dump_omp_region (stderr, region, 0);
1088 void
1089 debug_all_omp_regions (void)
1091 dump_omp_region (stderr, root_omp_region, 0);
1095 /* Create a new parallel region starting at STMT inside region PARENT. */
1097 struct omp_region *
1098 new_omp_region (basic_block bb, enum tree_code type, struct omp_region *parent)
1100 struct omp_region *region = XCNEW (struct omp_region);
1102 region->outer = parent;
1103 region->entry = bb;
1104 region->type = type;
1106 if (parent)
1108 /* This is a nested region. Add it to the list of inner
1109 regions in PARENT. */
1110 region->next = parent->inner;
1111 parent->inner = region;
1113 else
1115 /* This is a toplevel region. Add it to the list of toplevel
1116 regions in ROOT_OMP_REGION. */
1117 region->next = root_omp_region;
1118 root_omp_region = region;
1121 return region;
1124 /* Release the memory associated with the region tree rooted at REGION. */
1126 static void
1127 free_omp_region_1 (struct omp_region *region)
1129 struct omp_region *i, *n;
1131 for (i = region->inner; i ; i = n)
1133 n = i->next;
1134 free_omp_region_1 (i);
1137 free (region);
1140 /* Release the memory for the entire omp region tree. */
1142 void
1143 free_omp_regions (void)
1145 struct omp_region *r, *n;
1146 for (r = root_omp_region; r ; r = n)
1148 n = r->next;
1149 free_omp_region_1 (r);
1151 root_omp_region = NULL;
1155 /* Create a new context, with OUTER_CTX being the surrounding context. */
1157 static omp_context *
1158 new_omp_context (tree stmt, omp_context *outer_ctx)
1160 omp_context *ctx = XCNEW (omp_context);
1162 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
1163 (splay_tree_value) ctx);
1164 ctx->stmt = stmt;
1166 if (outer_ctx)
1168 ctx->outer = outer_ctx;
1169 ctx->cb = outer_ctx->cb;
1170 ctx->cb.block = NULL;
1171 ctx->depth = outer_ctx->depth + 1;
1173 else
1175 ctx->cb.src_fn = current_function_decl;
1176 ctx->cb.dst_fn = current_function_decl;
1177 ctx->cb.src_node = cgraph_node (current_function_decl);
1178 ctx->cb.dst_node = ctx->cb.src_node;
1179 ctx->cb.src_cfun = cfun;
1180 ctx->cb.copy_decl = omp_copy_decl;
1181 ctx->cb.eh_region = -1;
1182 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
1183 ctx->depth = 1;
1186 ctx->cb.decl_map = pointer_map_create ();
1188 return ctx;
1191 static void maybe_catch_exception (tree *stmt_p);
1193 /* Finalize task copyfn. */
1195 static void
1196 finalize_task_copyfn (tree task_stmt)
1198 struct function *child_cfun;
1199 tree child_fn, old_fn;
1201 child_fn = OMP_TASK_COPYFN (task_stmt);
1202 if (child_fn == NULL_TREE)
1203 return;
1205 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1207 /* Inform the callgraph about the new function. */
1208 DECL_STRUCT_FUNCTION (child_fn)->curr_properties
1209 = cfun->curr_properties;
1211 old_fn = current_function_decl;
1212 push_cfun (child_cfun);
1213 current_function_decl = child_fn;
1214 gimplify_body (&DECL_SAVED_TREE (child_fn), child_fn, false);
1215 maybe_catch_exception (&BIND_EXPR_BODY (DECL_SAVED_TREE (child_fn)));
1216 pop_cfun ();
1217 current_function_decl = old_fn;
1219 cgraph_add_new_function (child_fn, false);
1222 /* Destroy a omp_context data structures. Called through the splay tree
1223 value delete callback. */
1225 static void
1226 delete_omp_context (splay_tree_value value)
1228 omp_context *ctx = (omp_context *) value;
1230 pointer_map_destroy (ctx->cb.decl_map);
1232 if (ctx->field_map)
1233 splay_tree_delete (ctx->field_map);
1234 if (ctx->sfield_map)
1235 splay_tree_delete (ctx->sfield_map);
1237 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1238 it produces corrupt debug information. */
1239 if (ctx->record_type)
1241 tree t;
1242 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
1243 DECL_ABSTRACT_ORIGIN (t) = NULL;
1245 if (ctx->srecord_type)
1247 tree t;
1248 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = TREE_CHAIN (t))
1249 DECL_ABSTRACT_ORIGIN (t) = NULL;
1252 if (is_task_ctx (ctx))
1253 finalize_task_copyfn (ctx->stmt);
1255 XDELETE (ctx);
1258 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1259 context. */
1261 static void
1262 fixup_child_record_type (omp_context *ctx)
1264 tree f, type = ctx->record_type;
1266 /* ??? It isn't sufficient to just call remap_type here, because
1267 variably_modified_type_p doesn't work the way we expect for
1268 record types. Testing each field for whether it needs remapping
1269 and creating a new record by hand works, however. */
1270 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1271 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1272 break;
1273 if (f)
1275 tree name, new_fields = NULL;
1277 type = lang_hooks.types.make_type (RECORD_TYPE);
1278 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1279 name = build_decl (TYPE_DECL, name, type);
1280 TYPE_NAME (type) = name;
1282 for (f = TYPE_FIELDS (ctx->record_type); f ; f = TREE_CHAIN (f))
1284 tree new_f = copy_node (f);
1285 DECL_CONTEXT (new_f) = type;
1286 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1287 TREE_CHAIN (new_f) = new_fields;
1288 walk_tree (&DECL_SIZE (new_f), copy_body_r, &ctx->cb, NULL);
1289 walk_tree (&DECL_SIZE_UNIT (new_f), copy_body_r, &ctx->cb, NULL);
1290 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_body_r, &ctx->cb, NULL);
1291 new_fields = new_f;
1293 /* Arrange to be able to look up the receiver field
1294 given the sender field. */
1295 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1296 (splay_tree_value) new_f);
1298 TYPE_FIELDS (type) = nreverse (new_fields);
1299 layout_type (type);
1302 TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
1305 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1306 specified by CLAUSES. */
1308 static void
1309 scan_sharing_clauses (tree clauses, omp_context *ctx)
1311 tree c, decl;
1312 bool scan_array_reductions = false;
1314 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1316 bool by_ref;
1318 switch (OMP_CLAUSE_CODE (c))
1320 case OMP_CLAUSE_PRIVATE:
1321 decl = OMP_CLAUSE_DECL (c);
1322 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1323 goto do_private;
1324 else if (!is_variable_sized (decl))
1325 install_var_local (decl, ctx);
1326 break;
1328 case OMP_CLAUSE_SHARED:
1329 gcc_assert (is_taskreg_ctx (ctx));
1330 decl = OMP_CLAUSE_DECL (c);
1331 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1332 || !is_variable_sized (decl));
1333 /* Global variables don't need to be copied,
1334 the receiver side will use them directly. */
1335 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1336 break;
1337 by_ref = use_pointer_for_field (decl, ctx);
1338 if (! TREE_READONLY (decl)
1339 || TREE_ADDRESSABLE (decl)
1340 || by_ref
1341 || is_reference (decl))
1343 install_var_field (decl, by_ref, 3, ctx);
1344 install_var_local (decl, ctx);
1345 break;
1347 /* We don't need to copy const scalar vars back. */
1348 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1349 goto do_private;
1351 case OMP_CLAUSE_LASTPRIVATE:
1352 /* Let the corresponding firstprivate clause create
1353 the variable. */
1354 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1355 break;
1356 /* FALLTHRU */
1358 case OMP_CLAUSE_FIRSTPRIVATE:
1359 case OMP_CLAUSE_REDUCTION:
1360 decl = OMP_CLAUSE_DECL (c);
1361 do_private:
1362 if (is_variable_sized (decl))
1364 if (is_task_ctx (ctx))
1365 install_var_field (decl, false, 1, ctx);
1366 break;
1368 else if (is_taskreg_ctx (ctx))
1370 bool global
1371 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1372 by_ref = use_pointer_for_field (decl, NULL);
1374 if (is_task_ctx (ctx)
1375 && (global || by_ref || is_reference (decl)))
1377 install_var_field (decl, false, 1, ctx);
1378 if (!global)
1379 install_var_field (decl, by_ref, 2, ctx);
1381 else if (!global)
1382 install_var_field (decl, by_ref, 3, ctx);
1384 install_var_local (decl, ctx);
1385 break;
1387 case OMP_CLAUSE_COPYPRIVATE:
1388 if (ctx->outer)
1389 scan_omp (&OMP_CLAUSE_DECL (c), ctx->outer);
1390 /* FALLTHRU */
1392 case OMP_CLAUSE_COPYIN:
1393 decl = OMP_CLAUSE_DECL (c);
1394 by_ref = use_pointer_for_field (decl, NULL);
1395 install_var_field (decl, by_ref, 3, ctx);
1396 break;
1398 case OMP_CLAUSE_DEFAULT:
1399 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1400 break;
1402 case OMP_CLAUSE_IF:
1403 case OMP_CLAUSE_NUM_THREADS:
1404 case OMP_CLAUSE_SCHEDULE:
1405 if (ctx->outer)
1406 scan_omp (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1407 break;
1409 case OMP_CLAUSE_NOWAIT:
1410 case OMP_CLAUSE_ORDERED:
1411 case OMP_CLAUSE_COLLAPSE:
1412 case OMP_CLAUSE_UNTIED:
1413 break;
1415 default:
1416 gcc_unreachable ();
1420 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1422 switch (OMP_CLAUSE_CODE (c))
1424 case OMP_CLAUSE_LASTPRIVATE:
1425 /* Let the corresponding firstprivate clause create
1426 the variable. */
1427 if (OMP_CLAUSE_LASTPRIVATE_STMT (c))
1428 scan_array_reductions = true;
1429 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1430 break;
1431 /* FALLTHRU */
1433 case OMP_CLAUSE_PRIVATE:
1434 case OMP_CLAUSE_FIRSTPRIVATE:
1435 case OMP_CLAUSE_REDUCTION:
1436 decl = OMP_CLAUSE_DECL (c);
1437 if (is_variable_sized (decl))
1438 install_var_local (decl, ctx);
1439 fixup_remapped_decl (decl, ctx,
1440 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1441 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1442 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1443 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1444 scan_array_reductions = true;
1445 break;
1447 case OMP_CLAUSE_SHARED:
1448 decl = OMP_CLAUSE_DECL (c);
1449 if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1450 fixup_remapped_decl (decl, ctx, false);
1451 break;
1453 case OMP_CLAUSE_COPYPRIVATE:
1454 case OMP_CLAUSE_COPYIN:
1455 case OMP_CLAUSE_DEFAULT:
1456 case OMP_CLAUSE_IF:
1457 case OMP_CLAUSE_NUM_THREADS:
1458 case OMP_CLAUSE_SCHEDULE:
1459 case OMP_CLAUSE_NOWAIT:
1460 case OMP_CLAUSE_ORDERED:
1461 case OMP_CLAUSE_COLLAPSE:
1462 case OMP_CLAUSE_UNTIED:
1463 break;
1465 default:
1466 gcc_unreachable ();
1470 if (scan_array_reductions)
1471 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1472 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1473 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1475 scan_omp (&OMP_CLAUSE_REDUCTION_INIT (c), ctx);
1476 scan_omp (&OMP_CLAUSE_REDUCTION_MERGE (c), ctx);
1478 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1479 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
1480 scan_omp (&OMP_CLAUSE_LASTPRIVATE_STMT (c), ctx);
1483 /* Create a new name for omp child function. Returns an identifier. */
1485 static GTY(()) unsigned int tmp_ompfn_id_num;
1487 static tree
1488 create_omp_child_function_name (bool task_copy)
1490 tree name = DECL_ASSEMBLER_NAME (current_function_decl);
1491 size_t len = IDENTIFIER_LENGTH (name);
1492 char *tmp_name, *prefix;
1493 const char *suffix;
1495 suffix = task_copy ? "_omp_cpyfn" : "_omp_fn";
1496 prefix = XALLOCAVEC (char, len + strlen (suffix) + 1);
1497 memcpy (prefix, IDENTIFIER_POINTER (name), len);
1498 strcpy (prefix + len, suffix);
1499 #ifndef NO_DOT_IN_LABEL
1500 prefix[len] = '.';
1501 #elif !defined NO_DOLLAR_IN_LABEL
1502 prefix[len] = '$';
1503 #endif
1504 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, tmp_ompfn_id_num++);
1505 return get_identifier (tmp_name);
1508 /* Build a decl for the omp child function. It'll not contain a body
1509 yet, just the bare decl. */
1511 static void
1512 create_omp_child_function (omp_context *ctx, bool task_copy)
1514 tree decl, type, name, t;
1516 name = create_omp_child_function_name (task_copy);
1517 if (task_copy)
1518 type = build_function_type_list (void_type_node, ptr_type_node,
1519 ptr_type_node, NULL_TREE);
1520 else
1521 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1523 decl = build_decl (FUNCTION_DECL, name, type);
1524 decl = lang_hooks.decls.pushdecl (decl);
1526 if (!task_copy)
1527 ctx->cb.dst_fn = decl;
1528 else
1529 OMP_TASK_COPYFN (ctx->stmt) = decl;
1531 TREE_STATIC (decl) = 1;
1532 TREE_USED (decl) = 1;
1533 DECL_ARTIFICIAL (decl) = 1;
1534 DECL_IGNORED_P (decl) = 0;
1535 TREE_PUBLIC (decl) = 0;
1536 DECL_UNINLINABLE (decl) = 1;
1537 DECL_EXTERNAL (decl) = 0;
1538 DECL_CONTEXT (decl) = NULL_TREE;
1539 DECL_INITIAL (decl) = make_node (BLOCK);
1541 t = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1542 DECL_ARTIFICIAL (t) = 1;
1543 DECL_IGNORED_P (t) = 1;
1544 DECL_RESULT (decl) = t;
1546 t = build_decl (PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
1547 DECL_ARTIFICIAL (t) = 1;
1548 DECL_ARG_TYPE (t) = ptr_type_node;
1549 DECL_CONTEXT (t) = current_function_decl;
1550 TREE_USED (t) = 1;
1551 DECL_ARGUMENTS (decl) = t;
1552 if (!task_copy)
1553 ctx->receiver_decl = t;
1554 else
1556 t = build_decl (PARM_DECL, get_identifier (".omp_data_o"),
1557 ptr_type_node);
1558 DECL_ARTIFICIAL (t) = 1;
1559 DECL_ARG_TYPE (t) = ptr_type_node;
1560 DECL_CONTEXT (t) = current_function_decl;
1561 TREE_USED (t) = 1;
1562 TREE_CHAIN (t) = DECL_ARGUMENTS (decl);
1563 DECL_ARGUMENTS (decl) = t;
1566 /* Allocate memory for the function structure. The call to
1567 allocate_struct_function clobbers CFUN, so we need to restore
1568 it afterward. */
1569 push_struct_function (decl);
1570 DECL_SOURCE_LOCATION (decl) = EXPR_LOCATION (ctx->stmt);
1571 cfun->function_end_locus = EXPR_LOCATION (ctx->stmt);
1572 pop_cfun ();
1576 /* Scan an OpenMP parallel directive. */
1578 static void
1579 scan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
1581 omp_context *ctx;
1582 tree name;
1584 /* Ignore parallel directives with empty bodies, unless there
1585 are copyin clauses. */
1586 if (optimize > 0
1587 && empty_body_p (OMP_PARALLEL_BODY (*stmt_p))
1588 && find_omp_clause (OMP_CLAUSES (*stmt_p), OMP_CLAUSE_COPYIN) == NULL)
1590 *stmt_p = build_empty_stmt ();
1591 return;
1594 ctx = new_omp_context (*stmt_p, outer_ctx);
1595 if (taskreg_nesting_level > 1)
1596 ctx->is_nested = true;
1597 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1598 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1599 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1600 name = create_tmp_var_name (".omp_data_s");
1601 name = build_decl (TYPE_DECL, name, ctx->record_type);
1602 TYPE_NAME (ctx->record_type) = name;
1603 create_omp_child_function (ctx, false);
1604 OMP_PARALLEL_FN (*stmt_p) = ctx->cb.dst_fn;
1606 scan_sharing_clauses (OMP_PARALLEL_CLAUSES (*stmt_p), ctx);
1607 scan_omp (&OMP_PARALLEL_BODY (*stmt_p), ctx);
1609 if (TYPE_FIELDS (ctx->record_type) == NULL)
1610 ctx->record_type = ctx->receiver_decl = NULL;
1611 else
1613 layout_type (ctx->record_type);
1614 fixup_child_record_type (ctx);
1618 /* Scan an OpenMP task directive. */
1620 static void
1621 scan_omp_task (tree *stmt_p, omp_context *outer_ctx)
1623 omp_context *ctx;
1624 tree name;
1626 /* Ignore task directives with empty bodies. */
1627 if (optimize > 0
1628 && empty_body_p (OMP_TASK_BODY (*stmt_p)))
1630 *stmt_p = build_empty_stmt ();
1631 return;
1634 ctx = new_omp_context (*stmt_p, outer_ctx);
1635 if (taskreg_nesting_level > 1)
1636 ctx->is_nested = true;
1637 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1638 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1639 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1640 name = create_tmp_var_name (".omp_data_s");
1641 name = build_decl (TYPE_DECL, name, ctx->record_type);
1642 TYPE_NAME (ctx->record_type) = name;
1643 create_omp_child_function (ctx, false);
1644 OMP_TASK_FN (*stmt_p) = ctx->cb.dst_fn;
1646 scan_sharing_clauses (OMP_TASK_CLAUSES (*stmt_p), ctx);
1648 if (ctx->srecord_type)
1650 name = create_tmp_var_name (".omp_data_a");
1651 name = build_decl (TYPE_DECL, name, ctx->srecord_type);
1652 TYPE_NAME (ctx->srecord_type) = name;
1653 create_omp_child_function (ctx, true);
1656 scan_omp (&OMP_TASK_BODY (*stmt_p), ctx);
1658 if (TYPE_FIELDS (ctx->record_type) == NULL)
1660 ctx->record_type = ctx->receiver_decl = NULL;
1661 OMP_TASK_ARG_SIZE (*stmt_p)
1662 = build_int_cst (long_integer_type_node, 0);
1663 OMP_TASK_ARG_ALIGN (*stmt_p)
1664 = build_int_cst (long_integer_type_node, 1);
1666 else
1668 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1669 /* Move VLA fields to the end. */
1670 p = &TYPE_FIELDS (ctx->record_type);
1671 while (*p)
1672 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1673 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1675 *q = *p;
1676 *p = TREE_CHAIN (*p);
1677 TREE_CHAIN (*q) = NULL_TREE;
1678 q = &TREE_CHAIN (*q);
1680 else
1681 p = &TREE_CHAIN (*p);
1682 *p = vla_fields;
1683 layout_type (ctx->record_type);
1684 fixup_child_record_type (ctx);
1685 if (ctx->srecord_type)
1686 layout_type (ctx->srecord_type);
1687 OMP_TASK_ARG_SIZE (*stmt_p)
1688 = fold_convert (long_integer_type_node,
1689 TYPE_SIZE_UNIT (ctx->record_type));
1690 OMP_TASK_ARG_ALIGN (*stmt_p)
1691 = build_int_cst (long_integer_type_node,
1692 TYPE_ALIGN_UNIT (ctx->record_type));
1697 /* Scan an OpenMP loop directive. */
1699 static void
1700 scan_omp_for (tree *stmt_p, omp_context *outer_ctx)
1702 omp_context *ctx;
1703 tree stmt;
1704 int i;
1706 stmt = *stmt_p;
1707 ctx = new_omp_context (stmt, outer_ctx);
1709 scan_sharing_clauses (OMP_FOR_CLAUSES (stmt), ctx);
1711 scan_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
1712 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
1714 scan_omp (&TREE_VEC_ELT (OMP_FOR_INIT (stmt), i), ctx);
1715 scan_omp (&TREE_VEC_ELT (OMP_FOR_COND (stmt), i), ctx);
1716 scan_omp (&TREE_VEC_ELT (OMP_FOR_INCR (stmt), i), ctx);
1718 scan_omp (&OMP_FOR_BODY (stmt), ctx);
1721 /* Scan an OpenMP sections directive. */
1723 static void
1724 scan_omp_sections (tree *stmt_p, omp_context *outer_ctx)
1726 tree stmt;
1727 omp_context *ctx;
1729 stmt = *stmt_p;
1730 ctx = new_omp_context (stmt, outer_ctx);
1731 scan_sharing_clauses (OMP_SECTIONS_CLAUSES (stmt), ctx);
1732 scan_omp (&OMP_SECTIONS_BODY (stmt), ctx);
1735 /* Scan an OpenMP single directive. */
1737 static void
1738 scan_omp_single (tree *stmt_p, omp_context *outer_ctx)
1740 tree stmt = *stmt_p;
1741 omp_context *ctx;
1742 tree name;
1744 ctx = new_omp_context (stmt, outer_ctx);
1745 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1746 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1747 name = create_tmp_var_name (".omp_copy_s");
1748 name = build_decl (TYPE_DECL, name, ctx->record_type);
1749 TYPE_NAME (ctx->record_type) = name;
1751 scan_sharing_clauses (OMP_SINGLE_CLAUSES (stmt), ctx);
1752 scan_omp (&OMP_SINGLE_BODY (stmt), ctx);
1754 if (TYPE_FIELDS (ctx->record_type) == NULL)
1755 ctx->record_type = NULL;
1756 else
1757 layout_type (ctx->record_type);
1761 /* Check OpenMP nesting restrictions. */
1762 static void
1763 check_omp_nesting_restrictions (tree t, omp_context *ctx)
1765 switch (TREE_CODE (t))
1767 case OMP_FOR:
1768 case OMP_SECTIONS:
1769 case OMP_SINGLE:
1770 case CALL_EXPR:
1771 for (; ctx != NULL; ctx = ctx->outer)
1772 switch (TREE_CODE (ctx->stmt))
1774 case OMP_FOR:
1775 case OMP_SECTIONS:
1776 case OMP_SINGLE:
1777 case OMP_ORDERED:
1778 case OMP_MASTER:
1779 case OMP_TASK:
1780 if (TREE_CODE (t) == CALL_EXPR)
1782 warning (0, "barrier region may not be closely nested inside "
1783 "of work-sharing, critical, ordered, master or "
1784 "explicit task region");
1785 return;
1787 warning (0, "work-sharing region may not be closely nested inside "
1788 "of work-sharing, critical, ordered, master or explicit "
1789 "task region");
1790 return;
1791 case OMP_PARALLEL:
1792 return;
1793 default:
1794 break;
1796 break;
1797 case OMP_MASTER:
1798 for (; ctx != NULL; ctx = ctx->outer)
1799 switch (TREE_CODE (ctx->stmt))
1801 case OMP_FOR:
1802 case OMP_SECTIONS:
1803 case OMP_SINGLE:
1804 case OMP_TASK:
1805 warning (0, "master region may not be closely nested inside "
1806 "of work-sharing or explicit task region");
1807 return;
1808 case OMP_PARALLEL:
1809 return;
1810 default:
1811 break;
1813 break;
1814 case OMP_ORDERED:
1815 for (; ctx != NULL; ctx = ctx->outer)
1816 switch (TREE_CODE (ctx->stmt))
1818 case OMP_CRITICAL:
1819 case OMP_TASK:
1820 warning (0, "ordered region may not be closely nested inside "
1821 "of critical or explicit task region");
1822 return;
1823 case OMP_FOR:
1824 if (find_omp_clause (OMP_CLAUSES (ctx->stmt),
1825 OMP_CLAUSE_ORDERED) == NULL)
1826 warning (0, "ordered region must be closely nested inside "
1827 "a loop region with an ordered clause");
1828 return;
1829 case OMP_PARALLEL:
1830 return;
1831 default:
1832 break;
1834 break;
1835 case OMP_CRITICAL:
1836 for (; ctx != NULL; ctx = ctx->outer)
1837 if (TREE_CODE (ctx->stmt) == OMP_CRITICAL
1838 && OMP_CRITICAL_NAME (t) == OMP_CRITICAL_NAME (ctx->stmt))
1840 warning (0, "critical region may not be nested inside a critical "
1841 "region with the same name");
1842 return;
1844 break;
1845 default:
1846 break;
1851 /* Callback for walk_stmts used to scan for OpenMP directives at TP. */
1853 static tree
1854 scan_omp_1 (tree *tp, int *walk_subtrees, void *data)
1856 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1857 omp_context *ctx = (omp_context *) wi->info;
1858 tree t = *tp;
1860 if (EXPR_HAS_LOCATION (t))
1861 input_location = EXPR_LOCATION (t);
1863 /* Check the OpenMP nesting restrictions. */
1864 if (ctx != NULL)
1866 if (OMP_DIRECTIVE_P (t))
1867 check_omp_nesting_restrictions (t, ctx);
1868 else if (TREE_CODE (t) == CALL_EXPR)
1870 tree fndecl = get_callee_fndecl (t);
1871 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1872 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
1873 check_omp_nesting_restrictions (t, ctx);
1877 *walk_subtrees = 0;
1878 switch (TREE_CODE (t))
1880 case OMP_PARALLEL:
1881 taskreg_nesting_level++;
1882 scan_omp_parallel (tp, ctx);
1883 taskreg_nesting_level--;
1884 break;
1886 case OMP_TASK:
1887 taskreg_nesting_level++;
1888 scan_omp_task (tp, ctx);
1889 taskreg_nesting_level--;
1890 break;
1892 case OMP_FOR:
1893 scan_omp_for (tp, ctx);
1894 break;
1896 case OMP_SECTIONS:
1897 scan_omp_sections (tp, ctx);
1898 break;
1900 case OMP_SINGLE:
1901 scan_omp_single (tp, ctx);
1902 break;
1904 case OMP_SECTION:
1905 case OMP_MASTER:
1906 case OMP_ORDERED:
1907 case OMP_CRITICAL:
1908 ctx = new_omp_context (*tp, ctx);
1909 scan_omp (&OMP_BODY (*tp), ctx);
1910 break;
1912 case BIND_EXPR:
1914 tree var;
1915 *walk_subtrees = 1;
1917 for (var = BIND_EXPR_VARS (t); var ; var = TREE_CHAIN (var))
1918 insert_decl_map (&ctx->cb, var, var);
1920 break;
1922 case VAR_DECL:
1923 case PARM_DECL:
1924 case LABEL_DECL:
1925 case RESULT_DECL:
1926 if (ctx)
1927 *tp = remap_decl (t, &ctx->cb);
1928 break;
1930 default:
1931 if (ctx && TYPE_P (t))
1932 *tp = remap_type (t, &ctx->cb);
1933 else if (!DECL_P (t))
1934 *walk_subtrees = 1;
1935 break;
1938 return NULL_TREE;
1942 /* Scan all the statements starting at STMT_P. CTX contains context
1943 information about the OpenMP directives and clauses found during
1944 the scan. */
1946 static void
1947 scan_omp (tree *stmt_p, omp_context *ctx)
1949 location_t saved_location;
1950 struct walk_stmt_info wi;
1952 memset (&wi, 0, sizeof (wi));
1953 wi.callback = scan_omp_1;
1954 wi.info = ctx;
1955 wi.want_bind_expr = (ctx != NULL);
1956 wi.want_locations = true;
1958 saved_location = input_location;
1959 walk_stmts (&wi, stmt_p);
1960 input_location = saved_location;
1963 /* Re-gimplification and code generation routines. */
1965 /* Build a call to GOMP_barrier. */
1967 static tree
1968 build_omp_barrier (void)
1970 return build_call_expr (built_in_decls[BUILT_IN_GOMP_BARRIER], 0);
1973 /* If a context was created for STMT when it was scanned, return it. */
1975 static omp_context *
1976 maybe_lookup_ctx (tree stmt)
1978 splay_tree_node n;
1979 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
1980 return n ? (omp_context *) n->value : NULL;
1984 /* Find the mapping for DECL in CTX or the immediately enclosing
1985 context that has a mapping for DECL.
1987 If CTX is a nested parallel directive, we may have to use the decl
1988 mappings created in CTX's parent context. Suppose that we have the
1989 following parallel nesting (variable UIDs showed for clarity):
1991 iD.1562 = 0;
1992 #omp parallel shared(iD.1562) -> outer parallel
1993 iD.1562 = iD.1562 + 1;
1995 #omp parallel shared (iD.1562) -> inner parallel
1996 iD.1562 = iD.1562 - 1;
1998 Each parallel structure will create a distinct .omp_data_s structure
1999 for copying iD.1562 in/out of the directive:
2001 outer parallel .omp_data_s.1.i -> iD.1562
2002 inner parallel .omp_data_s.2.i -> iD.1562
2004 A shared variable mapping will produce a copy-out operation before
2005 the parallel directive and a copy-in operation after it. So, in
2006 this case we would have:
2008 iD.1562 = 0;
2009 .omp_data_o.1.i = iD.1562;
2010 #omp parallel shared(iD.1562) -> outer parallel
2011 .omp_data_i.1 = &.omp_data_o.1
2012 .omp_data_i.1->i = .omp_data_i.1->i + 1;
2014 .omp_data_o.2.i = iD.1562; -> **
2015 #omp parallel shared(iD.1562) -> inner parallel
2016 .omp_data_i.2 = &.omp_data_o.2
2017 .omp_data_i.2->i = .omp_data_i.2->i - 1;
2020 ** This is a problem. The symbol iD.1562 cannot be referenced
2021 inside the body of the outer parallel region. But since we are
2022 emitting this copy operation while expanding the inner parallel
2023 directive, we need to access the CTX structure of the outer
2024 parallel directive to get the correct mapping:
2026 .omp_data_o.2.i = .omp_data_i.1->i
2028 Since there may be other workshare or parallel directives enclosing
2029 the parallel directive, it may be necessary to walk up the context
2030 parent chain. This is not a problem in general because nested
2031 parallelism happens only rarely. */
2033 static tree
2034 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2036 tree t;
2037 omp_context *up;
2039 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2040 t = maybe_lookup_decl (decl, up);
2042 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
2044 return t ? t : decl;
2048 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
2049 in outer contexts. */
2051 static tree
2052 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2054 tree t = NULL;
2055 omp_context *up;
2057 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2058 t = maybe_lookup_decl (decl, up);
2060 return t ? t : decl;
2064 /* Construct the initialization value for reduction CLAUSE. */
2066 tree
2067 omp_reduction_init (tree clause, tree type)
2069 switch (OMP_CLAUSE_REDUCTION_CODE (clause))
2071 case PLUS_EXPR:
2072 case MINUS_EXPR:
2073 case BIT_IOR_EXPR:
2074 case BIT_XOR_EXPR:
2075 case TRUTH_OR_EXPR:
2076 case TRUTH_ORIF_EXPR:
2077 case TRUTH_XOR_EXPR:
2078 case NE_EXPR:
2079 return fold_convert (type, integer_zero_node);
2081 case MULT_EXPR:
2082 case TRUTH_AND_EXPR:
2083 case TRUTH_ANDIF_EXPR:
2084 case EQ_EXPR:
2085 return fold_convert (type, integer_one_node);
2087 case BIT_AND_EXPR:
2088 return fold_convert (type, integer_minus_one_node);
2090 case MAX_EXPR:
2091 if (SCALAR_FLOAT_TYPE_P (type))
2093 REAL_VALUE_TYPE max, min;
2094 if (HONOR_INFINITIES (TYPE_MODE (type)))
2096 real_inf (&max);
2097 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
2099 else
2100 real_maxval (&min, 1, TYPE_MODE (type));
2101 return build_real (type, min);
2103 else
2105 gcc_assert (INTEGRAL_TYPE_P (type));
2106 return TYPE_MIN_VALUE (type);
2109 case MIN_EXPR:
2110 if (SCALAR_FLOAT_TYPE_P (type))
2112 REAL_VALUE_TYPE max;
2113 if (HONOR_INFINITIES (TYPE_MODE (type)))
2114 real_inf (&max);
2115 else
2116 real_maxval (&max, 0, TYPE_MODE (type));
2117 return build_real (type, max);
2119 else
2121 gcc_assert (INTEGRAL_TYPE_P (type));
2122 return TYPE_MAX_VALUE (type);
2125 default:
2126 gcc_unreachable ();
2130 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
2131 from the receiver (aka child) side and initializers for REFERENCE_TYPE
2132 private variables. Initialization statements go in ILIST, while calls
2133 to destructors go in DLIST. */
2135 static void
2136 lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
2137 omp_context *ctx)
2139 tree_stmt_iterator diter;
2140 tree c, dtor, copyin_seq, x, ptr;
2141 bool copyin_by_ref = false;
2142 bool lastprivate_firstprivate = false;
2143 int pass;
2145 *dlist = alloc_stmt_list ();
2146 diter = tsi_start (*dlist);
2147 copyin_seq = NULL;
2149 /* Do all the fixed sized types in the first pass, and the variable sized
2150 types in the second pass. This makes sure that the scalar arguments to
2151 the variable sized types are processed before we use them in the
2152 variable sized operations. */
2153 for (pass = 0; pass < 2; ++pass)
2155 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2157 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
2158 tree var, new_var;
2159 bool by_ref;
2161 switch (c_kind)
2163 case OMP_CLAUSE_PRIVATE:
2164 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
2165 continue;
2166 break;
2167 case OMP_CLAUSE_SHARED:
2168 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
2170 gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
2171 continue;
2173 case OMP_CLAUSE_FIRSTPRIVATE:
2174 case OMP_CLAUSE_COPYIN:
2175 case OMP_CLAUSE_REDUCTION:
2176 break;
2177 case OMP_CLAUSE_LASTPRIVATE:
2178 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
2180 lastprivate_firstprivate = true;
2181 if (pass != 0)
2182 continue;
2184 break;
2185 default:
2186 continue;
2189 new_var = var = OMP_CLAUSE_DECL (c);
2190 if (c_kind != OMP_CLAUSE_COPYIN)
2191 new_var = lookup_decl (var, ctx);
2193 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
2195 if (pass != 0)
2196 continue;
2198 else if (is_variable_sized (var))
2200 /* For variable sized types, we need to allocate the
2201 actual storage here. Call alloca and store the
2202 result in the pointer decl that we created elsewhere. */
2203 if (pass == 0)
2204 continue;
2206 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
2208 ptr = DECL_VALUE_EXPR (new_var);
2209 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
2210 ptr = TREE_OPERAND (ptr, 0);
2211 gcc_assert (DECL_P (ptr));
2212 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
2213 x = build_call_expr (built_in_decls[BUILT_IN_ALLOCA], 1, x);
2214 x = fold_convert (TREE_TYPE (ptr), x);
2215 x = build_gimple_modify_stmt (ptr, x);
2216 gimplify_and_add (x, ilist);
2219 else if (is_reference (var))
2221 /* For references that are being privatized for Fortran,
2222 allocate new backing storage for the new pointer
2223 variable. This allows us to avoid changing all the
2224 code that expects a pointer to something that expects
2225 a direct variable. Note that this doesn't apply to
2226 C++, since reference types are disallowed in data
2227 sharing clauses there, except for NRV optimized
2228 return values. */
2229 if (pass == 0)
2230 continue;
2232 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
2233 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
2235 x = build_receiver_ref (var, false, ctx);
2236 x = build_fold_addr_expr (x);
2238 else if (TREE_CONSTANT (x))
2240 const char *name = NULL;
2241 if (DECL_NAME (var))
2242 name = IDENTIFIER_POINTER (DECL_NAME (new_var));
2244 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
2245 name);
2246 gimple_add_tmp_var (x);
2247 x = build_fold_addr_expr_with_type (x, TREE_TYPE (new_var));
2249 else
2251 x = build_call_expr (built_in_decls[BUILT_IN_ALLOCA], 1, x);
2252 x = fold_convert (TREE_TYPE (new_var), x);
2255 x = build_gimple_modify_stmt (new_var, x);
2256 gimplify_and_add (x, ilist);
2258 new_var = build_fold_indirect_ref (new_var);
2260 else if (c_kind == OMP_CLAUSE_REDUCTION
2261 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2263 if (pass == 0)
2264 continue;
2266 else if (pass != 0)
2267 continue;
2269 switch (OMP_CLAUSE_CODE (c))
2271 case OMP_CLAUSE_SHARED:
2272 /* Shared global vars are just accessed directly. */
2273 if (is_global_var (new_var))
2274 break;
2275 /* Set up the DECL_VALUE_EXPR for shared variables now. This
2276 needs to be delayed until after fixup_child_record_type so
2277 that we get the correct type during the dereference. */
2278 by_ref = use_pointer_for_field (var, ctx);
2279 x = build_receiver_ref (var, by_ref, ctx);
2280 SET_DECL_VALUE_EXPR (new_var, x);
2281 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
2283 /* ??? If VAR is not passed by reference, and the variable
2284 hasn't been initialized yet, then we'll get a warning for
2285 the store into the omp_data_s structure. Ideally, we'd be
2286 able to notice this and not store anything at all, but
2287 we're generating code too early. Suppress the warning. */
2288 if (!by_ref)
2289 TREE_NO_WARNING (var) = 1;
2290 break;
2292 case OMP_CLAUSE_LASTPRIVATE:
2293 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
2294 break;
2295 /* FALLTHRU */
2297 case OMP_CLAUSE_PRIVATE:
2298 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
2299 x = build_outer_var_ref (var, ctx);
2300 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
2302 if (is_task_ctx (ctx))
2303 x = build_receiver_ref (var, false, ctx);
2304 else
2305 x = build_outer_var_ref (var, ctx);
2307 else
2308 x = NULL;
2309 x = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
2310 if (x)
2311 gimplify_and_add (x, ilist);
2312 /* FALLTHRU */
2314 do_dtor:
2315 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
2316 if (x)
2318 dtor = x;
2319 gimplify_stmt (&dtor);
2320 tsi_link_before (&diter, dtor, TSI_SAME_STMT);
2322 break;
2324 case OMP_CLAUSE_FIRSTPRIVATE:
2325 if (is_task_ctx (ctx))
2327 if (is_reference (var) || is_variable_sized (var))
2328 goto do_dtor;
2329 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
2330 ctx))
2331 || use_pointer_for_field (var, NULL))
2333 x = build_receiver_ref (var, false, ctx);
2334 SET_DECL_VALUE_EXPR (new_var, x);
2335 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
2336 goto do_dtor;
2339 x = build_outer_var_ref (var, ctx);
2340 x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
2341 gimplify_and_add (x, ilist);
2342 goto do_dtor;
2343 break;
2345 case OMP_CLAUSE_COPYIN:
2346 by_ref = use_pointer_for_field (var, NULL);
2347 x = build_receiver_ref (var, by_ref, ctx);
2348 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
2349 append_to_statement_list (x, &copyin_seq);
2350 copyin_by_ref |= by_ref;
2351 break;
2353 case OMP_CLAUSE_REDUCTION:
2354 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2356 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
2357 x = build_outer_var_ref (var, ctx);
2359 if (is_reference (var))
2360 x = build_fold_addr_expr (x);
2361 SET_DECL_VALUE_EXPR (placeholder, x);
2362 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
2363 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), ilist);
2364 OMP_CLAUSE_REDUCTION_INIT (c) = NULL;
2365 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
2367 else
2369 x = omp_reduction_init (c, TREE_TYPE (new_var));
2370 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
2371 x = build_gimple_modify_stmt (new_var, x);
2372 gimplify_and_add (x, ilist);
2374 break;
2376 default:
2377 gcc_unreachable ();
2382 /* The copyin sequence is not to be executed by the main thread, since
2383 that would result in self-copies. Perhaps not visible to scalars,
2384 but it certainly is to C++ operator=. */
2385 if (copyin_seq)
2387 x = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
2388 x = build2 (NE_EXPR, boolean_type_node, x,
2389 build_int_cst (TREE_TYPE (x), 0));
2390 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
2391 gimplify_and_add (x, ilist);
2394 /* If any copyin variable is passed by reference, we must ensure the
2395 master thread doesn't modify it before it is copied over in all
2396 threads. Similarly for variables in both firstprivate and
2397 lastprivate clauses we need to ensure the lastprivate copying
2398 happens after firstprivate copying in all threads. */
2399 if (copyin_by_ref || lastprivate_firstprivate)
2400 gimplify_and_add (build_omp_barrier (), ilist);
2404 /* Generate code to implement the LASTPRIVATE clauses. This is used for
2405 both parallel and workshare constructs. PREDICATE may be NULL if it's
2406 always true. */
2408 static void
2409 lower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
2410 omp_context *ctx)
2412 tree sub_list, x, c;
2413 bool par_clauses = false;
2415 /* Early exit if there are no lastprivate clauses. */
2416 clauses = find_omp_clause (clauses, OMP_CLAUSE_LASTPRIVATE);
2417 if (clauses == NULL)
2419 /* If this was a workshare clause, see if it had been combined
2420 with its parallel. In that case, look for the clauses on the
2421 parallel statement itself. */
2422 if (is_parallel_ctx (ctx))
2423 return;
2425 ctx = ctx->outer;
2426 if (ctx == NULL || !is_parallel_ctx (ctx))
2427 return;
2429 clauses = find_omp_clause (OMP_PARALLEL_CLAUSES (ctx->stmt),
2430 OMP_CLAUSE_LASTPRIVATE);
2431 if (clauses == NULL)
2432 return;
2433 par_clauses = true;
2436 sub_list = alloc_stmt_list ();
2438 for (c = clauses; c ;)
2440 tree var, new_var;
2442 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
2444 var = OMP_CLAUSE_DECL (c);
2445 new_var = lookup_decl (var, ctx);
2447 if (OMP_CLAUSE_LASTPRIVATE_STMT (c))
2448 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c), &sub_list);
2449 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL;
2451 x = build_outer_var_ref (var, ctx);
2452 if (is_reference (var))
2453 new_var = build_fold_indirect_ref (new_var);
2454 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
2455 append_to_statement_list (x, &sub_list);
2457 c = OMP_CLAUSE_CHAIN (c);
2458 if (c == NULL && !par_clauses)
2460 /* If this was a workshare clause, see if it had been combined
2461 with its parallel. In that case, continue looking for the
2462 clauses also on the parallel statement itself. */
2463 if (is_parallel_ctx (ctx))
2464 break;
2466 ctx = ctx->outer;
2467 if (ctx == NULL || !is_parallel_ctx (ctx))
2468 break;
2470 c = find_omp_clause (OMP_PARALLEL_CLAUSES (ctx->stmt),
2471 OMP_CLAUSE_LASTPRIVATE);
2472 par_clauses = true;
2476 if (predicate)
2477 x = build3 (COND_EXPR, void_type_node, predicate, sub_list, NULL);
2478 else
2479 x = sub_list;
2481 gimplify_and_add (x, stmt_list);
2485 /* Generate code to implement the REDUCTION clauses. */
2487 static void
2488 lower_reduction_clauses (tree clauses, tree *stmt_list, omp_context *ctx)
2490 tree sub_list = NULL, x, c;
2491 int count = 0;
2493 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
2494 update in that case, otherwise use a lock. */
2495 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
2496 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2498 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2500 /* Never use OMP_ATOMIC for array reductions. */
2501 count = -1;
2502 break;
2504 count++;
2507 if (count == 0)
2508 return;
2510 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2512 tree var, ref, new_var;
2513 enum tree_code code;
2515 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
2516 continue;
2518 var = OMP_CLAUSE_DECL (c);
2519 new_var = lookup_decl (var, ctx);
2520 if (is_reference (var))
2521 new_var = build_fold_indirect_ref (new_var);
2522 ref = build_outer_var_ref (var, ctx);
2523 code = OMP_CLAUSE_REDUCTION_CODE (c);
2525 /* reduction(-:var) sums up the partial results, so it acts
2526 identically to reduction(+:var). */
2527 if (code == MINUS_EXPR)
2528 code = PLUS_EXPR;
2530 if (count == 1)
2532 tree addr = build_fold_addr_expr (ref);
2534 addr = save_expr (addr);
2535 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
2536 x = fold_build2 (code, TREE_TYPE (ref), ref, new_var);
2537 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
2538 gimplify_and_add (x, stmt_list);
2539 return;
2542 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2544 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
2546 if (is_reference (var))
2547 ref = build_fold_addr_expr (ref);
2548 SET_DECL_VALUE_EXPR (placeholder, ref);
2549 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
2550 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c), &sub_list);
2551 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL;
2552 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
2554 else
2556 x = build2 (code, TREE_TYPE (ref), ref, new_var);
2557 ref = build_outer_var_ref (var, ctx);
2558 x = build_gimple_modify_stmt (ref, x);
2559 append_to_statement_list (x, &sub_list);
2563 x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ATOMIC_START], 0);
2564 gimplify_and_add (x, stmt_list);
2566 gimplify_and_add (sub_list, stmt_list);
2568 x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ATOMIC_END], 0);
2569 gimplify_and_add (x, stmt_list);
2573 /* Generate code to implement the COPYPRIVATE clauses. */
2575 static void
2576 lower_copyprivate_clauses (tree clauses, tree *slist, tree *rlist,
2577 omp_context *ctx)
2579 tree c;
2581 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2583 tree var, ref, x;
2584 bool by_ref;
2586 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
2587 continue;
2589 var = OMP_CLAUSE_DECL (c);
2590 by_ref = use_pointer_for_field (var, NULL);
2592 ref = build_sender_ref (var, ctx);
2593 x = lookup_decl_in_outer_ctx (var, ctx);
2594 x = by_ref ? build_fold_addr_expr (x) : x;
2595 x = build_gimple_modify_stmt (ref, x);
2596 gimplify_and_add (x, slist);
2598 ref = build_receiver_ref (var, by_ref, ctx);
2599 if (is_reference (var))
2601 ref = build_fold_indirect_ref (ref);
2602 var = build_fold_indirect_ref (var);
2604 x = lang_hooks.decls.omp_clause_assign_op (c, var, ref);
2605 gimplify_and_add (x, rlist);
2610 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
2611 and REDUCTION from the sender (aka parent) side. */
2613 static void
2614 lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
2616 tree c;
2618 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2620 tree val, ref, x, var;
2621 bool by_ref, do_in = false, do_out = false;
2623 switch (OMP_CLAUSE_CODE (c))
2625 case OMP_CLAUSE_PRIVATE:
2626 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
2627 break;
2628 continue;
2629 case OMP_CLAUSE_FIRSTPRIVATE:
2630 case OMP_CLAUSE_COPYIN:
2631 case OMP_CLAUSE_LASTPRIVATE:
2632 case OMP_CLAUSE_REDUCTION:
2633 break;
2634 default:
2635 continue;
2638 val = OMP_CLAUSE_DECL (c);
2639 var = lookup_decl_in_outer_ctx (val, ctx);
2641 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
2642 && is_global_var (var))
2643 continue;
2644 if (is_variable_sized (val))
2645 continue;
2646 by_ref = use_pointer_for_field (val, NULL);
2648 switch (OMP_CLAUSE_CODE (c))
2650 case OMP_CLAUSE_PRIVATE:
2651 case OMP_CLAUSE_FIRSTPRIVATE:
2652 case OMP_CLAUSE_COPYIN:
2653 do_in = true;
2654 break;
2656 case OMP_CLAUSE_LASTPRIVATE:
2657 if (by_ref || is_reference (val))
2659 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
2660 continue;
2661 do_in = true;
2663 else
2665 do_out = true;
2666 if (lang_hooks.decls.omp_private_outer_ref (val))
2667 do_in = true;
2669 break;
2671 case OMP_CLAUSE_REDUCTION:
2672 do_in = true;
2673 do_out = !(by_ref || is_reference (val));
2674 break;
2676 default:
2677 gcc_unreachable ();
2680 if (do_in)
2682 ref = build_sender_ref (val, ctx);
2683 x = by_ref ? build_fold_addr_expr (var) : var;
2684 x = build_gimple_modify_stmt (ref, x);
2685 gimplify_and_add (x, ilist);
2686 if (is_task_ctx (ctx))
2687 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
2690 if (do_out)
2692 ref = build_sender_ref (val, ctx);
2693 x = build_gimple_modify_stmt (var, ref);
2694 gimplify_and_add (x, olist);
2699 /* Generate code to implement SHARED from the sender (aka parent) side.
2700 This is trickier, since OMP_PARALLEL_CLAUSES doesn't list things that
2701 got automatically shared. */
2703 static void
2704 lower_send_shared_vars (tree *ilist, tree *olist, omp_context *ctx)
2706 tree var, ovar, nvar, f, x, record_type;
2708 if (ctx->record_type == NULL)
2709 return;
2711 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
2712 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
2714 ovar = DECL_ABSTRACT_ORIGIN (f);
2715 nvar = maybe_lookup_decl (ovar, ctx);
2716 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
2717 continue;
2719 /* If CTX is a nested parallel directive. Find the immediately
2720 enclosing parallel or workshare construct that contains a
2721 mapping for OVAR. */
2722 var = lookup_decl_in_outer_ctx (ovar, ctx);
2724 if (use_pointer_for_field (ovar, ctx))
2726 x = build_sender_ref (ovar, ctx);
2727 var = build_fold_addr_expr (var);
2728 x = build_gimple_modify_stmt (x, var);
2729 gimplify_and_add (x, ilist);
2731 else
2733 x = build_sender_ref (ovar, ctx);
2734 x = build_gimple_modify_stmt (x, var);
2735 gimplify_and_add (x, ilist);
2737 if (!TREE_READONLY (var))
2739 x = build_sender_ref (ovar, ctx);
2740 x = build_gimple_modify_stmt (var, x);
2741 gimplify_and_add (x, olist);
2747 /* Build the function calls to GOMP_parallel_start etc to actually
2748 generate the parallel operation. REGION is the parallel region
2749 being expanded. BB is the block where to insert the code. WS_ARGS
2750 will be set if this is a call to a combined parallel+workshare
2751 construct, it contains the list of additional arguments needed by
2752 the workshare construct. */
2754 static void
2755 expand_parallel_call (struct omp_region *region, basic_block bb,
2756 tree entry_stmt, tree ws_args)
2758 tree t, t1, t2, val, cond, c, clauses;
2759 block_stmt_iterator si;
2760 int start_ix;
2762 clauses = OMP_PARALLEL_CLAUSES (entry_stmt);
2764 /* Determine what flavor of GOMP_parallel_start we will be
2765 emitting. */
2766 start_ix = BUILT_IN_GOMP_PARALLEL_START;
2767 if (is_combined_parallel (region))
2769 switch (region->inner->type)
2771 case OMP_FOR:
2772 gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
2773 start_ix = BUILT_IN_GOMP_PARALLEL_LOOP_STATIC_START
2774 + (region->inner->sched_kind
2775 == OMP_CLAUSE_SCHEDULE_RUNTIME
2776 ? 3 : region->inner->sched_kind);
2777 break;
2778 case OMP_SECTIONS:
2779 start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS_START;
2780 break;
2781 default:
2782 gcc_unreachable ();
2786 /* By default, the value of NUM_THREADS is zero (selected at run time)
2787 and there is no conditional. */
2788 cond = NULL_TREE;
2789 val = build_int_cst (unsigned_type_node, 0);
2791 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
2792 if (c)
2793 cond = OMP_CLAUSE_IF_EXPR (c);
2795 c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
2796 if (c)
2797 val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
2799 /* Ensure 'val' is of the correct type. */
2800 val = fold_convert (unsigned_type_node, val);
2802 /* If we found the clause 'if (cond)', build either
2803 (cond != 0) or (cond ? val : 1u). */
2804 if (cond)
2806 block_stmt_iterator si;
2808 cond = gimple_boolify (cond);
2810 if (integer_zerop (val))
2811 val = fold_build2 (EQ_EXPR, unsigned_type_node, cond,
2812 build_int_cst (TREE_TYPE (cond), 0));
2813 else
2815 basic_block cond_bb, then_bb, else_bb;
2816 edge e, e_then, e_else;
2817 tree t, tmp_then, tmp_else, tmp_join, tmp_var;
2819 tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
2820 if (gimple_in_ssa_p (cfun))
2822 tmp_then = make_ssa_name (tmp_var, NULL_TREE);
2823 tmp_else = make_ssa_name (tmp_var, NULL_TREE);
2824 tmp_join = make_ssa_name (tmp_var, NULL_TREE);
2826 else
2828 tmp_then = tmp_var;
2829 tmp_else = tmp_var;
2830 tmp_join = tmp_var;
2833 e = split_block (bb, NULL);
2834 cond_bb = e->src;
2835 bb = e->dest;
2836 remove_edge (e);
2838 then_bb = create_empty_bb (cond_bb);
2839 else_bb = create_empty_bb (then_bb);
2840 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
2841 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
2843 t = build3 (COND_EXPR, void_type_node,
2844 cond, NULL_TREE, NULL_TREE);
2846 si = bsi_start (cond_bb);
2847 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
2849 si = bsi_start (then_bb);
2850 t = build_gimple_modify_stmt (tmp_then, val);
2851 if (gimple_in_ssa_p (cfun))
2852 SSA_NAME_DEF_STMT (tmp_then) = t;
2853 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
2855 si = bsi_start (else_bb);
2856 t = build_gimple_modify_stmt (tmp_else,
2857 build_int_cst (unsigned_type_node, 1));
2858 if (gimple_in_ssa_p (cfun))
2859 SSA_NAME_DEF_STMT (tmp_else) = t;
2860 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
2862 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
2863 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
2864 e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
2865 e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);
2867 if (gimple_in_ssa_p (cfun))
2869 tree phi = create_phi_node (tmp_join, bb);
2870 SSA_NAME_DEF_STMT (tmp_join) = phi;
2871 add_phi_arg (phi, tmp_then, e_then);
2872 add_phi_arg (phi, tmp_else, e_else);
2875 val = tmp_join;
2878 si = bsi_start (bb);
2879 val = force_gimple_operand_bsi (&si, val, true, NULL_TREE,
2880 false, BSI_CONTINUE_LINKING);
2883 si = bsi_last (bb);
2884 t = OMP_PARALLEL_DATA_ARG (entry_stmt);
2885 if (t == NULL)
2886 t1 = null_pointer_node;
2887 else
2888 t1 = build_fold_addr_expr (t);
2889 t2 = build_fold_addr_expr (OMP_PARALLEL_FN (entry_stmt));
2891 if (ws_args)
2893 tree args = tree_cons (NULL, t2,
2894 tree_cons (NULL, t1,
2895 tree_cons (NULL, val, ws_args)));
2896 t = build_function_call_expr (built_in_decls[start_ix], args);
2898 else
2899 t = build_call_expr (built_in_decls[start_ix], 3, t2, t1, val);
2901 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
2902 false, BSI_CONTINUE_LINKING);
2904 t = OMP_PARALLEL_DATA_ARG (entry_stmt);
2905 if (t == NULL)
2906 t = null_pointer_node;
2907 else
2908 t = build_fold_addr_expr (t);
2909 t = build_call_expr (OMP_PARALLEL_FN (entry_stmt), 1, t);
2910 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
2911 false, BSI_CONTINUE_LINKING);
2913 t = build_call_expr (built_in_decls[BUILT_IN_GOMP_PARALLEL_END], 0);
2914 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
2915 false, BSI_CONTINUE_LINKING);
2919 /* Build the function call to GOMP_task to actually
2920 generate the task operation. BB is the block where to insert the code. */
2922 static void
2923 expand_task_call (basic_block bb, tree entry_stmt)
2925 tree t, t1, t2, t3, flags, cond, c, clauses;
2926 block_stmt_iterator si;
2928 clauses = OMP_TASK_CLAUSES (entry_stmt);
2930 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
2931 if (c)
2932 cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
2933 else
2934 cond = boolean_true_node;
2936 c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
2937 flags = build_int_cst (unsigned_type_node, (c ? 1 : 0));
2939 si = bsi_last (bb);
2940 t = OMP_TASK_DATA_ARG (entry_stmt);
2941 if (t == NULL)
2942 t2 = null_pointer_node;
2943 else
2944 t2 = build_fold_addr_expr (t);
2945 t1 = build_fold_addr_expr (OMP_TASK_FN (entry_stmt));
2946 t = OMP_TASK_COPYFN (entry_stmt);
2947 if (t == NULL)
2948 t3 = null_pointer_node;
2949 else
2950 t3 = build_fold_addr_expr (t);
2952 t = build_call_expr (built_in_decls[BUILT_IN_GOMP_TASK], 7, t1, t2, t3,
2953 OMP_TASK_ARG_SIZE (entry_stmt),
2954 OMP_TASK_ARG_ALIGN (entry_stmt), cond, flags);
2956 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
2957 false, BSI_CONTINUE_LINKING);
2961 /* If exceptions are enabled, wrap *STMT_P in a MUST_NOT_THROW catch
2962 handler. This prevents programs from violating the structured
2963 block semantics with throws. */
2965 static void
2966 maybe_catch_exception (tree *stmt_p)
2968 tree f, t;
2970 if (!flag_exceptions)
2971 return;
2973 if (lang_protect_cleanup_actions)
2974 t = lang_protect_cleanup_actions ();
2975 else
2976 t = build_call_expr (built_in_decls[BUILT_IN_TRAP], 0);
2977 f = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
2978 EH_FILTER_MUST_NOT_THROW (f) = 1;
2979 gimplify_and_add (t, &EH_FILTER_FAILURE (f));
2981 t = build2 (TRY_CATCH_EXPR, void_type_node, *stmt_p, NULL);
2982 append_to_statement_list (f, &TREE_OPERAND (t, 1));
2984 *stmt_p = NULL;
2985 append_to_statement_list (t, stmt_p);
2988 /* Chain all the DECLs in LIST by their TREE_CHAIN fields. */
2990 static tree
2991 list2chain (tree list)
2993 tree t;
2995 for (t = list; t; t = TREE_CHAIN (t))
2997 tree var = TREE_VALUE (t);
2998 if (TREE_CHAIN (t))
2999 TREE_CHAIN (var) = TREE_VALUE (TREE_CHAIN (t));
3000 else
3001 TREE_CHAIN (var) = NULL_TREE;
3004 return list ? TREE_VALUE (list) : NULL_TREE;
3008 /* Remove barriers in REGION->EXIT's block. Note that this is only
3009 valid for OMP_PARALLEL regions. Since the end of a parallel region
3010 is an implicit barrier, any workshare inside the OMP_PARALLEL that
3011 left a barrier at the end of the OMP_PARALLEL region can now be
3012 removed. */
3014 static void
3015 remove_exit_barrier (struct omp_region *region)
3017 block_stmt_iterator si;
3018 basic_block exit_bb;
3019 edge_iterator ei;
3020 edge e;
3021 tree t;
3023 exit_bb = region->exit;
3025 /* If the parallel region doesn't return, we don't have REGION->EXIT
3026 block at all. */
3027 if (! exit_bb)
3028 return;
3030 /* The last insn in the block will be the parallel's OMP_RETURN. The
3031 workshare's OMP_RETURN will be in a preceding block. The kinds of
3032 statements that can appear in between are extremely limited -- no
3033 memory operations at all. Here, we allow nothing at all, so the
3034 only thing we allow to precede this OMP_RETURN is a label. */
3035 si = bsi_last (exit_bb);
3036 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
3037 bsi_prev (&si);
3038 if (!bsi_end_p (si) && TREE_CODE (bsi_stmt (si)) != LABEL_EXPR)
3039 return;
3041 FOR_EACH_EDGE (e, ei, exit_bb->preds)
3043 si = bsi_last (e->src);
3044 if (bsi_end_p (si))
3045 continue;
3046 t = bsi_stmt (si);
3047 if (TREE_CODE (t) == OMP_RETURN)
3048 OMP_RETURN_NOWAIT (t) = 1;
3052 static void
3053 remove_exit_barriers (struct omp_region *region)
3055 if (region->type == OMP_PARALLEL)
3056 remove_exit_barrier (region);
3058 if (region->inner)
3060 region = region->inner;
3061 remove_exit_barriers (region);
3062 while (region->next)
3064 region = region->next;
3065 remove_exit_barriers (region);
3070 /* Optimize omp_get_thread_num () and omp_get_num_threads ()
3071 calls. These can't be declared as const functions, but
3072 within one parallel body they are constant, so they can be
3073 transformed there into __builtin_omp_get_{thread_num,num_threads} ()
3074 which are declared const. Similarly for task body, except
3075 that in untied task omp_get_thread_num () can change at any task
3076 scheduling point. */
3078 static void
3079 optimize_omp_library_calls (tree entry_stmt)
3081 basic_block bb;
3082 block_stmt_iterator bsi;
3083 tree thr_num_id
3084 = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM]);
3085 tree num_thr_id
3086 = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS]);
3087 bool untied_task = (TREE_CODE (entry_stmt) == OMP_TASK
3088 && find_omp_clause (OMP_TASK_CLAUSES (entry_stmt),
3089 OMP_CLAUSE_UNTIED) != NULL);
3091 FOR_EACH_BB (bb)
3092 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3094 tree stmt = bsi_stmt (bsi);
3095 tree call = get_call_expr_in (stmt);
3096 tree decl;
3098 if (call
3099 && (decl = get_callee_fndecl (call))
3100 && DECL_EXTERNAL (decl)
3101 && TREE_PUBLIC (decl)
3102 && DECL_INITIAL (decl) == NULL)
3104 tree built_in;
3106 if (DECL_NAME (decl) == thr_num_id)
3108 /* In #pragma omp task untied omp_get_thread_num () can change
3109 during the execution of the task region. */
3110 if (untied_task)
3111 continue;
3112 built_in = built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM];
3114 else if (DECL_NAME (decl) == num_thr_id)
3115 built_in = built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS];
3116 else
3117 continue;
3119 if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
3120 || call_expr_nargs (call) != 0)
3121 continue;
3123 if (flag_exceptions && !TREE_NOTHROW (decl))
3124 continue;
3126 if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
3127 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (decl)))
3128 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (built_in))))
3129 continue;
3131 CALL_EXPR_FN (call) = build_fold_addr_expr (built_in);
3136 /* Expand the OpenMP parallel or task directive starting at REGION. */
3138 static void
3139 expand_omp_taskreg (struct omp_region *region)
3141 basic_block entry_bb, exit_bb, new_bb;
3142 struct function *child_cfun;
3143 tree child_fn, block, t, ws_args, *tp;
3144 block_stmt_iterator si;
3145 tree entry_stmt;
3146 edge e;
3148 entry_stmt = last_stmt (region->entry);
3149 child_fn = OMP_TASKREG_FN (entry_stmt);
3150 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
3151 /* If this function has been already instrumented, make sure
3152 the child function isn't instrumented again. */
3153 child_cfun->after_tree_profile = cfun->after_tree_profile;
3155 entry_bb = region->entry;
3156 exit_bb = region->exit;
3158 if (is_combined_parallel (region))
3159 ws_args = region->ws_args;
3160 else
3161 ws_args = NULL_TREE;
3163 if (child_cfun->cfg)
3165 /* Due to inlining, it may happen that we have already outlined
3166 the region, in which case all we need to do is make the
3167 sub-graph unreachable and emit the parallel call. */
3168 edge entry_succ_e, exit_succ_e;
3169 block_stmt_iterator si;
3171 entry_succ_e = single_succ_edge (entry_bb);
3173 si = bsi_last (entry_bb);
3174 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_PARALLEL
3175 || TREE_CODE (bsi_stmt (si)) == OMP_TASK);
3176 bsi_remove (&si, true);
3178 new_bb = entry_bb;
3179 if (exit_bb)
3181 exit_succ_e = single_succ_edge (exit_bb);
3182 make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
3184 remove_edge_and_dominated_blocks (entry_succ_e);
3186 else
3188 /* If the parallel region needs data sent from the parent
3189 function, then the very first statement (except possible
3190 tree profile counter updates) of the parallel body
3191 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O. Since
3192 &.OMP_DATA_O is passed as an argument to the child function,
3193 we need to replace it with the argument as seen by the child
3194 function.
3196 In most cases, this will end up being the identity assignment
3197 .OMP_DATA_I = .OMP_DATA_I. However, if the parallel body had
3198 a function call that has been inlined, the original PARM_DECL
3199 .OMP_DATA_I may have been converted into a different local
3200 variable. In which case, we need to keep the assignment. */
3201 if (OMP_TASKREG_DATA_ARG (entry_stmt))
3203 basic_block entry_succ_bb = single_succ (entry_bb);
3204 block_stmt_iterator si;
3205 tree parcopy_stmt = NULL_TREE, arg, narg;
3207 for (si = bsi_start (entry_succ_bb); ; bsi_next (&si))
3209 tree stmt, arg;
3211 gcc_assert (!bsi_end_p (si));
3212 stmt = bsi_stmt (si);
3213 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
3214 continue;
3216 arg = GIMPLE_STMT_OPERAND (stmt, 1);
3217 STRIP_NOPS (arg);
3218 if (TREE_CODE (arg) == ADDR_EXPR
3219 && TREE_OPERAND (arg, 0)
3220 == OMP_TASKREG_DATA_ARG (entry_stmt))
3222 parcopy_stmt = stmt;
3223 break;
3227 gcc_assert (parcopy_stmt != NULL_TREE);
3228 arg = DECL_ARGUMENTS (child_fn);
3230 if (!gimple_in_ssa_p (cfun))
3232 if (GIMPLE_STMT_OPERAND (parcopy_stmt, 0) == arg)
3233 bsi_remove (&si, true);
3234 else
3235 GIMPLE_STMT_OPERAND (parcopy_stmt, 1) = arg;
3237 else
3239 /* If we are in ssa form, we must load the value from the default
3240 definition of the argument. That should not be defined now,
3241 since the argument is not used uninitialized. */
3242 gcc_assert (gimple_default_def (cfun, arg) == NULL);
3243 narg = make_ssa_name (arg, build_empty_stmt ());
3244 set_default_def (arg, narg);
3245 GIMPLE_STMT_OPERAND (parcopy_stmt, 1) = narg;
3246 update_stmt (parcopy_stmt);
3250 /* Declare local variables needed in CHILD_CFUN. */
3251 block = DECL_INITIAL (child_fn);
3252 BLOCK_VARS (block) = list2chain (child_cfun->local_decls);
3253 DECL_SAVED_TREE (child_fn) = bb_stmt_list (single_succ (entry_bb));
3254 TREE_USED (block) = 1;
3256 /* Reset DECL_CONTEXT on function arguments. */
3257 for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
3258 DECL_CONTEXT (t) = child_fn;
3260 /* Split ENTRY_BB at OMP_PARALLEL or OMP_TASK, so that it can be
3261 moved to the child function. */
3262 si = bsi_last (entry_bb);
3263 t = bsi_stmt (si);
3264 gcc_assert (t && (TREE_CODE (t) == OMP_PARALLEL
3265 || TREE_CODE (t) == OMP_TASK));
3266 bsi_remove (&si, true);
3267 e = split_block (entry_bb, t);
3268 entry_bb = e->dest;
3269 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
3271 /* Convert OMP_RETURN into a RETURN_EXPR. */
3272 if (exit_bb)
3274 si = bsi_last (exit_bb);
3275 gcc_assert (!bsi_end_p (si)
3276 && TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
3277 t = build1 (RETURN_EXPR, void_type_node, NULL);
3278 bsi_insert_after (&si, t, BSI_SAME_STMT);
3279 bsi_remove (&si, true);
3282 /* Move the parallel region into CHILD_CFUN. */
3284 if (gimple_in_ssa_p (cfun))
3286 push_cfun (child_cfun);
3287 init_tree_ssa (child_cfun);
3288 init_ssa_operands ();
3289 cfun->gimple_df->in_ssa_p = true;
3290 pop_cfun ();
3291 block = NULL_TREE;
3293 else
3294 block = TREE_BLOCK (entry_stmt);
3296 new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
3297 if (exit_bb)
3298 single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
3300 /* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
3301 for (tp = &child_cfun->local_decls; *tp; )
3302 if (DECL_CONTEXT (TREE_VALUE (*tp)) != cfun->decl)
3303 tp = &TREE_CHAIN (*tp);
3304 else
3305 *tp = TREE_CHAIN (*tp);
3307 /* Inform the callgraph about the new function. */
3308 DECL_STRUCT_FUNCTION (child_fn)->curr_properties
3309 = cfun->curr_properties;
3310 cgraph_add_new_function (child_fn, true);
3312 /* Fix the callgraph edges for child_cfun. Those for cfun will be
3313 fixed in a following pass. */
3314 push_cfun (child_cfun);
3315 if (optimize)
3316 optimize_omp_library_calls (entry_stmt);
3317 rebuild_cgraph_edges ();
3319 /* Some EH regions might become dead, see PR34608. If
3320 pass_cleanup_cfg isn't the first pass to happen with the
3321 new child, these dead EH edges might cause problems.
3322 Clean them up now. */
3323 if (flag_exceptions)
3325 basic_block bb;
3326 tree save_current = current_function_decl;
3327 bool changed = false;
3329 current_function_decl = child_fn;
3330 FOR_EACH_BB (bb)
3331 changed |= tree_purge_dead_eh_edges (bb);
3332 if (changed)
3333 cleanup_tree_cfg ();
3334 current_function_decl = save_current;
3336 pop_cfun ();
3339 /* Emit a library call to launch the children threads. */
3340 if (TREE_CODE (entry_stmt) == OMP_PARALLEL)
3341 expand_parallel_call (region, new_bb, entry_stmt, ws_args);
3342 else
3343 expand_task_call (new_bb, entry_stmt);
3344 update_ssa (TODO_update_ssa_only_virtuals);
3348 /* A subroutine of expand_omp_for. Generate code for a parallel
3349 loop with any schedule. Given parameters:
3351 for (V = N1; V cond N2; V += STEP) BODY;
3353 where COND is "<" or ">", we generate pseudocode
3355 more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
3356 if (more) goto L0; else goto L3;
3358 V = istart0;
3359 iend = iend0;
3361 BODY;
3362 V += STEP;
3363 if (V cond iend) goto L1; else goto L2;
3365 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
3368 If this is a combined omp parallel loop, instead of the call to
3369 GOMP_loop_foo_start, we call GOMP_loop_foo_next.
3371 For collapsed loops, given parameters:
3372 collapse(3)
3373 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
3374 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
3375 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
3376 BODY;
3378 we generate pseudocode
3380 if (cond3 is <)
3381 adj = STEP3 - 1;
3382 else
3383 adj = STEP3 + 1;
3384 count3 = (adj + N32 - N31) / STEP3;
3385 if (cond2 is <)
3386 adj = STEP2 - 1;
3387 else
3388 adj = STEP2 + 1;
3389 count2 = (adj + N22 - N21) / STEP2;
3390 if (cond1 is <)
3391 adj = STEP1 - 1;
3392 else
3393 adj = STEP1 + 1;
3394 count1 = (adj + N12 - N11) / STEP1;
3395 count = count1 * count2 * count3;
3396 more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
3397 if (more) goto L0; else goto L3;
3399 V = istart0;
3400 T = V;
3401 V3 = N31 + (T % count3) * STEP3;
3402 T = T / count3;
3403 V2 = N21 + (T % count2) * STEP2;
3404 T = T / count2;
3405 V1 = N11 + T * STEP1;
3406 iend = iend0;
3408 BODY;
3409 V += 1;
3410 if (V < iend) goto L10; else goto L2;
3411 L10:
3412 V3 += STEP3;
3413 if (V3 cond3 N32) goto L1; else goto L11;
3414 L11:
3415 V3 = N31;
3416 V2 += STEP2;
3417 if (V2 cond2 N22) goto L1; else goto L12;
3418 L12:
3419 V2 = N21;
3420 V1 += STEP1;
3421 goto L1;
3423 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
3428 static void
3429 expand_omp_for_generic (struct omp_region *region,
3430 struct omp_for_data *fd,
3431 enum built_in_function start_fn,
3432 enum built_in_function next_fn)
3434 tree type, istart0, iend0, iend, phi;
3435 tree t, vmain, vback, bias = NULL_TREE;
3436 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
3437 basic_block l2_bb = NULL, l3_bb = NULL;
3438 block_stmt_iterator si;
3439 bool in_combined_parallel = is_combined_parallel (region);
3440 bool broken_loop = region->cont == NULL;
3441 edge e, ne;
3442 tree *counts = NULL;
3443 int i;
3445 gcc_assert (!broken_loop || !in_combined_parallel);
3446 gcc_assert (fd->iter_type == long_integer_type_node
3447 || !in_combined_parallel);
3449 type = TREE_TYPE (fd->loop.v);
3450 istart0 = create_tmp_var (fd->iter_type, ".istart0");
3451 iend0 = create_tmp_var (fd->iter_type, ".iend0");
3452 TREE_ADDRESSABLE (istart0) = 1;
3453 TREE_ADDRESSABLE (iend0) = 1;
3454 if (gimple_in_ssa_p (cfun))
3456 add_referenced_var (istart0);
3457 add_referenced_var (iend0);
3460 /* See if we need to bias by LLONG_MIN. */
3461 if (fd->iter_type == long_long_unsigned_type_node
3462 && TREE_CODE (type) == INTEGER_TYPE
3463 && !TYPE_UNSIGNED (type))
3465 tree n1, n2;
3467 if (fd->loop.cond_code == LT_EXPR)
3469 n1 = fd->loop.n1;
3470 n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
3472 else
3474 n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
3475 n2 = fd->loop.n1;
3477 if (TREE_CODE (n1) != INTEGER_CST
3478 || TREE_CODE (n2) != INTEGER_CST
3479 || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
3480 bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
3483 entry_bb = region->entry;
3484 cont_bb = region->cont;
3485 collapse_bb = NULL;
3486 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
3487 gcc_assert (broken_loop
3488 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
3489 l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
3490 l1_bb = single_succ (l0_bb);
3491 if (!broken_loop)
3493 l2_bb = create_empty_bb (cont_bb);
3494 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
3495 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
3497 else
3498 l2_bb = NULL;
3499 l3_bb = BRANCH_EDGE (entry_bb)->dest;
3500 exit_bb = region->exit;
3502 si = bsi_last (entry_bb);
3504 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
3505 if (fd->collapse > 1)
3507 /* collapsed loops need work for expansion in SSA form. */
3508 gcc_assert (!gimple_in_ssa_p (cfun));
3509 counts = (tree *) alloca (fd->collapse * sizeof (tree));
3510 for (i = 0; i < fd->collapse; i++)
3512 tree itype = TREE_TYPE (fd->loops[i].v);
3514 if (POINTER_TYPE_P (itype))
3515 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
3516 t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
3517 ? -1 : 1));
3518 t = fold_build2 (PLUS_EXPR, itype,
3519 fold_convert (itype, fd->loops[i].step), t);
3520 t = fold_build2 (PLUS_EXPR, itype, t,
3521 fold_convert (itype, fd->loops[i].n2));
3522 t = fold_build2 (MINUS_EXPR, itype, t,
3523 fold_convert (itype, fd->loops[i].n1));
3524 if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
3525 t = fold_build2 (TRUNC_DIV_EXPR, itype,
3526 fold_build1 (NEGATE_EXPR, itype, t),
3527 fold_build1 (NEGATE_EXPR, itype,
3528 fold_convert (itype,
3529 fd->loops[i].step)));
3530 else
3531 t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
3532 fold_convert (itype, fd->loops[i].step));
3533 t = fold_convert (type, t);
3534 if (TREE_CODE (t) == INTEGER_CST)
3535 counts[i] = t;
3536 else
3538 counts[i] = create_tmp_var (type, ".count");
3539 t = build_gimple_modify_stmt (counts[i], t);
3540 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3541 true, BSI_SAME_STMT);
3543 if (SSA_VAR_P (fd->loop.n2))
3545 if (i == 0)
3546 t = build_gimple_modify_stmt (fd->loop.n2, counts[0]);
3547 else
3549 t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
3550 t = build_gimple_modify_stmt (fd->loop.n2, t);
3552 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3553 true, BSI_SAME_STMT);
3557 if (in_combined_parallel)
3559 /* In a combined parallel loop, emit a call to
3560 GOMP_loop_foo_next. */
3561 t = build_call_expr (built_in_decls[next_fn], 2,
3562 build_fold_addr_expr (istart0),
3563 build_fold_addr_expr (iend0));
3565 else
3567 tree t0, t1, t2, t3, t4;
3568 /* If this is not a combined parallel loop, emit a call to
3569 GOMP_loop_foo_start in ENTRY_BB. */
3570 t4 = build_fold_addr_expr (iend0);
3571 t3 = build_fold_addr_expr (istart0);
3572 t2 = fold_convert (fd->iter_type, fd->loop.step);
3573 t1 = fold_convert (fd->iter_type, fd->loop.n2);
3574 t0 = fold_convert (fd->iter_type, fd->loop.n1);
3575 if (bias)
3577 t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
3578 t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
3580 if (fd->iter_type == long_integer_type_node)
3582 if (fd->chunk_size)
3584 t = fold_convert (fd->iter_type, fd->chunk_size);
3585 t = build_call_expr (built_in_decls[start_fn], 6,
3586 t0, t1, t2, t, t3, t4);
3588 else
3589 t = build_call_expr (built_in_decls[start_fn], 5,
3590 t0, t1, t2, t3, t4);
3592 else
3594 tree t5;
3595 tree c_bool_type;
3597 /* The GOMP_loop_ull_*start functions have additional boolean
3598 argument, true for < loops and false for > loops.
3599 In Fortran, the C bool type can be different from
3600 boolean_type_node. */
3601 c_bool_type = TREE_TYPE (TREE_TYPE (built_in_decls[start_fn]));
3602 t5 = build_int_cst (c_bool_type,
3603 fd->loop.cond_code == LT_EXPR ? 1 : 0);
3604 if (fd->chunk_size)
3606 t = fold_convert (fd->iter_type, fd->chunk_size);
3607 t = build_call_expr (built_in_decls[start_fn], 7,
3608 t5, t0, t1, t2, t, t3, t4);
3610 else
3611 t = build_call_expr (built_in_decls[start_fn], 6,
3612 t5, t0, t1, t2, t3, t4);
3615 if (TREE_TYPE (t) != boolean_type_node)
3616 t = fold_build2 (NE_EXPR, boolean_type_node,
3617 t, build_int_cst (TREE_TYPE (t), 0));
3618 t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3619 true, BSI_SAME_STMT);
3620 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
3621 bsi_insert_after (&si, t, BSI_SAME_STMT);
3623 /* Remove the OMP_FOR statement. */
3624 bsi_remove (&si, true);
3626 /* Iteration setup for sequential loop goes in L0_BB. */
3627 si = bsi_start (l0_bb);
3628 if (bias)
3629 t = fold_convert (type, fold_build2 (MINUS_EXPR, fd->iter_type,
3630 istart0, bias));
3631 else
3632 t = fold_convert (type, istart0);
3633 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3634 false, BSI_CONTINUE_LINKING);
3635 t = build_gimple_modify_stmt (fd->loop.v, t);
3636 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
3637 if (gimple_in_ssa_p (cfun))
3638 SSA_NAME_DEF_STMT (fd->loop.v) = t;
3640 if (bias)
3641 t = fold_convert (type, fold_build2 (MINUS_EXPR, fd->iter_type,
3642 iend0, bias));
3643 else
3644 t = fold_convert (type, iend0);
3645 iend = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3646 false, BSI_CONTINUE_LINKING);
3647 if (fd->collapse > 1)
3649 tree tem = create_tmp_var (type, ".tem");
3651 t = build_gimple_modify_stmt (tem, fd->loop.v);
3652 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
3653 for (i = fd->collapse - 1; i >= 0; i--)
3655 tree vtype = TREE_TYPE (fd->loops[i].v), itype;
3656 itype = vtype;
3657 if (POINTER_TYPE_P (vtype))
3658 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (vtype), 0);
3659 t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
3660 t = fold_convert (itype, t);
3661 t = fold_build2 (MULT_EXPR, itype, t, fd->loops[i].step);
3662 if (POINTER_TYPE_P (vtype))
3663 t = fold_build2 (POINTER_PLUS_EXPR, vtype,
3664 fd->loops[i].n1, fold_convert (sizetype, t));
3665 else
3666 t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
3667 t = build_gimple_modify_stmt (fd->loops[i].v, t);
3668 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3669 false, BSI_CONTINUE_LINKING);
3670 if (i != 0)
3672 t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
3673 t = build_gimple_modify_stmt (tem, t);
3674 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3675 false, BSI_CONTINUE_LINKING);
3680 if (!broken_loop)
3682 /* Code to control the increment and predicate for the sequential
3683 loop goes in the CONT_BB. */
3684 si = bsi_last (cont_bb);
3685 t = bsi_stmt (si);
3686 gcc_assert (TREE_CODE (t) == OMP_CONTINUE);
3687 vmain = TREE_OPERAND (t, 1);
3688 vback = TREE_OPERAND (t, 0);
3690 if (POINTER_TYPE_P (type))
3691 t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
3692 fold_convert (sizetype, fd->loop.step));
3693 else
3694 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
3695 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3696 true, BSI_SAME_STMT);
3697 t = build_gimple_modify_stmt (vback, t);
3698 bsi_insert_before (&si, t, BSI_SAME_STMT);
3699 if (gimple_in_ssa_p (cfun))
3700 SSA_NAME_DEF_STMT (vback) = t;
3702 t = build2 (fd->loop.cond_code, boolean_type_node, vback, iend);
3703 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
3704 bsi_insert_before (&si, t, BSI_SAME_STMT);
3706 /* Remove OMP_CONTINUE. */
3707 bsi_remove (&si, true);
3709 if (fd->collapse > 1)
3711 basic_block last_bb, bb;
3713 last_bb = cont_bb;
3714 for (i = fd->collapse - 1; i >= 0; i--)
3716 tree vtype = TREE_TYPE (fd->loops[i].v);
3718 bb = create_empty_bb (last_bb);
3719 si = bsi_start (bb);
3721 if (i < fd->collapse - 1)
3723 e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
3724 e->probability = REG_BR_PROB_BASE / 8;
3726 t = build_gimple_modify_stmt (fd->loops[i + 1].v,
3727 fd->loops[i + 1].n1);
3728 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3729 false, BSI_CONTINUE_LINKING);
3731 else
3732 collapse_bb = bb;
3734 set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
3736 if (POINTER_TYPE_P (vtype))
3737 t = fold_build2 (POINTER_PLUS_EXPR, vtype,
3738 fd->loops[i].v,
3739 fold_convert (sizetype, fd->loops[i].step));
3740 else
3741 t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v,
3742 fd->loops[i].step);
3743 t = build_gimple_modify_stmt (fd->loops[i].v, t);
3744 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3745 false, BSI_CONTINUE_LINKING);
3747 if (i > 0)
3749 t = fold_build2 (fd->loops[i].cond_code, boolean_type_node,
3750 fd->loops[i].v, fd->loops[i].n2);
3751 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3752 false, BSI_CONTINUE_LINKING);
3753 t = build3 (COND_EXPR, void_type_node, t,
3754 NULL_TREE, NULL_TREE);
3755 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
3756 e = make_edge (bb, l1_bb, EDGE_TRUE_VALUE);
3757 e->probability = REG_BR_PROB_BASE * 7 / 8;
3759 else
3760 make_edge (bb, l1_bb, EDGE_FALLTHRU);
3761 last_bb = bb;
3765 /* Emit code to get the next parallel iteration in L2_BB. */
3766 si = bsi_start (l2_bb);
3768 t = build_call_expr (built_in_decls[next_fn], 2,
3769 build_fold_addr_expr (istart0),
3770 build_fold_addr_expr (iend0));
3771 if (TREE_TYPE (t) != boolean_type_node)
3772 t = fold_build2 (NE_EXPR, boolean_type_node,
3773 t, build_int_cst (TREE_TYPE (t), 0));
3774 t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3775 false, BSI_CONTINUE_LINKING);
3776 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
3777 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
3780 /* Add the loop cleanup function. */
3781 si = bsi_last (exit_bb);
3782 if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
3783 t = built_in_decls[BUILT_IN_GOMP_LOOP_END_NOWAIT];
3784 else
3785 t = built_in_decls[BUILT_IN_GOMP_LOOP_END];
3786 t = build_call_expr (t, 0);
3787 bsi_insert_after (&si, t, BSI_SAME_STMT);
3788 bsi_remove (&si, true);
3790 /* Connect the new blocks. */
3791 find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
3792 find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;
3794 if (!broken_loop)
3796 e = find_edge (cont_bb, l3_bb);
3797 ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
3799 for (phi = phi_nodes (l3_bb); phi; phi = PHI_CHAIN (phi))
3800 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
3801 PHI_ARG_DEF_FROM_EDGE (phi, e));
3802 remove_edge (e);
3804 make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
3805 if (fd->collapse > 1)
3807 e = find_edge (cont_bb, l1_bb);
3808 remove_edge (e);
3809 e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
3811 else
3813 e = find_edge (cont_bb, l1_bb);
3814 e->flags = EDGE_TRUE_VALUE;
3816 e->probability = REG_BR_PROB_BASE * 7 / 8;
3817 find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
3818 make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
3820 set_immediate_dominator (CDI_DOMINATORS, l2_bb,
3821 recompute_dominator (CDI_DOMINATORS, l2_bb));
3822 set_immediate_dominator (CDI_DOMINATORS, l3_bb,
3823 recompute_dominator (CDI_DOMINATORS, l3_bb));
3824 set_immediate_dominator (CDI_DOMINATORS, l0_bb,
3825 recompute_dominator (CDI_DOMINATORS, l0_bb));
3826 set_immediate_dominator (CDI_DOMINATORS, l1_bb,
3827 recompute_dominator (CDI_DOMINATORS, l1_bb));
3832 /* A subroutine of expand_omp_for. Generate code for a parallel
3833 loop with static schedule and no specified chunk size. Given
3834 parameters:
3836 for (V = N1; V cond N2; V += STEP) BODY;
3838 where COND is "<" or ">", we generate pseudocode
3840 if (cond is <)
3841 adj = STEP - 1;
3842 else
3843 adj = STEP + 1;
3844 if ((__typeof (V)) -1 > 0 && cond is >)
3845 n = -(adj + N2 - N1) / -STEP;
3846 else
3847 n = (adj + N2 - N1) / STEP;
3848 q = n / nthreads;
3849 q += (q * nthreads != n);
3850 s0 = q * threadid;
3851 e0 = min(s0 + q, n);
3852 V = s0 * STEP + N1;
3853 if (s0 >= e0) goto L2; else goto L0;
3855 e = e0 * STEP + N1;
3857 BODY;
3858 V += STEP;
3859 if (V cond e) goto L1;
3863 static void
3864 expand_omp_for_static_nochunk (struct omp_region *region,
3865 struct omp_for_data *fd)
3867 tree n, q, s0, e0, e, t, nthreads, threadid;
3868 tree type, itype, vmain, vback;
3869 basic_block entry_bb, exit_bb, seq_start_bb, body_bb, cont_bb;
3870 basic_block fin_bb;
3871 block_stmt_iterator si;
3873 itype = type = TREE_TYPE (fd->loop.v);
3874 if (POINTER_TYPE_P (type))
3875 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
3877 entry_bb = region->entry;
3878 cont_bb = region->cont;
3879 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
3880 gcc_assert (BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
3881 seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
3882 body_bb = single_succ (seq_start_bb);
3883 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
3884 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
3885 fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
3886 exit_bb = region->exit;
3888 /* Iteration space partitioning goes in ENTRY_BB. */
3889 si = bsi_last (entry_bb);
3890 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
3892 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
3893 t = fold_convert (itype, t);
3894 nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3895 true, BSI_SAME_STMT);
3897 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
3898 t = fold_convert (itype, t);
3899 threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3900 true, BSI_SAME_STMT);
3902 fd->loop.n1
3903 = force_gimple_operand_bsi (&si, fold_convert (type, fd->loop.n1),
3904 true, NULL_TREE, true, BSI_SAME_STMT);
3905 fd->loop.n2
3906 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.n2),
3907 true, NULL_TREE, true, BSI_SAME_STMT);
3908 fd->loop.step
3909 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.step),
3910 true, NULL_TREE, true, BSI_SAME_STMT);
3912 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
3913 t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
3914 t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
3915 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
3916 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
3917 t = fold_build2 (TRUNC_DIV_EXPR, itype,
3918 fold_build1 (NEGATE_EXPR, itype, t),
3919 fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
3920 else
3921 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
3922 t = fold_convert (itype, t);
3923 n = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
3925 t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
3926 q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
3928 t = fold_build2 (MULT_EXPR, itype, q, nthreads);
3929 t = fold_build2 (NE_EXPR, itype, t, n);
3930 t = fold_build2 (PLUS_EXPR, itype, q, t);
3931 q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
3933 t = build2 (MULT_EXPR, itype, q, threadid);
3934 s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
3936 t = fold_build2 (PLUS_EXPR, itype, s0, q);
3937 t = fold_build2 (MIN_EXPR, itype, t, n);
3938 e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
3940 t = build2 (GE_EXPR, boolean_type_node, s0, e0);
3941 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
3942 bsi_insert_before (&si, t, BSI_SAME_STMT);
3944 /* Remove the OMP_FOR statement. */
3945 bsi_remove (&si, true);
3947 /* Setup code for sequential iteration goes in SEQ_START_BB. */
3948 si = bsi_start (seq_start_bb);
3950 t = fold_convert (itype, s0);
3951 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
3952 if (POINTER_TYPE_P (type))
3953 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
3954 fold_convert (sizetype, t));
3955 else
3956 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
3957 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3958 false, BSI_CONTINUE_LINKING);
3959 t = build_gimple_modify_stmt (fd->loop.v, t);
3960 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
3961 if (gimple_in_ssa_p (cfun))
3962 SSA_NAME_DEF_STMT (fd->loop.v) = t;
3964 t = fold_convert (itype, e0);
3965 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
3966 if (POINTER_TYPE_P (type))
3967 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
3968 fold_convert (sizetype, t));
3969 else
3970 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
3971 e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3972 false, BSI_CONTINUE_LINKING);
3974 /* The code controlling the sequential loop replaces the OMP_CONTINUE. */
3975 si = bsi_last (cont_bb);
3976 t = bsi_stmt (si);
3977 gcc_assert (TREE_CODE (t) == OMP_CONTINUE);
3978 vmain = TREE_OPERAND (t, 1);
3979 vback = TREE_OPERAND (t, 0);
3981 if (POINTER_TYPE_P (type))
3982 t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
3983 fold_convert (sizetype, fd->loop.step));
3984 else
3985 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
3986 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3987 true, BSI_SAME_STMT);
3988 t = build_gimple_modify_stmt (vback, t);
3989 bsi_insert_before (&si, t, BSI_SAME_STMT);
3990 if (gimple_in_ssa_p (cfun))
3991 SSA_NAME_DEF_STMT (vback) = t;
3993 t = build2 (fd->loop.cond_code, boolean_type_node, vback, e);
3994 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
3995 bsi_insert_before (&si, t, BSI_SAME_STMT);
3997 /* Remove the OMP_CONTINUE statement. */
3998 bsi_remove (&si, true);
4000 /* Replace the OMP_RETURN with a barrier, or nothing. */
4001 si = bsi_last (exit_bb);
4002 if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
4003 force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
4004 false, BSI_SAME_STMT);
4005 bsi_remove (&si, true);
4007 /* Connect all the blocks. */
4008 find_edge (entry_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
4009 find_edge (entry_bb, fin_bb)->flags = EDGE_TRUE_VALUE;
4011 find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
4012 find_edge (cont_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
4014 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, entry_bb);
4015 set_immediate_dominator (CDI_DOMINATORS, body_bb,
4016 recompute_dominator (CDI_DOMINATORS, body_bb));
4017 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
4018 recompute_dominator (CDI_DOMINATORS, fin_bb));
4022 /* A subroutine of expand_omp_for. Generate code for a parallel
4023 loop with static schedule and a specified chunk size. Given
4024 parameters:
4026 for (V = N1; V cond N2; V += STEP) BODY;
4028 where COND is "<" or ">", we generate pseudocode
4030 if (cond is <)
4031 adj = STEP - 1;
4032 else
4033 adj = STEP + 1;
4034 if ((__typeof (V)) -1 > 0 && cond is >)
4035 n = -(adj + N2 - N1) / -STEP;
4036 else
4037 n = (adj + N2 - N1) / STEP;
4038 trip = 0;
4039 V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
4040 here so that V is defined
4041 if the loop is not entered
4043 s0 = (trip * nthreads + threadid) * CHUNK;
4044 e0 = min(s0 + CHUNK, n);
4045 if (s0 < n) goto L1; else goto L4;
4047 V = s0 * STEP + N1;
4048 e = e0 * STEP + N1;
4050 BODY;
4051 V += STEP;
4052 if (V cond e) goto L2; else goto L3;
4054 trip += 1;
4055 goto L0;
4059 static void
4060 expand_omp_for_static_chunk (struct omp_region *region,
4061 struct omp_for_data *fd)
4063 tree n, s0, e0, e, t, phi, nphi, args;
4064 tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
4065 tree type, itype, cont, v_main, v_back, v_extra;
4066 basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
4067 basic_block trip_update_bb, cont_bb, fin_bb;
4068 block_stmt_iterator si;
4069 edge se, re, ene;
4071 itype = type = TREE_TYPE (fd->loop.v);
4072 if (POINTER_TYPE_P (type))
4073 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
4075 entry_bb = region->entry;
4076 se = split_block (entry_bb, last_stmt (entry_bb));
4077 entry_bb = se->src;
4078 iter_part_bb = se->dest;
4079 cont_bb = region->cont;
4080 gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
4081 gcc_assert (BRANCH_EDGE (iter_part_bb)->dest
4082 == FALLTHRU_EDGE (cont_bb)->dest);
4083 seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
4084 body_bb = single_succ (seq_start_bb);
4085 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
4086 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
4087 fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
4088 trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
4089 exit_bb = region->exit;
4091 /* Trip and adjustment setup goes in ENTRY_BB. */
4092 si = bsi_last (entry_bb);
4093 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
4095 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
4096 t = fold_convert (itype, t);
4097 nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4098 true, BSI_SAME_STMT);
4100 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
4101 t = fold_convert (itype, t);
4102 threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4103 true, BSI_SAME_STMT);
4105 fd->loop.n1
4106 = force_gimple_operand_bsi (&si, fold_convert (type, fd->loop.n1),
4107 true, NULL_TREE, true, BSI_SAME_STMT);
4108 fd->loop.n2
4109 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.n2),
4110 true, NULL_TREE, true, BSI_SAME_STMT);
4111 fd->loop.step
4112 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.step),
4113 true, NULL_TREE, true, BSI_SAME_STMT);
4114 fd->chunk_size
4115 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->chunk_size),
4116 true, NULL_TREE, true, BSI_SAME_STMT);
4118 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
4119 t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
4120 t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
4121 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
4122 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
4123 t = fold_build2 (TRUNC_DIV_EXPR, itype,
4124 fold_build1 (NEGATE_EXPR, itype, t),
4125 fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
4126 else
4127 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
4128 t = fold_convert (itype, t);
4129 n = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4130 true, BSI_SAME_STMT);
4132 trip_var = create_tmp_var (itype, ".trip");
4133 if (gimple_in_ssa_p (cfun))
4135 add_referenced_var (trip_var);
4136 trip_init = make_ssa_name (trip_var, NULL_TREE);
4137 trip_main = make_ssa_name (trip_var, NULL_TREE);
4138 trip_back = make_ssa_name (trip_var, NULL_TREE);
4140 else
4142 trip_init = trip_var;
4143 trip_main = trip_var;
4144 trip_back = trip_var;
4147 t = build_gimple_modify_stmt (trip_init, build_int_cst (itype, 0));
4148 bsi_insert_before (&si, t, BSI_SAME_STMT);
4149 if (gimple_in_ssa_p (cfun))
4150 SSA_NAME_DEF_STMT (trip_init) = t;
4152 t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
4153 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4154 if (POINTER_TYPE_P (type))
4155 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4156 fold_convert (sizetype, t));
4157 else
4158 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
4159 v_extra = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4160 true, BSI_SAME_STMT);
4162 /* Remove the OMP_FOR. */
4163 bsi_remove (&si, true);
4165 /* Iteration space partitioning goes in ITER_PART_BB. */
4166 si = bsi_last (iter_part_bb);
4168 t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
4169 t = fold_build2 (PLUS_EXPR, itype, t, threadid);
4170 t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
4171 s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4172 false, BSI_CONTINUE_LINKING);
4174 t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
4175 t = fold_build2 (MIN_EXPR, itype, t, n);
4176 e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4177 false, BSI_CONTINUE_LINKING);
4179 t = build2 (LT_EXPR, boolean_type_node, s0, n);
4180 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
4181 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
4183 /* Setup code for sequential iteration goes in SEQ_START_BB. */
4184 si = bsi_start (seq_start_bb);
4186 t = fold_convert (itype, s0);
4187 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4188 if (POINTER_TYPE_P (type))
4189 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4190 fold_convert (sizetype, t));
4191 else
4192 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
4193 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
4194 false, BSI_CONTINUE_LINKING);
4195 t = build_gimple_modify_stmt (fd->loop.v, t);
4196 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
4197 if (gimple_in_ssa_p (cfun))
4198 SSA_NAME_DEF_STMT (fd->loop.v) = t;
4200 t = fold_convert (itype, e0);
4201 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4202 if (POINTER_TYPE_P (type))
4203 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4204 fold_convert (sizetype, t));
4205 else
4206 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
4207 e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4208 false, BSI_CONTINUE_LINKING);
4210 /* The code controlling the sequential loop goes in CONT_BB,
4211 replacing the OMP_CONTINUE. */
4212 si = bsi_last (cont_bb);
4213 cont = bsi_stmt (si);
4214 gcc_assert (TREE_CODE (cont) == OMP_CONTINUE);
4215 v_main = TREE_OPERAND (cont, 1);
4216 v_back = TREE_OPERAND (cont, 0);
4218 if (POINTER_TYPE_P (type))
4219 t = fold_build2 (POINTER_PLUS_EXPR, type, v_main,
4220 fold_convert (sizetype, fd->loop.step));
4221 else
4222 t = build2 (PLUS_EXPR, type, v_main, fd->loop.step);
4223 t = build_gimple_modify_stmt (v_back, t);
4224 bsi_insert_before (&si, t, BSI_SAME_STMT);
4225 if (gimple_in_ssa_p (cfun))
4226 SSA_NAME_DEF_STMT (v_back) = t;
4228 t = build2 (fd->loop.cond_code, boolean_type_node, v_back, e);
4229 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
4230 bsi_insert_before (&si, t, BSI_SAME_STMT);
4232 /* Remove OMP_CONTINUE. */
4233 bsi_remove (&si, true);
4235 /* Trip update code goes into TRIP_UPDATE_BB. */
4236 si = bsi_start (trip_update_bb);
4238 t = build_int_cst (itype, 1);
4239 t = build2 (PLUS_EXPR, itype, trip_main, t);
4240 t = build_gimple_modify_stmt (trip_back, t);
4241 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
4242 if (gimple_in_ssa_p (cfun))
4243 SSA_NAME_DEF_STMT (trip_back) = t;
4245 /* Replace the OMP_RETURN with a barrier, or nothing. */
4246 si = bsi_last (exit_bb);
4247 if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
4248 force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
4249 false, BSI_SAME_STMT);
4250 bsi_remove (&si, true);
4252 /* Connect the new blocks. */
4253 find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
4254 find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
4256 find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
4257 find_edge (cont_bb, trip_update_bb)->flags = EDGE_FALSE_VALUE;
4259 redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);
4261 if (gimple_in_ssa_p (cfun))
4263 /* When we redirect the edge from trip_update_bb to iter_part_bb, we
4264 remove arguments of the phi nodes in fin_bb. We need to create
4265 appropriate phi nodes in iter_part_bb instead. */
4266 se = single_pred_edge (fin_bb);
4267 re = single_succ_edge (trip_update_bb);
4268 ene = single_succ_edge (entry_bb);
4270 args = PENDING_STMT (re);
4271 PENDING_STMT (re) = NULL_TREE;
4272 for (phi = phi_nodes (fin_bb);
4273 phi && args;
4274 phi = PHI_CHAIN (phi), args = TREE_CHAIN (args))
4276 t = PHI_RESULT (phi);
4277 gcc_assert (t == TREE_PURPOSE (args));
4278 nphi = create_phi_node (t, iter_part_bb);
4279 SSA_NAME_DEF_STMT (t) = nphi;
4281 t = PHI_ARG_DEF_FROM_EDGE (phi, se);
4282 /* A special case -- fd->loop.v is not yet computed in
4283 iter_part_bb, we need to use v_extra instead. */
4284 if (t == fd->loop.v)
4285 t = v_extra;
4286 add_phi_arg (nphi, t, ene);
4287 add_phi_arg (nphi, TREE_VALUE (args), re);
4289 gcc_assert (!phi && !args);
4290 while ((phi = phi_nodes (fin_bb)) != NULL_TREE)
4291 remove_phi_node (phi, NULL_TREE, false);
4293 /* Make phi node for trip. */
4294 phi = create_phi_node (trip_main, iter_part_bb);
4295 SSA_NAME_DEF_STMT (trip_main) = phi;
4296 add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb));
4297 add_phi_arg (phi, trip_init, single_succ_edge (entry_bb));
4300 set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
4301 set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
4302 recompute_dominator (CDI_DOMINATORS, iter_part_bb));
4303 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
4304 recompute_dominator (CDI_DOMINATORS, fin_bb));
4305 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
4306 recompute_dominator (CDI_DOMINATORS, seq_start_bb));
4307 set_immediate_dominator (CDI_DOMINATORS, body_bb,
4308 recompute_dominator (CDI_DOMINATORS, body_bb));
4312 /* Expand the OpenMP loop defined by REGION. */
4314 static void
4315 expand_omp_for (struct omp_region *region)
4317 struct omp_for_data fd;
4318 struct omp_for_data_loop *loops;
4320 loops
4321 = (struct omp_for_data_loop *)
4322 alloca (TREE_VEC_LENGTH (OMP_FOR_INIT (last_stmt (region->entry)))
4323 * sizeof (struct omp_for_data_loop));
4325 extract_omp_for_data (last_stmt (region->entry), &fd, loops);
4326 region->sched_kind = fd.sched_kind;
4328 gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
4329 BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
4330 FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
4331 if (region->cont)
4333 gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
4334 BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
4335 FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
4338 if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
4339 && !fd.have_ordered
4340 && fd.collapse == 1
4341 && region->cont != NULL)
4343 if (fd.chunk_size == NULL)
4344 expand_omp_for_static_nochunk (region, &fd);
4345 else
4346 expand_omp_for_static_chunk (region, &fd);
4348 else
4350 int fn_index, start_ix, next_ix;
4352 gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
4353 fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
4354 ? 3 : fd.sched_kind;
4355 fn_index += fd.have_ordered * 4;
4356 start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
4357 next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
4358 if (fd.iter_type == long_long_unsigned_type_node)
4360 start_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_START
4361 - BUILT_IN_GOMP_LOOP_STATIC_START;
4362 next_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
4363 - BUILT_IN_GOMP_LOOP_STATIC_NEXT;
4365 expand_omp_for_generic (region, &fd, start_ix, next_ix);
4368 update_ssa (TODO_update_ssa_only_virtuals);
4372 /* Expand code for an OpenMP sections directive. In pseudo code, we generate
4374 v = GOMP_sections_start (n);
4376 switch (v)
4378 case 0:
4379 goto L2;
4380 case 1:
4381 section 1;
4382 goto L1;
4383 case 2:
4385 case n:
4387 default:
4388 abort ();
4391 v = GOMP_sections_next ();
4392 goto L0;
4394 reduction;
4396 If this is a combined parallel sections, replace the call to
4397 GOMP_sections_start with call to GOMP_sections_next. */
4399 static void
4400 expand_omp_sections (struct omp_region *region)
4402 tree label_vec, l1, l2, t, u, sections_stmt, vin, vmain, vnext, cont;
4403 unsigned i, casei, len;
4404 basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
4405 block_stmt_iterator si;
4406 edge_iterator ei;
4407 edge e;
4408 struct omp_region *inner;
4409 bool exit_reachable = region->cont != NULL;
4411 gcc_assert (exit_reachable == (region->exit != NULL));
4412 entry_bb = region->entry;
4413 l0_bb = single_succ (entry_bb);
4414 l1_bb = region->cont;
4415 l2_bb = region->exit;
4416 if (exit_reachable)
4418 if (single_pred (l2_bb) == l0_bb)
4419 l2 = tree_block_label (l2_bb);
4420 else
4422 /* This can happen if there are reductions. */
4423 len = EDGE_COUNT (l0_bb->succs);
4424 gcc_assert (len > 0);
4425 e = EDGE_SUCC (l0_bb, len - 1);
4426 si = bsi_last (e->dest);
4427 l2 = NULL_TREE;
4428 if (bsi_end_p (si) || TREE_CODE (bsi_stmt (si)) != OMP_SECTION)
4429 l2 = tree_block_label (e->dest);
4430 else
4431 FOR_EACH_EDGE (e, ei, l0_bb->succs)
4433 si = bsi_last (e->dest);
4434 if (bsi_end_p (si) || TREE_CODE (bsi_stmt (si)) != OMP_SECTION)
4436 l2 = tree_block_label (e->dest);
4437 break;
4441 default_bb = create_empty_bb (l1_bb->prev_bb);
4442 l1 = tree_block_label (l1_bb);
4444 else
4446 default_bb = create_empty_bb (l0_bb);
4447 l1 = NULL_TREE;
4448 l2 = tree_block_label (default_bb);
4451 /* We will build a switch() with enough cases for all the
4452 OMP_SECTION regions, a '0' case to handle the end of more work
4453 and a default case to abort if something goes wrong. */
4454 len = EDGE_COUNT (l0_bb->succs);
4455 label_vec = make_tree_vec (len + 1);
4457 /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
4458 OMP_SECTIONS statement. */
4459 si = bsi_last (entry_bb);
4460 sections_stmt = bsi_stmt (si);
4461 gcc_assert (TREE_CODE (sections_stmt) == OMP_SECTIONS);
4462 vin = OMP_SECTIONS_CONTROL (sections_stmt);
4463 if (!is_combined_parallel (region))
4465 /* If we are not inside a combined parallel+sections region,
4466 call GOMP_sections_start. */
4467 t = build_int_cst (unsigned_type_node,
4468 exit_reachable ? len - 1 : len);
4469 u = built_in_decls[BUILT_IN_GOMP_SECTIONS_START];
4470 t = build_call_expr (u, 1, t);
4472 else
4474 /* Otherwise, call GOMP_sections_next. */
4475 u = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
4476 t = build_call_expr (u, 0);
4478 t = build_gimple_modify_stmt (vin, t);
4479 bsi_insert_after (&si, t, BSI_SAME_STMT);
4480 if (gimple_in_ssa_p (cfun))
4481 SSA_NAME_DEF_STMT (vin) = t;
4482 bsi_remove (&si, true);
4484 /* The switch() statement replacing OMP_SECTIONS_SWITCH goes in L0_BB. */
4485 si = bsi_last (l0_bb);
4486 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SECTIONS_SWITCH);
4487 if (exit_reachable)
4489 cont = last_stmt (l1_bb);
4490 gcc_assert (TREE_CODE (cont) == OMP_CONTINUE);
4491 vmain = TREE_OPERAND (cont, 1);
4492 vnext = TREE_OPERAND (cont, 0);
4494 else
4496 vmain = vin;
4497 vnext = NULL_TREE;
4500 t = build3 (SWITCH_EXPR, void_type_node, vmain, NULL, label_vec);
4501 bsi_insert_after (&si, t, BSI_SAME_STMT);
4502 bsi_remove (&si, true);
4504 i = 0;
4505 if (exit_reachable)
4507 t = build3 (CASE_LABEL_EXPR, void_type_node,
4508 build_int_cst (unsigned_type_node, 0), NULL, l2);
4509 TREE_VEC_ELT (label_vec, 0) = t;
4510 i++;
4513 /* Convert each OMP_SECTION into a CASE_LABEL_EXPR. */
4514 for (inner = region->inner, casei = 1;
4515 inner;
4516 inner = inner->next, i++, casei++)
4518 basic_block s_entry_bb, s_exit_bb;
4520 /* Skip optional reduction region. */
4521 if (inner->type == OMP_ATOMIC_LOAD)
4523 --i;
4524 --casei;
4525 continue;
4528 s_entry_bb = inner->entry;
4529 s_exit_bb = inner->exit;
4531 t = tree_block_label (s_entry_bb);
4532 u = build_int_cst (unsigned_type_node, casei);
4533 u = build3 (CASE_LABEL_EXPR, void_type_node, u, NULL, t);
4534 TREE_VEC_ELT (label_vec, i) = u;
4536 si = bsi_last (s_entry_bb);
4537 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SECTION);
4538 gcc_assert (i < len || OMP_SECTION_LAST (bsi_stmt (si)));
4539 bsi_remove (&si, true);
4540 single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
4542 if (s_exit_bb == NULL)
4543 continue;
4545 si = bsi_last (s_exit_bb);
4546 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
4547 bsi_remove (&si, true);
4549 single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
4552 /* Error handling code goes in DEFAULT_BB. */
4553 t = tree_block_label (default_bb);
4554 u = build3 (CASE_LABEL_EXPR, void_type_node, NULL, NULL, t);
4555 TREE_VEC_ELT (label_vec, len) = u;
4556 make_edge (l0_bb, default_bb, 0);
4558 si = bsi_start (default_bb);
4559 t = build_call_expr (built_in_decls[BUILT_IN_TRAP], 0);
4560 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
4562 if (exit_reachable)
4564 /* Code to get the next section goes in L1_BB. */
4565 si = bsi_last (l1_bb);
4566 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
4568 t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT], 0);
4569 t = build_gimple_modify_stmt (vnext, t);
4570 bsi_insert_after (&si, t, BSI_SAME_STMT);
4571 if (gimple_in_ssa_p (cfun))
4572 SSA_NAME_DEF_STMT (vnext) = t;
4573 bsi_remove (&si, true);
4575 single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
4577 /* Cleanup function replaces OMP_RETURN in EXIT_BB. */
4578 si = bsi_last (l2_bb);
4579 if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
4580 t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END_NOWAIT];
4581 else
4582 t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END];
4583 t = build_call_expr (t, 0);
4584 bsi_insert_after (&si, t, BSI_SAME_STMT);
4585 bsi_remove (&si, true);
4588 set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
4592 /* Expand code for an OpenMP single directive. We've already expanded
4593 much of the code, here we simply place the GOMP_barrier call. */
4595 static void
4596 expand_omp_single (struct omp_region *region)
4598 basic_block entry_bb, exit_bb;
4599 block_stmt_iterator si;
4600 bool need_barrier = false;
4602 entry_bb = region->entry;
4603 exit_bb = region->exit;
4605 si = bsi_last (entry_bb);
4606 /* The terminal barrier at the end of a GOMP_single_copy sequence cannot
4607 be removed. We need to ensure that the thread that entered the single
4608 does not exit before the data is copied out by the other threads. */
4609 if (find_omp_clause (OMP_SINGLE_CLAUSES (bsi_stmt (si)),
4610 OMP_CLAUSE_COPYPRIVATE))
4611 need_barrier = true;
4612 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE);
4613 bsi_remove (&si, true);
4614 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4616 si = bsi_last (exit_bb);
4617 if (!OMP_RETURN_NOWAIT (bsi_stmt (si)) || need_barrier)
4618 force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
4619 false, BSI_SAME_STMT);
4620 bsi_remove (&si, true);
4621 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
4625 /* Generic expansion for OpenMP synchronization directives: master,
4626 ordered and critical. All we need to do here is remove the entry
4627 and exit markers for REGION. */
4629 static void
4630 expand_omp_synch (struct omp_region *region)
4632 basic_block entry_bb, exit_bb;
4633 block_stmt_iterator si;
4635 entry_bb = region->entry;
4636 exit_bb = region->exit;
4638 si = bsi_last (entry_bb);
4639 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE
4640 || TREE_CODE (bsi_stmt (si)) == OMP_MASTER
4641 || TREE_CODE (bsi_stmt (si)) == OMP_ORDERED
4642 || TREE_CODE (bsi_stmt (si)) == OMP_CRITICAL);
4643 bsi_remove (&si, true);
4644 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4646 if (exit_bb)
4648 si = bsi_last (exit_bb);
4649 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
4650 bsi_remove (&si, true);
4651 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
4655 /* A subroutine of expand_omp_atomic. Attempt to implement the atomic
4656 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
4657 size of the data type, and thus usable to find the index of the builtin
4658 decl. Returns false if the expression is not of the proper form. */
4660 static bool
4661 expand_omp_atomic_fetch_op (basic_block load_bb,
4662 tree addr, tree loaded_val,
4663 tree stored_val, int index)
4665 enum built_in_function base;
4666 tree decl, itype, call;
4667 enum insn_code *optab;
4668 tree rhs;
4669 basic_block store_bb = single_succ (load_bb);
4670 block_stmt_iterator bsi;
4671 tree stmt;
4673 /* We expect to find the following sequences:
4675 load_bb:
4676 OMP_ATOMIC_LOAD (tmp, mem)
4678 store_bb:
4679 val = tmp OP something; (or: something OP tmp)
4680 OMP_STORE (val)
4682 ???FIXME: Allow a more flexible sequence.
4683 Perhaps use data flow to pick the statements.
4687 bsi = bsi_after_labels (store_bb);
4688 stmt = bsi_stmt (bsi);
4689 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
4690 return false;
4691 bsi_next (&bsi);
4692 if (TREE_CODE (bsi_stmt (bsi)) != OMP_ATOMIC_STORE)
4693 return false;
4695 if (!operand_equal_p (GIMPLE_STMT_OPERAND (stmt, 0), stored_val, 0))
4696 return false;
4698 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
4700 /* Check for one of the supported fetch-op operations. */
4701 switch (TREE_CODE (rhs))
4703 case PLUS_EXPR:
4704 case POINTER_PLUS_EXPR:
4705 base = BUILT_IN_FETCH_AND_ADD_N;
4706 optab = sync_add_optab;
4707 break;
4708 case MINUS_EXPR:
4709 base = BUILT_IN_FETCH_AND_SUB_N;
4710 optab = sync_add_optab;
4711 break;
4712 case BIT_AND_EXPR:
4713 base = BUILT_IN_FETCH_AND_AND_N;
4714 optab = sync_and_optab;
4715 break;
4716 case BIT_IOR_EXPR:
4717 base = BUILT_IN_FETCH_AND_OR_N;
4718 optab = sync_ior_optab;
4719 break;
4720 case BIT_XOR_EXPR:
4721 base = BUILT_IN_FETCH_AND_XOR_N;
4722 optab = sync_xor_optab;
4723 break;
4724 default:
4725 return false;
4727 /* Make sure the expression is of the proper form. */
4728 if (operand_equal_p (TREE_OPERAND (rhs, 0), loaded_val, 0))
4729 rhs = TREE_OPERAND (rhs, 1);
4730 else if (commutative_tree_code (TREE_CODE (rhs))
4731 && operand_equal_p (TREE_OPERAND (rhs, 1), loaded_val, 0))
4732 rhs = TREE_OPERAND (rhs, 0);
4733 else
4734 return false;
4736 decl = built_in_decls[base + index + 1];
4737 itype = TREE_TYPE (TREE_TYPE (decl));
4739 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
4740 return false;
4742 bsi = bsi_last (load_bb);
4743 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_LOAD);
4744 call = build_call_expr (decl, 2, addr, fold_convert (itype, rhs));
4745 force_gimple_operand_bsi (&bsi, call, true, NULL_TREE, true, BSI_SAME_STMT);
4746 bsi_remove (&bsi, true);
4748 bsi = bsi_last (store_bb);
4749 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_STORE);
4750 bsi_remove (&bsi, true);
4751 bsi = bsi_last (store_bb);
4752 bsi_remove (&bsi, true);
4754 if (gimple_in_ssa_p (cfun))
4755 update_ssa (TODO_update_ssa_no_phi);
4757 return true;
4760 /* A subroutine of expand_omp_atomic. Implement the atomic operation as:
4762 oldval = *addr;
4763 repeat:
4764 newval = rhs; // with oldval replacing *addr in rhs
4765 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
4766 if (oldval != newval)
4767 goto repeat;
4769 INDEX is log2 of the size of the data type, and thus usable to find the
4770 index of the builtin decl. */
4772 static bool
4773 expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
4774 tree addr, tree loaded_val, tree stored_val,
4775 int index)
4777 tree loadedi, storedi, initial, new_storedi, old_vali;
4778 tree type, itype, cmpxchg, iaddr;
4779 block_stmt_iterator bsi;
4780 basic_block loop_header = single_succ (load_bb);
4781 tree phi, x;
4782 edge e;
4784 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
4785 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
4786 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
4788 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
4789 return false;
4791 /* Load the initial value, replacing the OMP_ATOMIC_LOAD. */
4792 bsi = bsi_last (load_bb);
4793 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_LOAD);
4794 /* For floating-point values, we'll need to view-convert them to integers
4795 so that we can perform the atomic compare and swap. Simplify the
4796 following code by always setting up the "i"ntegral variables. */
4797 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
4799 iaddr = create_tmp_var (build_pointer_type (itype), NULL);
4800 x = build_gimple_modify_stmt (iaddr,
4801 fold_convert (TREE_TYPE (iaddr), addr));
4802 force_gimple_operand_bsi (&bsi, x, true, NULL_TREE,
4803 true, BSI_SAME_STMT);
4804 DECL_NO_TBAA_P (iaddr) = 1;
4805 DECL_POINTER_ALIAS_SET (iaddr) = 0;
4806 loadedi = create_tmp_var (itype, NULL);
4807 if (gimple_in_ssa_p (cfun))
4809 add_referenced_var (iaddr);
4810 add_referenced_var (loadedi);
4811 loadedi = make_ssa_name (loadedi, NULL);
4814 else
4816 iaddr = addr;
4817 loadedi = loaded_val;
4819 initial = force_gimple_operand_bsi (&bsi, build_fold_indirect_ref (iaddr),
4820 true, NULL_TREE, true, BSI_SAME_STMT);
4822 /* Move the value to the LOADEDI temporary. */
4823 if (gimple_in_ssa_p (cfun))
4825 gcc_assert (phi_nodes (loop_header) == NULL_TREE);
4826 phi = create_phi_node (loadedi, loop_header);
4827 SSA_NAME_DEF_STMT (loadedi) = phi;
4828 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
4829 initial);
4831 else
4832 bsi_insert_before (&bsi,
4833 build_gimple_modify_stmt (loadedi, initial),
4834 BSI_SAME_STMT);
4835 if (loadedi != loaded_val)
4837 block_stmt_iterator bsi2;
4839 x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
4840 bsi2 = bsi_start (loop_header);
4841 if (gimple_in_ssa_p (cfun))
4843 x = force_gimple_operand_bsi (&bsi2, x, true, NULL_TREE,
4844 true, BSI_SAME_STMT);
4845 x = build_gimple_modify_stmt (loaded_val, x);
4846 bsi_insert_before (&bsi2, x, BSI_SAME_STMT);
4847 SSA_NAME_DEF_STMT (loaded_val) = x;
4849 else
4851 x = build_gimple_modify_stmt (loaded_val, x);
4852 force_gimple_operand_bsi (&bsi2, x, true, NULL_TREE,
4853 true, BSI_SAME_STMT);
4856 bsi_remove (&bsi, true);
4858 bsi = bsi_last (store_bb);
4859 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_STORE);
4861 if (iaddr == addr)
4862 storedi = stored_val;
4863 else
4864 storedi =
4865 force_gimple_operand_bsi (&bsi,
4866 build1 (VIEW_CONVERT_EXPR, itype,
4867 stored_val), true, NULL_TREE, true,
4868 BSI_SAME_STMT);
4870 /* Build the compare&swap statement. */
4871 new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
4872 new_storedi = force_gimple_operand_bsi (&bsi,
4873 fold_convert (itype, new_storedi),
4874 true, NULL_TREE,
4875 true, BSI_SAME_STMT);
4877 if (gimple_in_ssa_p (cfun))
4878 old_vali = loadedi;
4879 else
4881 old_vali = create_tmp_var (itype, NULL);
4882 if (gimple_in_ssa_p (cfun))
4883 add_referenced_var (old_vali);
4884 x = build_gimple_modify_stmt (old_vali, loadedi);
4885 force_gimple_operand_bsi (&bsi, x, true, NULL_TREE,
4886 true, BSI_SAME_STMT);
4888 x = build_gimple_modify_stmt (loadedi, new_storedi);
4889 force_gimple_operand_bsi (&bsi, x, true, NULL_TREE,
4890 true, BSI_SAME_STMT);
4893 /* Note that we always perform the comparison as an integer, even for
4894 floating point. This allows the atomic operation to properly
4895 succeed even with NaNs and -0.0. */
4896 x = build2 (NE_EXPR, boolean_type_node, new_storedi, old_vali);
4897 x = build3 (COND_EXPR, void_type_node, x, NULL_TREE, NULL_TREE);
4898 bsi_insert_before (&bsi, x, BSI_SAME_STMT);
4900 /* Update cfg. */
4901 e = single_succ_edge (store_bb);
4902 e->flags &= ~EDGE_FALLTHRU;
4903 e->flags |= EDGE_FALSE_VALUE;
4905 e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);
4907 /* Copy the new value to loadedi (we already did that before the condition
4908 if we are not in SSA). */
4909 if (gimple_in_ssa_p (cfun))
4911 phi = phi_nodes (loop_header);
4912 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
4915 /* Remove OMP_ATOMIC_STORE. */
4916 bsi_remove (&bsi, true);
4918 if (gimple_in_ssa_p (cfun))
4919 update_ssa (TODO_update_ssa_no_phi);
4921 return true;
4924 /* A subroutine of expand_omp_atomic. Implement the atomic operation as:
4926 GOMP_atomic_start ();
4927 *addr = rhs;
4928 GOMP_atomic_end ();
4930 The result is not globally atomic, but works so long as all parallel
4931 references are within #pragma omp atomic directives. According to
4932 responses received from omp@openmp.org, appears to be within spec.
4933 Which makes sense, since that's how several other compilers handle
4934 this situation as well.
4935 LOADED_VAL and ADDR are the operands of OMP_ATOMIC_LOAD we're expanding.
4936 STORED_VAL is the operand of the matching OMP_ATOMIC_STORE.
4938 We replace
4939 OMP_ATOMIC_LOAD (loaded_val, addr) with
4940 loaded_val = *addr;
4942 and replace
4943 OMP_ATOMIC_ATORE (stored_val) with
4944 *addr = stored_val;
4947 static bool
4948 expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
4949 tree addr, tree loaded_val, tree stored_val)
4951 block_stmt_iterator bsi;
4952 tree t;
4954 bsi = bsi_last (load_bb);
4955 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_LOAD);
4957 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
4958 t = build_function_call_expr (t, 0);
4959 force_gimple_operand_bsi (&bsi, t, true, NULL_TREE, true, BSI_SAME_STMT);
4961 t = build_gimple_modify_stmt (loaded_val, build_fold_indirect_ref (addr));
4962 if (gimple_in_ssa_p (cfun))
4963 SSA_NAME_DEF_STMT (loaded_val) = t;
4964 bsi_insert_before (&bsi, t, BSI_SAME_STMT);
4965 bsi_remove (&bsi, true);
4967 bsi = bsi_last (store_bb);
4968 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_STORE);
4970 t = build_gimple_modify_stmt (build_fold_indirect_ref (unshare_expr (addr)),
4971 stored_val);
4972 bsi_insert_before (&bsi, t, BSI_SAME_STMT);
4974 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
4975 t = build_function_call_expr (t, 0);
4976 force_gimple_operand_bsi (&bsi, t, true, NULL_TREE, true, BSI_SAME_STMT);
4977 bsi_remove (&bsi, true);
4979 if (gimple_in_ssa_p (cfun))
4980 update_ssa (TODO_update_ssa_no_phi);
4981 return true;
4984 /* Expand an OMP_ATOMIC statement. We try to expand
4985 using expand_omp_atomic_fetch_op. If it failed, we try to
4986 call expand_omp_atomic_pipeline, and if it fails too, the
4987 ultimate fallback is wrapping the operation in a mutex
4988 (expand_omp_atomic_mutex). REGION is the atomic region built
4989 by build_omp_regions_1(). */
4991 static void
4992 expand_omp_atomic (struct omp_region *region)
4994 basic_block load_bb = region->entry, store_bb = region->exit;
4995 tree load = last_stmt (load_bb), store = last_stmt (store_bb);
4996 tree loaded_val = TREE_OPERAND (load, 0);
4997 tree addr = TREE_OPERAND (load, 1);
4998 tree stored_val = TREE_OPERAND (store, 0);
4999 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5000 HOST_WIDE_INT index;
5002 /* Make sure the type is one of the supported sizes. */
5003 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5004 index = exact_log2 (index);
5005 if (index >= 0 && index <= 4)
5007 unsigned int align = TYPE_ALIGN_UNIT (type);
5009 /* __sync builtins require strict data alignment. */
5010 if (exact_log2 (align) >= index)
5012 /* When possible, use specialized atomic update functions. */
5013 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5014 && store_bb == single_succ (load_bb))
5016 if (expand_omp_atomic_fetch_op (load_bb, addr,
5017 loaded_val, stored_val, index))
5018 return;
5021 /* If we don't have specialized __sync builtins, try and implement
5022 as a compare and swap loop. */
5023 if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
5024 loaded_val, stored_val, index))
5025 return;
5029 /* The ultimate fallback is wrapping the operation in a mutex. */
5030 expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
5034 /* Expand the parallel region tree rooted at REGION. Expansion
5035 proceeds in depth-first order. Innermost regions are expanded
5036 first. This way, parallel regions that require a new function to
5037 be created (e.g., OMP_PARALLEL) can be expanded without having any
5038 internal dependencies in their body. */
5040 static void
5041 expand_omp (struct omp_region *region)
5043 while (region)
5045 location_t saved_location;
5047 /* First, determine whether this is a combined parallel+workshare
5048 region. */
5049 if (region->type == OMP_PARALLEL)
5050 determine_parallel_type (region);
5052 if (region->inner)
5053 expand_omp (region->inner);
5055 saved_location = input_location;
5056 if (EXPR_HAS_LOCATION (last_stmt (region->entry)))
5057 input_location = EXPR_LOCATION (last_stmt (region->entry));
5059 switch (region->type)
5061 case OMP_PARALLEL:
5062 expand_omp_taskreg (region);
5063 break;
5065 case OMP_TASK:
5066 expand_omp_taskreg (region);
5067 break;
5069 case OMP_FOR:
5070 expand_omp_for (region);
5071 break;
5073 case OMP_SECTIONS:
5074 expand_omp_sections (region);
5075 break;
5077 case OMP_SECTION:
5078 /* Individual omp sections are handled together with their
5079 parent OMP_SECTIONS region. */
5080 break;
5082 case OMP_SINGLE:
5083 expand_omp_single (region);
5084 break;
5086 case OMP_MASTER:
5087 case OMP_ORDERED:
5088 case OMP_CRITICAL:
5089 expand_omp_synch (region);
5090 break;
5092 case OMP_ATOMIC_LOAD:
5093 expand_omp_atomic (region);
5094 break;
5096 default:
5097 gcc_unreachable ();
5100 input_location = saved_location;
5101 region = region->next;
5106 /* Helper for build_omp_regions. Scan the dominator tree starting at
5107 block BB. PARENT is the region that contains BB. If SINGLE_TREE is
5108 true, the function ends once a single tree is built (otherwise, whole
5109 forest of OMP constructs may be built). */
5111 static void
5112 build_omp_regions_1 (basic_block bb, struct omp_region *parent,
5113 bool single_tree)
5115 block_stmt_iterator si;
5116 tree stmt;
5117 basic_block son;
5119 si = bsi_last (bb);
5120 if (!bsi_end_p (si) && OMP_DIRECTIVE_P (bsi_stmt (si)))
5122 struct omp_region *region;
5123 enum tree_code code;
5125 stmt = bsi_stmt (si);
5126 code = TREE_CODE (stmt);
5127 if (code == OMP_RETURN)
5129 /* STMT is the return point out of region PARENT. Mark it
5130 as the exit point and make PARENT the immediately
5131 enclosing region. */
5132 gcc_assert (parent);
5133 region = parent;
5134 region->exit = bb;
5135 parent = parent->outer;
5137 else if (code == OMP_ATOMIC_STORE)
5139 /* OMP_ATOMIC_STORE is analogous to OMP_RETURN, but matches with
5140 OMP_ATOMIC_LOAD. */
5141 gcc_assert (parent);
5142 gcc_assert (parent->type == OMP_ATOMIC_LOAD);
5143 region = parent;
5144 region->exit = bb;
5145 parent = parent->outer;
5148 else if (code == OMP_CONTINUE)
5150 gcc_assert (parent);
5151 parent->cont = bb;
5153 else if (code == OMP_SECTIONS_SWITCH)
5155 /* OMP_SECTIONS_SWITCH is part of OMP_SECTIONS, and we do nothing for
5156 it. */ ;
5158 else
5160 /* Otherwise, this directive becomes the parent for a new
5161 region. */
5162 region = new_omp_region (bb, code, parent);
5163 parent = region;
5167 if (single_tree && !parent)
5168 return;
5170 for (son = first_dom_son (CDI_DOMINATORS, bb);
5171 son;
5172 son = next_dom_son (CDI_DOMINATORS, son))
5173 build_omp_regions_1 (son, parent, single_tree);
5176 /* Builds the tree of OMP regions rooted at ROOT, storing it to
5177 root_omp_region. */
5179 static void
5180 build_omp_regions_root (basic_block root)
5182 gcc_assert (root_omp_region == NULL);
5183 build_omp_regions_1 (root, NULL, true);
5184 gcc_assert (root_omp_region != NULL);
5187 /* Expands omp construct (and its subconstructs) starting in HEAD. */
5189 void
5190 omp_expand_local (basic_block head)
5192 build_omp_regions_root (head);
5193 if (dump_file && (dump_flags & TDF_DETAILS))
5195 fprintf (dump_file, "\nOMP region tree\n\n");
5196 dump_omp_region (dump_file, root_omp_region, 0);
5197 fprintf (dump_file, "\n");
5200 remove_exit_barriers (root_omp_region);
5201 expand_omp (root_omp_region);
5203 free_omp_regions ();
5206 /* Scan the CFG and build a tree of OMP regions. Return the root of
5207 the OMP region tree. */
5209 static void
5210 build_omp_regions (void)
5212 gcc_assert (root_omp_region == NULL);
5213 calculate_dominance_info (CDI_DOMINATORS);
5214 build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
5218 /* Main entry point for expanding OMP-GIMPLE into runtime calls. */
5220 static unsigned int
5221 execute_expand_omp (void)
5223 build_omp_regions ();
5225 if (!root_omp_region)
5226 return 0;
5228 if (dump_file)
5230 fprintf (dump_file, "\nOMP region tree\n\n");
5231 dump_omp_region (dump_file, root_omp_region, 0);
5232 fprintf (dump_file, "\n");
5235 remove_exit_barriers (root_omp_region);
5237 expand_omp (root_omp_region);
5239 cleanup_tree_cfg ();
5241 free_omp_regions ();
5243 return 0;
5246 /* OMP expansion -- the default pass, run before creation of SSA form. */
5248 static bool
5249 gate_expand_omp (void)
5251 return (flag_openmp != 0 && errorcount == 0);
5254 struct gimple_opt_pass pass_expand_omp =
5257 GIMPLE_PASS,
5258 "ompexp", /* name */
5259 gate_expand_omp, /* gate */
5260 execute_expand_omp, /* execute */
5261 NULL, /* sub */
5262 NULL, /* next */
5263 0, /* static_pass_number */
5264 0, /* tv_id */
5265 PROP_gimple_any, /* properties_required */
5266 PROP_gimple_lomp, /* properties_provided */
5267 0, /* properties_destroyed */
5268 0, /* todo_flags_start */
5269 TODO_dump_func /* todo_flags_finish */
5273 /* Routines to lower OpenMP directives into OMP-GIMPLE. */
5275 /* Lower the OpenMP sections directive in *STMT_P. */
5277 static void
5278 lower_omp_sections (tree *stmt_p, omp_context *ctx)
5280 tree new_stmt, stmt, body, bind, block, ilist, olist, new_body, control;
5281 tree t, dlist;
5282 tree_stmt_iterator tsi;
5283 unsigned i, len;
5284 struct gimplify_ctx gctx;
5286 stmt = *stmt_p;
5288 push_gimplify_context (&gctx);
5290 dlist = NULL;
5291 ilist = NULL;
5292 lower_rec_input_clauses (OMP_SECTIONS_CLAUSES (stmt), &ilist, &dlist, ctx);
5294 tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
5295 for (len = 0; !tsi_end_p (tsi); len++, tsi_next (&tsi))
5296 continue;
5298 tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
5299 body = alloc_stmt_list ();
5300 for (i = 0; i < len; i++, tsi_next (&tsi))
5302 omp_context *sctx;
5303 tree sec_start, sec_end;
5305 sec_start = tsi_stmt (tsi);
5306 sctx = maybe_lookup_ctx (sec_start);
5307 gcc_assert (sctx);
5309 append_to_statement_list (sec_start, &body);
5311 lower_omp (&OMP_SECTION_BODY (sec_start), sctx);
5312 append_to_statement_list (OMP_SECTION_BODY (sec_start), &body);
5313 OMP_SECTION_BODY (sec_start) = NULL;
5315 if (i == len - 1)
5317 tree l = alloc_stmt_list ();
5318 lower_lastprivate_clauses (OMP_SECTIONS_CLAUSES (stmt), NULL,
5319 &l, ctx);
5320 append_to_statement_list (l, &body);
5321 OMP_SECTION_LAST (sec_start) = 1;
5324 sec_end = make_node (OMP_RETURN);
5325 append_to_statement_list (sec_end, &body);
5328 block = make_node (BLOCK);
5329 bind = build3 (BIND_EXPR, void_type_node, NULL, body, block);
5331 olist = NULL_TREE;
5332 lower_reduction_clauses (OMP_SECTIONS_CLAUSES (stmt), &olist, ctx);
5334 block = make_node (BLOCK);
5335 new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
5336 TREE_SIDE_EFFECTS (new_stmt) = 1;
5338 pop_gimplify_context (new_stmt);
5340 BIND_EXPR_VARS (new_stmt)
5341 = chainon (BIND_EXPR_VARS (new_stmt), ctx->block_vars);
5342 BLOCK_VARS (block) = BIND_EXPR_VARS (new_stmt);
5343 if (BLOCK_VARS (block))
5344 TREE_USED (block) = 1;
5346 new_body = alloc_stmt_list ();
5347 append_to_statement_list (ilist, &new_body);
5348 append_to_statement_list (stmt, &new_body);
5349 append_to_statement_list (make_node (OMP_SECTIONS_SWITCH), &new_body);
5350 append_to_statement_list (bind, &new_body);
5352 control = create_tmp_var (unsigned_type_node, ".section");
5353 t = build2 (OMP_CONTINUE, void_type_node, control, control);
5354 OMP_SECTIONS_CONTROL (stmt) = control;
5355 append_to_statement_list (t, &new_body);
5357 append_to_statement_list (olist, &new_body);
5358 append_to_statement_list (dlist, &new_body);
5360 maybe_catch_exception (&new_body);
5362 t = make_node (OMP_RETURN);
5363 OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SECTIONS_CLAUSES (stmt),
5364 OMP_CLAUSE_NOWAIT);
5365 append_to_statement_list (t, &new_body);
5367 BIND_EXPR_BODY (new_stmt) = new_body;
5368 OMP_SECTIONS_BODY (stmt) = NULL;
5370 *stmt_p = new_stmt;
5374 /* A subroutine of lower_omp_single. Expand the simple form of
5375 an OMP_SINGLE, without a copyprivate clause:
5377 if (GOMP_single_start ())
5378 BODY;
5379 [ GOMP_barrier (); ] -> unless 'nowait' is present.
5381 FIXME. It may be better to delay expanding the logic of this until
5382 pass_expand_omp. The expanded logic may make the job more difficult
5383 to a synchronization analysis pass. */
5385 static void
5386 lower_omp_single_simple (tree single_stmt, tree *pre_p)
5388 tree t;
5390 t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_START], 0);
5391 if (TREE_TYPE (t) != boolean_type_node)
5392 t = fold_build2 (NE_EXPR, boolean_type_node,
5393 t, build_int_cst (TREE_TYPE (t), 0));
5394 t = build3 (COND_EXPR, void_type_node, t,
5395 OMP_SINGLE_BODY (single_stmt), NULL);
5396 gimplify_and_add (t, pre_p);
5400 /* A subroutine of lower_omp_single. Expand the simple form of
5401 an OMP_SINGLE, with a copyprivate clause:
5403 #pragma omp single copyprivate (a, b, c)
5405 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5408 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5410 BODY;
5411 copyout.a = a;
5412 copyout.b = b;
5413 copyout.c = c;
5414 GOMP_single_copy_end (&copyout);
5416 else
5418 a = copyout_p->a;
5419 b = copyout_p->b;
5420 c = copyout_p->c;
5422 GOMP_barrier ();
5425 FIXME. It may be better to delay expanding the logic of this until
5426 pass_expand_omp. The expanded logic may make the job more difficult
5427 to a synchronization analysis pass. */
5429 static void
5430 lower_omp_single_copy (tree single_stmt, tree *pre_p, omp_context *ctx)
5432 tree ptr_type, t, l0, l1, l2, copyin_seq;
5434 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
5436 ptr_type = build_pointer_type (ctx->record_type);
5437 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
5439 l0 = create_artificial_label ();
5440 l1 = create_artificial_label ();
5441 l2 = create_artificial_label ();
5443 t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START], 0);
5444 t = fold_convert (ptr_type, t);
5445 t = build_gimple_modify_stmt (ctx->receiver_decl, t);
5446 gimplify_and_add (t, pre_p);
5448 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
5449 build_int_cst (ptr_type, 0));
5450 t = build3 (COND_EXPR, void_type_node, t,
5451 build_and_jump (&l0), build_and_jump (&l1));
5452 gimplify_and_add (t, pre_p);
5454 t = build1 (LABEL_EXPR, void_type_node, l0);
5455 gimplify_and_add (t, pre_p);
5457 append_to_statement_list (OMP_SINGLE_BODY (single_stmt), pre_p);
5459 copyin_seq = NULL;
5460 lower_copyprivate_clauses (OMP_SINGLE_CLAUSES (single_stmt), pre_p,
5461 &copyin_seq, ctx);
5463 t = build_fold_addr_expr (ctx->sender_decl);
5464 t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_END], 1, t);
5465 gimplify_and_add (t, pre_p);
5467 t = build_and_jump (&l2);
5468 gimplify_and_add (t, pre_p);
5470 t = build1 (LABEL_EXPR, void_type_node, l1);
5471 gimplify_and_add (t, pre_p);
5473 append_to_statement_list (copyin_seq, pre_p);
5475 t = build1 (LABEL_EXPR, void_type_node, l2);
5476 gimplify_and_add (t, pre_p);
5480 /* Expand code for an OpenMP single directive. */
5482 static void
5483 lower_omp_single (tree *stmt_p, omp_context *ctx)
5485 tree t, bind, block, single_stmt = *stmt_p, dlist;
5486 struct gimplify_ctx gctx;
5488 push_gimplify_context (&gctx);
5490 block = make_node (BLOCK);
5491 *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
5492 TREE_SIDE_EFFECTS (bind) = 1;
5494 lower_rec_input_clauses (OMP_SINGLE_CLAUSES (single_stmt),
5495 &BIND_EXPR_BODY (bind), &dlist, ctx);
5496 lower_omp (&OMP_SINGLE_BODY (single_stmt), ctx);
5498 append_to_statement_list (single_stmt, &BIND_EXPR_BODY (bind));
5500 if (ctx->record_type)
5501 lower_omp_single_copy (single_stmt, &BIND_EXPR_BODY (bind), ctx);
5502 else
5503 lower_omp_single_simple (single_stmt, &BIND_EXPR_BODY (bind));
5505 OMP_SINGLE_BODY (single_stmt) = NULL;
5507 append_to_statement_list (dlist, &BIND_EXPR_BODY (bind));
5509 maybe_catch_exception (&BIND_EXPR_BODY (bind));
5511 t = make_node (OMP_RETURN);
5512 OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SINGLE_CLAUSES (single_stmt),
5513 OMP_CLAUSE_NOWAIT);
5514 append_to_statement_list (t, &BIND_EXPR_BODY (bind));
5516 pop_gimplify_context (bind);
5518 BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
5519 BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
5520 if (BLOCK_VARS (block))
5521 TREE_USED (block) = 1;
5525 /* Expand code for an OpenMP master directive. */
5527 static void
5528 lower_omp_master (tree *stmt_p, omp_context *ctx)
5530 tree bind, block, stmt = *stmt_p, lab = NULL, x;
5531 struct gimplify_ctx gctx;
5533 push_gimplify_context (&gctx);
5535 block = make_node (BLOCK);
5536 *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
5537 TREE_SIDE_EFFECTS (bind) = 1;
5539 append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
5541 x = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
5542 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
5543 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
5544 gimplify_and_add (x, &BIND_EXPR_BODY (bind));
5546 lower_omp (&OMP_MASTER_BODY (stmt), ctx);
5547 maybe_catch_exception (&OMP_MASTER_BODY (stmt));
5548 append_to_statement_list (OMP_MASTER_BODY (stmt), &BIND_EXPR_BODY (bind));
5549 OMP_MASTER_BODY (stmt) = NULL;
5551 x = build1 (LABEL_EXPR, void_type_node, lab);
5552 gimplify_and_add (x, &BIND_EXPR_BODY (bind));
5554 x = make_node (OMP_RETURN);
5555 OMP_RETURN_NOWAIT (x) = 1;
5556 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
5558 pop_gimplify_context (bind);
5560 BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
5561 BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
5565 /* Expand code for an OpenMP ordered directive. */
5567 static void
5568 lower_omp_ordered (tree *stmt_p, omp_context *ctx)
5570 tree bind, block, stmt = *stmt_p, x;
5571 struct gimplify_ctx gctx;
5573 push_gimplify_context (&gctx);
5575 block = make_node (BLOCK);
5576 *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
5577 TREE_SIDE_EFFECTS (bind) = 1;
5579 append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
5581 x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ORDERED_START], 0);
5582 gimplify_and_add (x, &BIND_EXPR_BODY (bind));
5584 lower_omp (&OMP_ORDERED_BODY (stmt), ctx);
5585 maybe_catch_exception (&OMP_ORDERED_BODY (stmt));
5586 append_to_statement_list (OMP_ORDERED_BODY (stmt), &BIND_EXPR_BODY (bind));
5587 OMP_ORDERED_BODY (stmt) = NULL;
5589 x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ORDERED_END], 0);
5590 gimplify_and_add (x, &BIND_EXPR_BODY (bind));
5592 x = make_node (OMP_RETURN);
5593 OMP_RETURN_NOWAIT (x) = 1;
5594 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
5596 pop_gimplify_context (bind);
5598 BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
5599 BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
5603 /* Gimplify an OMP_CRITICAL statement. This is a relatively simple
5604 substitution of a couple of function calls. But in the NAMED case,
5605 requires that languages coordinate a symbol name. It is therefore
5606 best put here in common code. */
5608 static GTY((param1_is (tree), param2_is (tree)))
5609 splay_tree critical_name_mutexes;
5611 static void
5612 lower_omp_critical (tree *stmt_p, omp_context *ctx)
5614 tree bind, block, stmt = *stmt_p;
5615 tree t, lock, unlock, name;
5616 struct gimplify_ctx gctx;
5618 name = OMP_CRITICAL_NAME (stmt);
5619 if (name)
5621 tree decl;
5622 splay_tree_node n;
5624 if (!critical_name_mutexes)
5625 critical_name_mutexes
5626 = splay_tree_new_ggc (splay_tree_compare_pointers);
5628 n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
5629 if (n == NULL)
5631 char *new_str;
5633 decl = create_tmp_var_raw (ptr_type_node, NULL);
5635 new_str = ACONCAT ((".gomp_critical_user_",
5636 IDENTIFIER_POINTER (name), NULL));
5637 DECL_NAME (decl) = get_identifier (new_str);
5638 TREE_PUBLIC (decl) = 1;
5639 TREE_STATIC (decl) = 1;
5640 DECL_COMMON (decl) = 1;
5641 DECL_ARTIFICIAL (decl) = 1;
5642 DECL_IGNORED_P (decl) = 1;
5643 varpool_finalize_decl (decl);
5645 splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
5646 (splay_tree_value) decl);
5648 else
5649 decl = (tree) n->value;
5651 lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_START];
5652 lock = build_call_expr (lock, 1, build_fold_addr_expr (decl));
5654 unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_END];
5655 unlock = build_call_expr (unlock, 1, build_fold_addr_expr (decl));
5657 else
5659 lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_START];
5660 lock = build_call_expr (lock, 0);
5662 unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_END];
5663 unlock = build_call_expr (unlock, 0);
5666 push_gimplify_context (&gctx);
5668 block = make_node (BLOCK);
5669 *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
5670 TREE_SIDE_EFFECTS (bind) = 1;
5672 append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
5674 gimplify_and_add (lock, &BIND_EXPR_BODY (bind));
5676 lower_omp (&OMP_CRITICAL_BODY (stmt), ctx);
5677 maybe_catch_exception (&OMP_CRITICAL_BODY (stmt));
5678 append_to_statement_list (OMP_CRITICAL_BODY (stmt), &BIND_EXPR_BODY (bind));
5679 OMP_CRITICAL_BODY (stmt) = NULL;
5681 gimplify_and_add (unlock, &BIND_EXPR_BODY (bind));
5683 t = make_node (OMP_RETURN);
5684 OMP_RETURN_NOWAIT (t) = 1;
5685 append_to_statement_list (t, &BIND_EXPR_BODY (bind));
5687 pop_gimplify_context (bind);
5688 BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
5689 BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
5693 /* A subroutine of lower_omp_for. Generate code to emit the predicate
5694 for a lastprivate clause. Given a loop control predicate of (V
5695 cond N2), we gate the clause on (!(V cond N2)). The lowered form
5696 is appended to *DLIST, iterator initialization is appended to
5697 *BODY_P. */
5699 static void
5700 lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
5701 tree *dlist, struct omp_context *ctx)
5703 tree clauses, cond, stmts, vinit, t;
5704 enum tree_code cond_code;
5706 cond_code = fd->loop.cond_code;
5707 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
5709 /* When possible, use a strict equality expression. This can let VRP
5710 type optimizations deduce the value and remove a copy. */
5711 if (host_integerp (fd->loop.step, 0))
5713 HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
5714 if (step == 1 || step == -1)
5715 cond_code = EQ_EXPR;
5718 cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
5720 clauses = OMP_FOR_CLAUSES (fd->for_stmt);
5721 stmts = NULL;
5722 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
5723 if (stmts != NULL)
5725 append_to_statement_list (*dlist, &stmts);
5726 *dlist = stmts;
5728 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
5729 vinit = fd->loop.n1;
5730 if (cond_code == EQ_EXPR
5731 && host_integerp (fd->loop.n2, 0)
5732 && ! integer_zerop (fd->loop.n2))
5733 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
5735 /* Initialize the iterator variable, so that threads that don't execute
5736 any iterations don't execute the lastprivate clauses by accident. */
5737 t = build_gimple_modify_stmt (fd->loop.v, vinit);
5738 gimplify_and_add (t, body_p);
5743 /* Lower code for an OpenMP loop directive. */
5745 static void
5746 lower_omp_for (tree *stmt_p, omp_context *ctx)
5748 tree t, stmt, ilist, dlist, new_stmt, block, *body_p, *rhs_p;
5749 struct omp_for_data fd;
5750 int i;
5751 struct gimplify_ctx gctx;
5753 stmt = *stmt_p;
5755 push_gimplify_context (&gctx);
5757 lower_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
5758 lower_omp (&OMP_FOR_BODY (stmt), ctx);
5760 block = make_node (BLOCK);
5761 new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
5762 TREE_SIDE_EFFECTS (new_stmt) = 1;
5763 body_p = &BIND_EXPR_BODY (new_stmt);
5765 /* Move declaration of temporaries in the loop body before we make
5766 it go away. */
5767 if (TREE_CODE (OMP_FOR_BODY (stmt)) == BIND_EXPR)
5768 BIND_EXPR_VARS (new_stmt)
5769 = chainon (BIND_EXPR_VARS (new_stmt),
5770 BIND_EXPR_VARS (OMP_FOR_BODY (stmt)));
5772 /* The pre-body and input clauses go before the lowered OMP_FOR. */
5773 ilist = NULL;
5774 dlist = NULL;
5775 lower_rec_input_clauses (OMP_FOR_CLAUSES (stmt), body_p, &dlist, ctx);
5776 append_to_statement_list (OMP_FOR_PRE_BODY (stmt), body_p);
5778 /* Lower the header expressions. At this point, we can assume that
5779 the header is of the form:
5781 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
5783 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
5784 using the .omp_data_s mapping, if needed. */
5785 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
5787 rhs_p = &GIMPLE_STMT_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (stmt), i), 1);
5788 if (!is_gimple_min_invariant (*rhs_p))
5789 *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
5791 rhs_p = &TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_COND (stmt), i), 1);
5792 if (!is_gimple_min_invariant (*rhs_p))
5793 *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
5795 rhs_p = &TREE_OPERAND (GIMPLE_STMT_OPERAND
5796 (TREE_VEC_ELT (OMP_FOR_INCR (stmt), i), 1), 1);
5797 if (!is_gimple_min_invariant (*rhs_p))
5798 *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
5801 /* Once lowered, extract the bounds and clauses. */
5802 extract_omp_for_data (stmt, &fd, NULL);
5804 lower_omp_for_lastprivate (&fd, body_p, &dlist, ctx);
5806 append_to_statement_list (stmt, body_p);
5808 append_to_statement_list (OMP_FOR_BODY (stmt), body_p);
5810 t = build2 (OMP_CONTINUE, void_type_node, fd.loop.v, fd.loop.v);
5811 append_to_statement_list (t, body_p);
5813 /* After the loop, add exit clauses. */
5814 lower_reduction_clauses (OMP_FOR_CLAUSES (stmt), body_p, ctx);
5815 append_to_statement_list (dlist, body_p);
5817 maybe_catch_exception (body_p);
5819 /* Region exit marker goes at the end of the loop body. */
5820 t = make_node (OMP_RETURN);
5821 OMP_RETURN_NOWAIT (t) = fd.have_nowait;
5822 append_to_statement_list (t, body_p);
5824 pop_gimplify_context (new_stmt);
5825 BIND_EXPR_VARS (new_stmt)
5826 = chainon (BIND_EXPR_VARS (new_stmt), ctx->block_vars);
5827 BLOCK_VARS (block) = BIND_EXPR_VARS (new_stmt);
5828 if (BLOCK_VARS (block))
5829 TREE_USED (block) = 1;
5831 OMP_FOR_BODY (stmt) = NULL_TREE;
5832 OMP_FOR_PRE_BODY (stmt) = NULL_TREE;
5833 *stmt_p = new_stmt;
5836 /* Callback for walk_stmts. Check if *TP only contains OMP_FOR
5837 or OMP_PARALLEL. */
5839 static tree
5840 check_combined_parallel (tree *tp, int *walk_subtrees, void *data)
5842 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5843 int *info = (int *) wi->info;
5845 *walk_subtrees = 0;
5846 switch (TREE_CODE (*tp))
5848 case OMP_FOR:
5849 case OMP_SECTIONS:
5850 *info = *info == 0 ? 1 : -1;
5851 break;
5852 default:
5853 *info = -1;
5854 break;
5856 return NULL;
5859 struct omp_taskcopy_context
5861 /* This field must be at the beginning, as we do "inheritance": Some
5862 callback functions for tree-inline.c (e.g., omp_copy_decl)
5863 receive a copy_body_data pointer that is up-casted to an
5864 omp_context pointer. */
5865 copy_body_data cb;
5866 omp_context *ctx;
5869 static tree
5870 task_copyfn_copy_decl (tree var, copy_body_data *cb)
5872 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
5874 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
5875 return create_tmp_var (TREE_TYPE (var), NULL);
5877 return var;
5880 static tree
5881 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
5883 tree name, new_fields = NULL, type, f;
5885 type = lang_hooks.types.make_type (RECORD_TYPE);
5886 name = DECL_NAME (TYPE_NAME (orig_type));
5887 name = build_decl (TYPE_DECL, name, type);
5888 TYPE_NAME (type) = name;
5890 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
5892 tree new_f = copy_node (f);
5893 DECL_CONTEXT (new_f) = type;
5894 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
5895 TREE_CHAIN (new_f) = new_fields;
5896 walk_tree (&DECL_SIZE (new_f), copy_body_r, &tcctx->cb, NULL);
5897 walk_tree (&DECL_SIZE_UNIT (new_f), copy_body_r, &tcctx->cb, NULL);
5898 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_body_r, &tcctx->cb, NULL);
5899 new_fields = new_f;
5900 *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
5902 TYPE_FIELDS (type) = nreverse (new_fields);
5903 layout_type (type);
5904 return type;
5907 /* Create task copyfn. */
5909 static void
5910 create_task_copyfn (tree task_stmt, omp_context *ctx)
5912 struct function *child_cfun;
5913 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
5914 tree record_type, srecord_type, bind, list;
5915 bool record_needs_remap = false, srecord_needs_remap = false;
5916 splay_tree_node n;
5917 struct omp_taskcopy_context tcctx;
5918 struct gimplify_ctx gctx;
5920 child_fn = OMP_TASK_COPYFN (task_stmt);
5921 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
5922 gcc_assert (child_cfun->cfg == NULL);
5923 child_cfun->dont_save_pending_sizes_p = 1;
5924 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
5926 /* Reset DECL_CONTEXT on function arguments. */
5927 for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
5928 DECL_CONTEXT (t) = child_fn;
5930 /* Populate the function. */
5931 push_gimplify_context (&gctx);
5932 current_function_decl = child_fn;
5934 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
5935 TREE_SIDE_EFFECTS (bind) = 1;
5936 list = NULL;
5937 DECL_SAVED_TREE (child_fn) = bind;
5938 DECL_SOURCE_LOCATION (child_fn) = EXPR_LOCATION (task_stmt);
5940 /* Remap src and dst argument types if needed. */
5941 record_type = ctx->record_type;
5942 srecord_type = ctx->srecord_type;
5943 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
5944 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
5946 record_needs_remap = true;
5947 break;
5949 for (f = TYPE_FIELDS (srecord_type); f ; f = TREE_CHAIN (f))
5950 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
5952 srecord_needs_remap = true;
5953 break;
5956 if (record_needs_remap || srecord_needs_remap)
5958 memset (&tcctx, '\0', sizeof (tcctx));
5959 tcctx.cb.src_fn = ctx->cb.src_fn;
5960 tcctx.cb.dst_fn = child_fn;
5961 tcctx.cb.src_node = cgraph_node (tcctx.cb.src_fn);
5962 tcctx.cb.dst_node = tcctx.cb.src_node;
5963 tcctx.cb.src_cfun = ctx->cb.src_cfun;
5964 tcctx.cb.copy_decl = task_copyfn_copy_decl;
5965 tcctx.cb.eh_region = -1;
5966 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
5967 tcctx.cb.decl_map = pointer_map_create ();
5968 tcctx.ctx = ctx;
5970 if (record_needs_remap)
5971 record_type = task_copyfn_remap_type (&tcctx, record_type);
5972 if (srecord_needs_remap)
5973 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
5975 else
5976 tcctx.cb.decl_map = NULL;
5978 push_cfun (child_cfun);
5980 arg = DECL_ARGUMENTS (child_fn);
5981 TREE_TYPE (arg) = build_pointer_type (record_type);
5982 sarg = TREE_CHAIN (arg);
5983 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
5985 /* First pass: initialize temporaries used in record_type and srecord_type
5986 sizes and field offsets. */
5987 if (tcctx.cb.decl_map)
5988 for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
5989 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
5991 tree *p;
5993 decl = OMP_CLAUSE_DECL (c);
5994 p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
5995 if (p == NULL)
5996 continue;
5997 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
5998 sf = (tree) n->value;
5999 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
6000 src = build_fold_indirect_ref (sarg);
6001 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
6002 t = build_gimple_modify_stmt (*p, src);
6003 append_to_statement_list (t, &list);
6006 /* Second pass: copy shared var pointers and copy construct non-VLA
6007 firstprivate vars. */
6008 for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
6009 switch (OMP_CLAUSE_CODE (c))
6011 case OMP_CLAUSE_SHARED:
6012 decl = OMP_CLAUSE_DECL (c);
6013 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6014 if (n == NULL)
6015 break;
6016 f = (tree) n->value;
6017 if (tcctx.cb.decl_map)
6018 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6019 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6020 sf = (tree) n->value;
6021 if (tcctx.cb.decl_map)
6022 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
6023 src = build_fold_indirect_ref (sarg);
6024 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
6025 dst = build_fold_indirect_ref (arg);
6026 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
6027 t = build_gimple_modify_stmt (dst, src);
6028 append_to_statement_list (t, &list);
6029 break;
6030 case OMP_CLAUSE_FIRSTPRIVATE:
6031 decl = OMP_CLAUSE_DECL (c);
6032 if (is_variable_sized (decl))
6033 break;
6034 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6035 if (n == NULL)
6036 break;
6037 f = (tree) n->value;
6038 if (tcctx.cb.decl_map)
6039 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6040 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6041 if (n != NULL)
6043 sf = (tree) n->value;
6044 if (tcctx.cb.decl_map)
6045 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
6046 src = build_fold_indirect_ref (sarg);
6047 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
6048 if (use_pointer_for_field (decl, NULL) || is_reference (decl))
6049 src = build_fold_indirect_ref (src);
6051 else
6052 src = decl;
6053 dst = build_fold_indirect_ref (arg);
6054 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
6055 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
6056 append_to_statement_list (t, &list);
6057 break;
6058 case OMP_CLAUSE_PRIVATE:
6059 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6060 break;
6061 decl = OMP_CLAUSE_DECL (c);
6062 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6063 f = (tree) n->value;
6064 if (tcctx.cb.decl_map)
6065 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6066 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6067 if (n != NULL)
6069 sf = (tree) n->value;
6070 if (tcctx.cb.decl_map)
6071 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
6072 src = build_fold_indirect_ref (sarg);
6073 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
6074 if (use_pointer_for_field (decl, NULL))
6075 src = build_fold_indirect_ref (src);
6077 else
6078 src = decl;
6079 dst = build_fold_indirect_ref (arg);
6080 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
6081 t = build_gimple_modify_stmt (dst, src);
6082 append_to_statement_list (t, &list);
6083 break;
6084 default:
6085 break;
6088 /* Last pass: handle VLA firstprivates. */
6089 if (tcctx.cb.decl_map)
6090 for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
6091 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
6093 tree ind, ptr, df;
6095 decl = OMP_CLAUSE_DECL (c);
6096 if (!is_variable_sized (decl))
6097 continue;
6098 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6099 if (n == NULL)
6100 continue;
6101 f = (tree) n->value;
6102 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6103 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
6104 ind = DECL_VALUE_EXPR (decl);
6105 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
6106 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
6107 n = splay_tree_lookup (ctx->sfield_map,
6108 (splay_tree_key) TREE_OPERAND (ind, 0));
6109 sf = (tree) n->value;
6110 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
6111 src = build_fold_indirect_ref (sarg);
6112 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
6113 src = build_fold_indirect_ref (src);
6114 dst = build_fold_indirect_ref (arg);
6115 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
6116 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
6117 append_to_statement_list (t, &list);
6118 n = splay_tree_lookup (ctx->field_map,
6119 (splay_tree_key) TREE_OPERAND (ind, 0));
6120 df = (tree) n->value;
6121 df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
6122 ptr = build_fold_indirect_ref (arg);
6123 ptr = build3 (COMPONENT_REF, TREE_TYPE (df), ptr, df, NULL);
6124 t = build_gimple_modify_stmt (ptr, build_fold_addr_expr (dst));
6125 append_to_statement_list (t, &list);
6128 t = build1 (RETURN_EXPR, void_type_node, NULL);
6129 append_to_statement_list (t, &list);
6131 if (tcctx.cb.decl_map)
6132 pointer_map_destroy (tcctx.cb.decl_map);
6133 pop_gimplify_context (NULL);
6134 BIND_EXPR_BODY (bind) = list;
6135 pop_cfun ();
6136 current_function_decl = ctx->cb.src_fn;
6139 /* Lower the OpenMP parallel or task directive in *STMT_P. CTX holds context
6140 information for the directive. */
6142 static void
6143 lower_omp_taskreg (tree *stmt_p, omp_context *ctx)
6145 tree clauses, par_bind, par_body, new_body, bind;
6146 tree olist, ilist, par_olist, par_ilist;
6147 tree stmt, child_fn, t;
6148 struct gimplify_ctx gctx;
6150 stmt = *stmt_p;
6152 clauses = OMP_TASKREG_CLAUSES (stmt);
6153 par_bind = OMP_TASKREG_BODY (stmt);
6154 par_body = BIND_EXPR_BODY (par_bind);
6155 child_fn = ctx->cb.dst_fn;
6156 if (TREE_CODE (stmt) == OMP_PARALLEL && !OMP_PARALLEL_COMBINED (stmt))
6158 struct walk_stmt_info wi;
6159 int ws_num = 0;
6161 memset (&wi, 0, sizeof (wi));
6162 wi.callback = check_combined_parallel;
6163 wi.info = &ws_num;
6164 wi.val_only = true;
6165 walk_stmts (&wi, &par_bind);
6166 if (ws_num == 1)
6167 OMP_PARALLEL_COMBINED (stmt) = 1;
6169 if (ctx->srecord_type)
6170 create_task_copyfn (stmt, ctx);
6172 push_gimplify_context (&gctx);
6174 par_olist = NULL_TREE;
6175 par_ilist = NULL_TREE;
6176 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx);
6177 lower_omp (&par_body, ctx);
6178 if (TREE_CODE (stmt) == OMP_PARALLEL)
6179 lower_reduction_clauses (clauses, &par_olist, ctx);
6181 /* Declare all the variables created by mapping and the variables
6182 declared in the scope of the parallel body. */
6183 record_vars_into (ctx->block_vars, child_fn);
6184 record_vars_into (BIND_EXPR_VARS (par_bind), child_fn);
6186 if (ctx->record_type)
6188 ctx->sender_decl
6189 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
6190 : ctx->record_type, ".omp_data_o");
6191 OMP_TASKREG_DATA_ARG (stmt) = ctx->sender_decl;
6194 olist = NULL_TREE;
6195 ilist = NULL_TREE;
6196 lower_send_clauses (clauses, &ilist, &olist, ctx);
6197 lower_send_shared_vars (&ilist, &olist, ctx);
6199 /* Once all the expansions are done, sequence all the different
6200 fragments inside OMP_TASKREG_BODY. */
6201 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
6202 BIND_EXPR_BLOCK (par_bind));
6203 TREE_SIDE_EFFECTS (bind) = 1;
6205 new_body = alloc_stmt_list ();
6207 if (ctx->record_type)
6209 t = build_fold_addr_expr (ctx->sender_decl);
6210 /* fixup_child_record_type might have changed receiver_decl's type. */
6211 t = fold_convert (TREE_TYPE (ctx->receiver_decl), t);
6212 t = build_gimple_modify_stmt (ctx->receiver_decl, t);
6213 append_to_statement_list (t, &new_body);
6216 append_to_statement_list (par_ilist, &new_body);
6217 append_to_statement_list (par_body, &new_body);
6218 append_to_statement_list (par_olist, &new_body);
6219 maybe_catch_exception (&new_body);
6220 t = make_node (OMP_RETURN);
6221 append_to_statement_list (t, &new_body);
6222 OMP_TASKREG_BODY (stmt) = new_body;
6224 append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
6225 if (ilist || olist)
6227 append_to_statement_list (bind, &ilist);
6228 append_to_statement_list (olist, &ilist);
6229 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6230 TREE_SIDE_EFFECTS (bind) = 1;
6231 append_to_statement_list (ilist, &BIND_EXPR_BODY (bind));
6234 *stmt_p = bind;
6236 pop_gimplify_context (NULL_TREE);
6239 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
6240 regimplified. */
6242 static tree
6243 lower_omp_2 (tree *tp, int *walk_subtrees, void *data)
6245 tree t = *tp;
6246 omp_context *ctx = (omp_context *) data;
6248 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
6249 if (TREE_CODE (t) == VAR_DECL && ctx && DECL_HAS_VALUE_EXPR_P (t))
6250 return t;
6252 if (task_shared_vars
6253 && DECL_P (t)
6254 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
6255 return t;
6257 /* If a global variable has been privatized, TREE_CONSTANT on
6258 ADDR_EXPR might be wrong. */
6259 if (ctx && TREE_CODE (t) == ADDR_EXPR)
6260 recompute_tree_invariant_for_addr_expr (t);
6262 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
6263 return NULL_TREE;
6266 static void
6267 lower_omp_1 (tree *tp, omp_context *ctx, tree_stmt_iterator *tsi)
6269 tree t = *tp;
6271 if (!t)
6272 return;
6274 if (EXPR_HAS_LOCATION (t))
6275 input_location = EXPR_LOCATION (t);
6277 /* If we have issued syntax errors, avoid doing any heavy lifting.
6278 Just replace the OpenMP directives with a NOP to avoid
6279 confusing RTL expansion. */
6280 if (errorcount && OMP_DIRECTIVE_P (t))
6282 *tp = build_empty_stmt ();
6283 return;
6286 switch (TREE_CODE (t))
6288 case STATEMENT_LIST:
6290 tree_stmt_iterator i;
6291 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
6292 lower_omp_1 (tsi_stmt_ptr (i), ctx, &i);
6294 break;
6296 case COND_EXPR:
6297 lower_omp_1 (&COND_EXPR_THEN (t), ctx, NULL);
6298 lower_omp_1 (&COND_EXPR_ELSE (t), ctx, NULL);
6299 if ((ctx || task_shared_vars)
6300 && walk_tree (&COND_EXPR_COND (t), lower_omp_2, ctx, NULL))
6302 tree pre = NULL;
6303 gimplify_expr (&COND_EXPR_COND (t), &pre, NULL,
6304 is_gimple_condexpr, fb_rvalue);
6305 if (pre)
6307 if (tsi)
6308 tsi_link_before (tsi, pre, TSI_SAME_STMT);
6309 else
6311 append_to_statement_list (t, &pre);
6312 *tp = pre;
6316 break;
6317 case CATCH_EXPR:
6318 lower_omp_1 (&CATCH_BODY (t), ctx, NULL);
6319 break;
6320 case EH_FILTER_EXPR:
6321 lower_omp_1 (&EH_FILTER_FAILURE (t), ctx, NULL);
6322 break;
6323 case TRY_CATCH_EXPR:
6324 case TRY_FINALLY_EXPR:
6325 lower_omp_1 (&TREE_OPERAND (t, 0), ctx, NULL);
6326 lower_omp_1 (&TREE_OPERAND (t, 1), ctx, NULL);
6327 break;
6328 case BIND_EXPR:
6329 lower_omp_1 (&BIND_EXPR_BODY (t), ctx, NULL);
6330 break;
6331 case RETURN_EXPR:
6332 lower_omp_1 (&TREE_OPERAND (t, 0), ctx, NULL);
6333 break;
6335 case OMP_PARALLEL:
6336 case OMP_TASK:
6337 ctx = maybe_lookup_ctx (t);
6338 lower_omp_taskreg (tp, ctx);
6339 break;
6340 case OMP_FOR:
6341 ctx = maybe_lookup_ctx (t);
6342 gcc_assert (ctx);
6343 lower_omp_for (tp, ctx);
6344 break;
6345 case OMP_SECTIONS:
6346 ctx = maybe_lookup_ctx (t);
6347 gcc_assert (ctx);
6348 lower_omp_sections (tp, ctx);
6349 break;
6350 case OMP_SINGLE:
6351 ctx = maybe_lookup_ctx (t);
6352 gcc_assert (ctx);
6353 lower_omp_single (tp, ctx);
6354 break;
6355 case OMP_MASTER:
6356 ctx = maybe_lookup_ctx (t);
6357 gcc_assert (ctx);
6358 lower_omp_master (tp, ctx);
6359 break;
6360 case OMP_ORDERED:
6361 ctx = maybe_lookup_ctx (t);
6362 gcc_assert (ctx);
6363 lower_omp_ordered (tp, ctx);
6364 break;
6365 case OMP_CRITICAL:
6366 ctx = maybe_lookup_ctx (t);
6367 gcc_assert (ctx);
6368 lower_omp_critical (tp, ctx);
6369 break;
6371 default:
6372 if ((ctx || task_shared_vars)
6373 && walk_tree (tp, lower_omp_2, ctx, NULL))
6375 /* The gimplifier doesn't gimplify CALL_EXPR_STATIC_CHAIN.
6376 Handle that here. */
6377 tree call = get_call_expr_in (t);
6378 if (call
6379 && CALL_EXPR_STATIC_CHAIN (call)
6380 && walk_tree (&CALL_EXPR_STATIC_CHAIN (call), lower_omp_2,
6381 ctx, NULL))
6383 tree pre = NULL;
6384 gimplify_expr (&CALL_EXPR_STATIC_CHAIN (call), &pre, NULL,
6385 is_gimple_val, fb_rvalue);
6386 if (pre)
6388 if (tsi)
6389 tsi_link_before (tsi, pre, TSI_SAME_STMT);
6390 else
6392 append_to_statement_list (t, &pre);
6393 lower_omp_1 (&pre, ctx, NULL);
6394 *tp = pre;
6395 return;
6400 if (tsi == NULL)
6401 gimplify_stmt (tp);
6402 else
6404 tree pre = NULL;
6405 gimplify_expr (tp, &pre, NULL, is_gimple_stmt, fb_none);
6406 if (pre)
6407 tsi_link_before (tsi, pre, TSI_SAME_STMT);
6410 break;
6414 static void
6415 lower_omp (tree *stmt_p, omp_context *ctx)
6417 location_t saved_location = input_location;
6418 lower_omp_1 (stmt_p, ctx, NULL);
6419 input_location = saved_location;
6422 /* Main entry point. */
6424 static unsigned int
6425 execute_lower_omp (void)
6427 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
6428 delete_omp_context);
6430 scan_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
6431 gcc_assert (taskreg_nesting_level == 0);
6433 if (all_contexts->root)
6435 struct gimplify_ctx gctx;
6437 if (task_shared_vars)
6438 push_gimplify_context (&gctx);
6439 lower_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
6440 if (task_shared_vars)
6441 pop_gimplify_context (NULL);
6444 if (all_contexts)
6446 splay_tree_delete (all_contexts);
6447 all_contexts = NULL;
6449 BITMAP_FREE (task_shared_vars);
6450 return 0;
6453 static bool
6454 gate_lower_omp (void)
6456 return flag_openmp != 0;
6459 struct gimple_opt_pass pass_lower_omp =
6462 GIMPLE_PASS,
6463 "omplower", /* name */
6464 gate_lower_omp, /* gate */
6465 execute_lower_omp, /* execute */
6466 NULL, /* sub */
6467 NULL, /* next */
6468 0, /* static_pass_number */
6469 0, /* tv_id */
6470 PROP_gimple_any, /* properties_required */
6471 PROP_gimple_lomp, /* properties_provided */
6472 0, /* properties_destroyed */
6473 0, /* todo_flags_start */
6474 TODO_dump_func /* todo_flags_finish */
6478 /* The following is a utility to diagnose OpenMP structured block violations.
6479 It is not part of the "omplower" pass, as that's invoked too late. It
6480 should be invoked by the respective front ends after gimplification. */
6482 static splay_tree all_labels;
6484 /* Check for mismatched contexts and generate an error if needed. Return
6485 true if an error is detected. */
6487 static bool
6488 diagnose_sb_0 (tree *stmt_p, tree branch_ctx, tree label_ctx)
6490 bool exit_p = true;
6492 if ((label_ctx ? TREE_VALUE (label_ctx) : NULL) == branch_ctx)
6493 return false;
6495 /* Try to avoid confusing the user by producing and error message
6496 with correct "exit" or "enter" verbiage. We prefer "exit"
6497 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
6498 if (branch_ctx == NULL)
6499 exit_p = false;
6500 else
6502 while (label_ctx)
6504 if (TREE_VALUE (label_ctx) == branch_ctx)
6506 exit_p = false;
6507 break;
6509 label_ctx = TREE_CHAIN (label_ctx);
6513 if (exit_p)
6514 error ("invalid exit from OpenMP structured block");
6515 else
6516 error ("invalid entry to OpenMP structured block");
6518 *stmt_p = build_empty_stmt ();
6519 return true;
6522 /* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
6523 where in the tree each label is found. */
6525 static tree
6526 diagnose_sb_1 (tree *tp, int *walk_subtrees, void *data)
6528 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6529 tree context = (tree) wi->info;
6530 tree inner_context;
6531 tree t = *tp;
6532 int i;
6534 *walk_subtrees = 0;
6535 switch (TREE_CODE (t))
6537 case OMP_PARALLEL:
6538 case OMP_TASK:
6539 case OMP_SECTIONS:
6540 case OMP_SINGLE:
6541 walk_tree (&OMP_CLAUSES (t), diagnose_sb_1, wi, NULL);
6542 /* FALLTHRU */
6543 case OMP_SECTION:
6544 case OMP_MASTER:
6545 case OMP_ORDERED:
6546 case OMP_CRITICAL:
6547 /* The minimal context here is just a tree of statements. */
6548 inner_context = tree_cons (NULL, t, context);
6549 wi->info = inner_context;
6550 walk_stmts (wi, &OMP_BODY (t));
6551 wi->info = context;
6552 break;
6554 case OMP_FOR:
6555 walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_1, wi, NULL);
6556 inner_context = tree_cons (NULL, t, context);
6557 wi->info = inner_context;
6558 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (t)); i++)
6560 walk_tree (&TREE_VEC_ELT (OMP_FOR_INIT (t), i), diagnose_sb_1,
6561 wi, NULL);
6562 walk_tree (&TREE_VEC_ELT (OMP_FOR_COND (t), i), diagnose_sb_1,
6563 wi, NULL);
6564 walk_tree (&TREE_VEC_ELT (OMP_FOR_INCR (t), i), diagnose_sb_1,
6565 wi, NULL);
6567 walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
6568 walk_stmts (wi, &OMP_FOR_BODY (t));
6569 wi->info = context;
6570 break;
6572 case LABEL_EXPR:
6573 splay_tree_insert (all_labels, (splay_tree_key) LABEL_EXPR_LABEL (t),
6574 (splay_tree_value) context);
6575 break;
6577 default:
6578 break;
6581 return NULL_TREE;
6584 /* Pass 2: Check each branch and see if its context differs from that of
6585 the destination label's context. */
6587 static tree
6588 diagnose_sb_2 (tree *tp, int *walk_subtrees, void *data)
6590 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6591 tree context = (tree) wi->info;
6592 splay_tree_node n;
6593 tree t = *tp;
6594 int i;
6596 *walk_subtrees = 0;
6597 switch (TREE_CODE (t))
6599 case OMP_PARALLEL:
6600 case OMP_TASK:
6601 case OMP_SECTIONS:
6602 case OMP_SINGLE:
6603 walk_tree (&OMP_CLAUSES (t), diagnose_sb_2, wi, NULL);
6604 /* FALLTHRU */
6605 case OMP_SECTION:
6606 case OMP_MASTER:
6607 case OMP_ORDERED:
6608 case OMP_CRITICAL:
6609 wi->info = t;
6610 walk_stmts (wi, &OMP_BODY (t));
6611 wi->info = context;
6612 break;
6614 case OMP_FOR:
6615 walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_2, wi, NULL);
6616 wi->info = t;
6617 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (t)); i++)
6619 walk_tree (&TREE_VEC_ELT (OMP_FOR_INIT (t), i), diagnose_sb_2,
6620 wi, NULL);
6621 walk_tree (&TREE_VEC_ELT (OMP_FOR_COND (t), i), diagnose_sb_2,
6622 wi, NULL);
6623 walk_tree (&TREE_VEC_ELT (OMP_FOR_INCR (t), i), diagnose_sb_2,
6624 wi, NULL);
6626 walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
6627 walk_stmts (wi, &OMP_FOR_BODY (t));
6628 wi->info = context;
6629 break;
6631 case GOTO_EXPR:
6633 tree lab = GOTO_DESTINATION (t);
6634 if (TREE_CODE (lab) != LABEL_DECL)
6635 break;
6637 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
6638 diagnose_sb_0 (tp, context, n ? (tree) n->value : NULL_TREE);
6640 break;
6642 case SWITCH_EXPR:
6644 tree vec = SWITCH_LABELS (t);
6645 int i, len = TREE_VEC_LENGTH (vec);
6646 for (i = 0; i < len; ++i)
6648 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
6649 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
6650 if (diagnose_sb_0 (tp, context, (tree) n->value))
6651 break;
6654 break;
6656 case RETURN_EXPR:
6657 diagnose_sb_0 (tp, context, NULL_TREE);
6658 break;
6660 default:
6661 break;
6664 return NULL_TREE;
6667 void
6668 diagnose_omp_structured_block_errors (tree fndecl)
6670 tree save_current = current_function_decl;
6671 struct walk_stmt_info wi;
6673 current_function_decl = fndecl;
6675 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
6677 memset (&wi, 0, sizeof (wi));
6678 wi.callback = diagnose_sb_1;
6679 walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));
6681 memset (&wi, 0, sizeof (wi));
6682 wi.callback = diagnose_sb_2;
6683 wi.want_locations = true;
6684 wi.want_return_expr = true;
6685 walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));
6687 splay_tree_delete (all_labels);
6688 all_labels = NULL;
6690 current_function_decl = save_current;
6693 #include "gt-omp-low.h"