Added Compiler Support for _Cilk_spawn and _Cilk_sync for C++.
[official-gcc.git] / gcc / cp / cp-gimplify.c
blob5fa564c57d1b7142dab4fc14df1656d9156e0609
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "cp-tree.h"
29 #include "c-family/c-common.h"
30 #include "tree-iterator.h"
31 #include "pointer-set.h"
32 #include "basic-block.h"
33 #include "tree-ssa-alias.h"
34 #include "internal-fn.h"
35 #include "gimple-expr.h"
36 #include "is-a.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "hashtab.h"
40 #include "flags.h"
41 #include "splay-tree.h"
42 #include "target.h"
43 #include "c-family/c-ubsan.h"
44 #include "cilk.h"
46 /* Forward declarations. */
48 static tree cp_genericize_r (tree *, int *, void *);
49 static void cp_genericize_tree (tree*);
51 /* Local declarations. */
53 enum bc_t { bc_break = 0, bc_continue = 1 };
55 /* Stack of labels which are targets for "break" or "continue",
56 linked through TREE_CHAIN. */
57 static tree bc_label[2];
59 /* Begin a scope which can be exited by a break or continue statement. BC
60 indicates which.
62 Just creates a label with location LOCATION and pushes it into the current
63 context. */
65 static tree
66 begin_bc_block (enum bc_t bc, location_t location)
68 tree label = create_artificial_label (location);
69 DECL_CHAIN (label) = bc_label[bc];
70 bc_label[bc] = label;
71 return label;
74 /* Finish a scope which can be exited by a break or continue statement.
75 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
76 an expression for the contents of the scope.
78 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
79 BLOCK. Otherwise, just forget the label. */
81 static void
82 finish_bc_block (tree *block, enum bc_t bc, tree label)
84 gcc_assert (label == bc_label[bc]);
86 if (TREE_USED (label))
87 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
88 block);
90 bc_label[bc] = DECL_CHAIN (label);
91 DECL_CHAIN (label) = NULL_TREE;
94 /* Get the LABEL_EXPR to represent a break or continue statement
95 in the current block scope. BC indicates which. */
97 static tree
98 get_bc_label (enum bc_t bc)
100 tree label = bc_label[bc];
102 /* Mark the label used for finish_bc_block. */
103 TREE_USED (label) = 1;
104 return label;
107 /* Genericize a TRY_BLOCK. */
109 static void
110 genericize_try_block (tree *stmt_p)
112 tree body = TRY_STMTS (*stmt_p);
113 tree cleanup = TRY_HANDLERS (*stmt_p);
115 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
118 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
120 static void
121 genericize_catch_block (tree *stmt_p)
123 tree type = HANDLER_TYPE (*stmt_p);
124 tree body = HANDLER_BODY (*stmt_p);
126 /* FIXME should the caught type go in TREE_TYPE? */
127 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
130 /* A terser interface for building a representation of an exception
131 specification. */
133 static tree
134 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
136 tree t;
138 /* FIXME should the allowed types go in TREE_TYPE? */
139 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
140 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
142 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
143 append_to_statement_list (body, &TREE_OPERAND (t, 0));
145 return t;
148 /* Genericize an EH_SPEC_BLOCK by converting it to a
149 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
151 static void
152 genericize_eh_spec_block (tree *stmt_p)
154 tree body = EH_SPEC_STMTS (*stmt_p);
155 tree allowed = EH_SPEC_RAISES (*stmt_p);
156 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
158 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
159 TREE_NO_WARNING (*stmt_p) = true;
160 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
163 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
165 static void
166 genericize_if_stmt (tree *stmt_p)
168 tree stmt, cond, then_, else_;
169 location_t locus = EXPR_LOCATION (*stmt_p);
171 stmt = *stmt_p;
172 cond = IF_COND (stmt);
173 then_ = THEN_CLAUSE (stmt);
174 else_ = ELSE_CLAUSE (stmt);
176 if (!then_)
177 then_ = build_empty_stmt (locus);
178 if (!else_)
179 else_ = build_empty_stmt (locus);
181 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
182 stmt = then_;
183 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
184 stmt = else_;
185 else
186 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
187 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
188 SET_EXPR_LOCATION (stmt, locus);
189 *stmt_p = stmt;
192 /* Build a generic representation of one of the C loop forms. COND is the
193 loop condition or NULL_TREE. BODY is the (possibly compound) statement
194 controlled by the loop. INCR is the increment expression of a for-loop,
195 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
196 evaluated before the loop body as in while and for loops, or after the
197 loop body as in do-while loops. */
199 static void
200 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
201 tree incr, bool cond_is_first, int *walk_subtrees,
202 void *data)
204 tree blab, clab;
205 tree entry = NULL, exit = NULL, t;
206 tree stmt_list = NULL;
208 blab = begin_bc_block (bc_break, start_locus);
209 clab = begin_bc_block (bc_continue, start_locus);
211 if (incr && EXPR_P (incr))
212 SET_EXPR_LOCATION (incr, start_locus);
214 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
215 cp_walk_tree (&body, cp_genericize_r, data, NULL);
216 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
217 *walk_subtrees = 0;
219 /* If condition is zero don't generate a loop construct. */
220 if (cond && integer_zerop (cond))
222 if (cond_is_first)
224 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
225 get_bc_label (bc_break));
226 append_to_statement_list (t, &stmt_list);
229 else
231 /* Expand to gotos, just like c_finish_loop. TODO: Use LOOP_EXPR. */
232 tree top = build1 (LABEL_EXPR, void_type_node,
233 create_artificial_label (start_locus));
235 /* If we have an exit condition, then we build an IF with gotos either
236 out of the loop, or to the top of it. If there's no exit condition,
237 then we just build a jump back to the top. */
238 exit = build1 (GOTO_EXPR, void_type_node, LABEL_EXPR_LABEL (top));
240 if (cond && !integer_nonzerop (cond))
242 /* Canonicalize the loop condition to the end. This means
243 generating a branch to the loop condition. Reuse the
244 continue label, if possible. */
245 if (cond_is_first)
247 if (incr)
249 entry = build1 (LABEL_EXPR, void_type_node,
250 create_artificial_label (start_locus));
251 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
252 LABEL_EXPR_LABEL (entry));
254 else
255 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
256 get_bc_label (bc_continue));
257 append_to_statement_list (t, &stmt_list);
260 t = build1 (GOTO_EXPR, void_type_node, get_bc_label (bc_break));
261 exit = fold_build3_loc (start_locus,
262 COND_EXPR, void_type_node, cond, exit, t);
265 append_to_statement_list (top, &stmt_list);
268 append_to_statement_list (body, &stmt_list);
269 finish_bc_block (&stmt_list, bc_continue, clab);
270 append_to_statement_list (incr, &stmt_list);
271 append_to_statement_list (entry, &stmt_list);
272 append_to_statement_list (exit, &stmt_list);
273 finish_bc_block (&stmt_list, bc_break, blab);
275 if (stmt_list == NULL_TREE)
276 stmt_list = build1 (NOP_EXPR, void_type_node, integer_zero_node);
278 *stmt_p = stmt_list;
281 /* Genericize a FOR_STMT node *STMT_P. */
283 static void
284 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
286 tree stmt = *stmt_p;
287 tree expr = NULL;
288 tree loop;
289 tree init = FOR_INIT_STMT (stmt);
291 if (init)
293 cp_walk_tree (&init, cp_genericize_r, data, NULL);
294 append_to_statement_list (init, &expr);
297 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
298 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
299 append_to_statement_list (loop, &expr);
300 *stmt_p = expr;
303 /* Genericize a WHILE_STMT node *STMT_P. */
305 static void
306 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
308 tree stmt = *stmt_p;
309 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
310 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
313 /* Genericize a DO_STMT node *STMT_P. */
315 static void
316 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
318 tree stmt = *stmt_p;
319 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
320 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
323 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
325 static void
326 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
328 tree stmt = *stmt_p;
329 tree break_block, body, cond, type;
330 location_t stmt_locus = EXPR_LOCATION (stmt);
332 break_block = begin_bc_block (bc_break, stmt_locus);
334 body = SWITCH_STMT_BODY (stmt);
335 if (!body)
336 body = build_empty_stmt (stmt_locus);
337 cond = SWITCH_STMT_COND (stmt);
338 type = SWITCH_STMT_TYPE (stmt);
340 cp_walk_tree (&body, cp_genericize_r, data, NULL);
341 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
342 cp_walk_tree (&type, cp_genericize_r, data, NULL);
343 *walk_subtrees = 0;
345 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
346 finish_bc_block (stmt_p, bc_break, break_block);
349 /* Genericize a CONTINUE_STMT node *STMT_P. */
351 static void
352 genericize_continue_stmt (tree *stmt_p)
354 tree stmt_list = NULL;
355 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
356 tree label = get_bc_label (bc_continue);
357 location_t location = EXPR_LOCATION (*stmt_p);
358 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
359 append_to_statement_list (pred, &stmt_list);
360 append_to_statement_list (jump, &stmt_list);
361 *stmt_p = stmt_list;
364 /* Genericize a BREAK_STMT node *STMT_P. */
366 static void
367 genericize_break_stmt (tree *stmt_p)
369 tree label = get_bc_label (bc_break);
370 location_t location = EXPR_LOCATION (*stmt_p);
371 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
374 /* Genericize a OMP_FOR node *STMT_P. */
376 static void
377 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
379 tree stmt = *stmt_p;
380 location_t locus = EXPR_LOCATION (stmt);
381 tree clab = begin_bc_block (bc_continue, locus);
383 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
384 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
385 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
386 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
387 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
388 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
389 *walk_subtrees = 0;
391 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
394 /* Hook into the middle of gimplifying an OMP_FOR node. */
396 static enum gimplify_status
397 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
399 tree for_stmt = *expr_p;
400 gimple_seq seq = NULL;
402 /* Protect ourselves from recursion. */
403 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
404 return GS_UNHANDLED;
405 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
407 gimplify_and_add (for_stmt, &seq);
408 gimple_seq_add_seq (pre_p, seq);
410 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
412 return GS_ALL_DONE;
415 /* Gimplify an EXPR_STMT node. */
417 static void
418 gimplify_expr_stmt (tree *stmt_p)
420 tree stmt = EXPR_STMT_EXPR (*stmt_p);
422 if (stmt == error_mark_node)
423 stmt = NULL;
425 /* Gimplification of a statement expression will nullify the
426 statement if all its side effects are moved to *PRE_P and *POST_P.
428 In this case we will not want to emit the gimplified statement.
429 However, we may still want to emit a warning, so we do that before
430 gimplification. */
431 if (stmt && warn_unused_value)
433 if (!TREE_SIDE_EFFECTS (stmt))
435 if (!IS_EMPTY_STMT (stmt)
436 && !VOID_TYPE_P (TREE_TYPE (stmt))
437 && !TREE_NO_WARNING (stmt))
438 warning (OPT_Wunused_value, "statement with no effect");
440 else
441 warn_if_unused_value (stmt, input_location);
444 if (stmt == NULL_TREE)
445 stmt = alloc_stmt_list ();
447 *stmt_p = stmt;
450 /* Gimplify initialization from an AGGR_INIT_EXPR. */
452 static void
453 cp_gimplify_init_expr (tree *expr_p)
455 tree from = TREE_OPERAND (*expr_p, 1);
456 tree to = TREE_OPERAND (*expr_p, 0);
457 tree t;
459 /* What about code that pulls out the temp and uses it elsewhere? I
460 think that such code never uses the TARGET_EXPR as an initializer. If
461 I'm wrong, we'll abort because the temp won't have any RTL. In that
462 case, I guess we'll need to replace references somehow. */
463 if (TREE_CODE (from) == TARGET_EXPR)
464 from = TARGET_EXPR_INITIAL (from);
466 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
467 inside the TARGET_EXPR. */
468 for (t = from; t; )
470 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
472 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
473 replace the slot operand with our target.
475 Should we add a target parm to gimplify_expr instead? No, as in this
476 case we want to replace the INIT_EXPR. */
477 if (TREE_CODE (sub) == AGGR_INIT_EXPR
478 || TREE_CODE (sub) == VEC_INIT_EXPR)
480 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
481 AGGR_INIT_EXPR_SLOT (sub) = to;
482 else
483 VEC_INIT_EXPR_SLOT (sub) = to;
484 *expr_p = from;
486 /* The initialization is now a side-effect, so the container can
487 become void. */
488 if (from != sub)
489 TREE_TYPE (from) = void_type_node;
492 if (t == sub)
493 break;
494 else
495 t = TREE_OPERAND (t, 1);
500 /* Gimplify a MUST_NOT_THROW_EXPR. */
502 static enum gimplify_status
503 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
505 tree stmt = *expr_p;
506 tree temp = voidify_wrapper_expr (stmt, NULL);
507 tree body = TREE_OPERAND (stmt, 0);
508 gimple_seq try_ = NULL;
509 gimple_seq catch_ = NULL;
510 gimple mnt;
512 gimplify_and_add (body, &try_);
513 mnt = gimple_build_eh_must_not_throw (terminate_node);
514 gimple_seq_add_stmt_without_update (&catch_, mnt);
515 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
517 gimple_seq_add_stmt_without_update (pre_p, mnt);
518 if (temp)
520 *expr_p = temp;
521 return GS_OK;
524 *expr_p = NULL;
525 return GS_ALL_DONE;
528 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
531 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
533 int saved_stmts_are_full_exprs_p = 0;
534 enum tree_code code = TREE_CODE (*expr_p);
535 enum gimplify_status ret;
537 if (STATEMENT_CODE_P (code))
539 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
540 current_stmt_tree ()->stmts_are_full_exprs_p
541 = STMT_IS_FULL_EXPR_P (*expr_p);
544 switch (code)
546 case PTRMEM_CST:
547 *expr_p = cplus_expand_constant (*expr_p);
548 ret = GS_OK;
549 break;
551 case AGGR_INIT_EXPR:
552 simplify_aggr_init_expr (expr_p);
553 ret = GS_OK;
554 break;
556 case VEC_INIT_EXPR:
558 location_t loc = input_location;
559 tree init = VEC_INIT_EXPR_INIT (*expr_p);
560 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
561 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
562 input_location = EXPR_LOCATION (*expr_p);
563 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
564 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
565 from_array,
566 tf_warning_or_error);
567 cp_genericize_tree (expr_p);
568 ret = GS_OK;
569 input_location = loc;
571 break;
573 case THROW_EXPR:
574 /* FIXME communicate throw type to back end, probably by moving
575 THROW_EXPR into ../tree.def. */
576 *expr_p = TREE_OPERAND (*expr_p, 0);
577 ret = GS_OK;
578 break;
580 case MUST_NOT_THROW_EXPR:
581 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
582 break;
584 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
585 LHS of an assignment might also be involved in the RHS, as in bug
586 25979. */
587 case INIT_EXPR:
588 if (fn_contains_cilk_spawn_p (cfun)
589 && cilk_detect_spawn_and_unwrap (expr_p)
590 && !seen_error ())
591 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
592 cp_gimplify_init_expr (expr_p);
593 if (TREE_CODE (*expr_p) != INIT_EXPR)
594 return GS_OK;
595 /* Otherwise fall through. */
596 case MODIFY_EXPR:
598 if (fn_contains_cilk_spawn_p (cfun)
599 && cilk_detect_spawn_and_unwrap (expr_p)
600 && !seen_error ())
601 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
603 /* If the back end isn't clever enough to know that the lhs and rhs
604 types are the same, add an explicit conversion. */
605 tree op0 = TREE_OPERAND (*expr_p, 0);
606 tree op1 = TREE_OPERAND (*expr_p, 1);
608 if (!error_operand_p (op0)
609 && !error_operand_p (op1)
610 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
611 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
612 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
613 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
614 TREE_TYPE (op0), op1);
616 else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
617 || (TREE_CODE (op1) == CONSTRUCTOR
618 && CONSTRUCTOR_NELTS (op1) == 0
619 && !TREE_CLOBBER_P (op1))
620 || (TREE_CODE (op1) == CALL_EXPR
621 && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
622 && is_really_empty_class (TREE_TYPE (op0)))
624 /* Remove any copies of empty classes. We check that the RHS
625 has a simple form so that TARGET_EXPRs and non-empty
626 CONSTRUCTORs get reduced properly, and we leave the return
627 slot optimization alone because it isn't a copy (FIXME so it
628 shouldn't be represented as one).
630 Also drop volatile variables on the RHS to avoid infinite
631 recursion from gimplify_expr trying to load the value. */
632 if (!TREE_SIDE_EFFECTS (op1)
633 || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
634 *expr_p = op0;
635 else if (TREE_CODE (op1) == MEM_REF
636 && TREE_THIS_VOLATILE (op1))
638 /* Similarly for volatile MEM_REFs on the RHS. */
639 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
640 *expr_p = op0;
641 else
642 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
643 TREE_OPERAND (op1, 0), op0);
645 else
646 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
647 op0, op1);
650 ret = GS_OK;
651 break;
653 case EMPTY_CLASS_EXPR:
654 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
655 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
656 ret = GS_OK;
657 break;
659 case BASELINK:
660 *expr_p = BASELINK_FUNCTIONS (*expr_p);
661 ret = GS_OK;
662 break;
664 case TRY_BLOCK:
665 genericize_try_block (expr_p);
666 ret = GS_OK;
667 break;
669 case HANDLER:
670 genericize_catch_block (expr_p);
671 ret = GS_OK;
672 break;
674 case EH_SPEC_BLOCK:
675 genericize_eh_spec_block (expr_p);
676 ret = GS_OK;
677 break;
679 case USING_STMT:
680 gcc_unreachable ();
682 case FOR_STMT:
683 case WHILE_STMT:
684 case DO_STMT:
685 case SWITCH_STMT:
686 case CONTINUE_STMT:
687 case BREAK_STMT:
688 gcc_unreachable ();
690 case OMP_FOR:
691 case OMP_SIMD:
692 case OMP_DISTRIBUTE:
693 ret = cp_gimplify_omp_for (expr_p, pre_p);
694 break;
696 case EXPR_STMT:
697 gimplify_expr_stmt (expr_p);
698 ret = GS_OK;
699 break;
701 case UNARY_PLUS_EXPR:
703 tree arg = TREE_OPERAND (*expr_p, 0);
704 tree type = TREE_TYPE (*expr_p);
705 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
706 : arg;
707 ret = GS_OK;
709 break;
711 case CILK_SPAWN_STMT:
712 gcc_assert
713 (fn_contains_cilk_spawn_p (cfun)
714 && cilk_detect_spawn_and_unwrap (expr_p));
716 /* If errors are seen, then just process it as a CALL_EXPR. */
717 if (!seen_error ())
718 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
720 case CALL_EXPR:
721 if (fn_contains_cilk_spawn_p (cfun)
722 && cilk_detect_spawn_and_unwrap (expr_p)
723 && !seen_error ())
724 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
726 default:
727 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
728 break;
731 /* Restore saved state. */
732 if (STATEMENT_CODE_P (code))
733 current_stmt_tree ()->stmts_are_full_exprs_p
734 = saved_stmts_are_full_exprs_p;
736 return ret;
739 static inline bool
740 is_invisiref_parm (const_tree t)
742 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
743 && DECL_BY_REFERENCE (t));
746 /* Return true if the uid in both int tree maps are equal. */
749 cxx_int_tree_map_eq (const void *va, const void *vb)
751 const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
752 const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
753 return (a->uid == b->uid);
756 /* Hash a UID in a cxx_int_tree_map. */
758 unsigned int
759 cxx_int_tree_map_hash (const void *item)
761 return ((const struct cxx_int_tree_map *)item)->uid;
764 /* A stable comparison routine for use with splay trees and DECLs. */
766 static int
767 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
769 tree a = (tree) xa;
770 tree b = (tree) xb;
772 return DECL_UID (a) - DECL_UID (b);
775 /* OpenMP context during genericization. */
777 struct cp_genericize_omp_taskreg
779 bool is_parallel;
780 bool default_shared;
781 struct cp_genericize_omp_taskreg *outer;
782 splay_tree variables;
785 /* Return true if genericization should try to determine if
786 DECL is firstprivate or shared within task regions. */
788 static bool
789 omp_var_to_track (tree decl)
791 tree type = TREE_TYPE (decl);
792 if (is_invisiref_parm (decl))
793 type = TREE_TYPE (type);
794 while (TREE_CODE (type) == ARRAY_TYPE)
795 type = TREE_TYPE (type);
796 if (type == error_mark_node || !CLASS_TYPE_P (type))
797 return false;
798 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
799 return false;
800 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
801 return false;
802 return true;
805 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
807 static void
808 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
810 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
811 (splay_tree_key) decl);
812 if (n == NULL)
814 int flags = OMP_CLAUSE_DEFAULT_SHARED;
815 if (omp_ctx->outer)
816 omp_cxx_notice_variable (omp_ctx->outer, decl);
817 if (!omp_ctx->default_shared)
819 struct cp_genericize_omp_taskreg *octx;
821 for (octx = omp_ctx->outer; octx; octx = octx->outer)
823 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
824 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
826 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
827 break;
829 if (octx->is_parallel)
830 break;
832 if (octx == NULL
833 && (TREE_CODE (decl) == PARM_DECL
834 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
835 && DECL_CONTEXT (decl) == current_function_decl)))
836 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
837 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
839 /* DECL is implicitly determined firstprivate in
840 the current task construct. Ensure copy ctor and
841 dtor are instantiated, because during gimplification
842 it will be already too late. */
843 tree type = TREE_TYPE (decl);
844 if (is_invisiref_parm (decl))
845 type = TREE_TYPE (type);
846 while (TREE_CODE (type) == ARRAY_TYPE)
847 type = TREE_TYPE (type);
848 get_copy_ctor (type, tf_none);
849 get_dtor (type, tf_none);
852 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
856 /* Genericization context. */
858 struct cp_genericize_data
860 struct pointer_set_t *p_set;
861 vec<tree> bind_expr_stack;
862 struct cp_genericize_omp_taskreg *omp_ctx;
865 /* Perform any pre-gimplification lowering of C++ front end trees to
866 GENERIC. */
868 static tree
869 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
871 tree stmt = *stmt_p;
872 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
873 struct pointer_set_t *p_set = wtd->p_set;
875 /* If in an OpenMP context, note var uses. */
876 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
877 && (VAR_P (stmt)
878 || TREE_CODE (stmt) == PARM_DECL
879 || TREE_CODE (stmt) == RESULT_DECL)
880 && omp_var_to_track (stmt))
881 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
883 if (is_invisiref_parm (stmt)
884 /* Don't dereference parms in a thunk, pass the references through. */
885 && !(DECL_THUNK_P (current_function_decl)
886 && TREE_CODE (stmt) == PARM_DECL))
888 *stmt_p = convert_from_reference (stmt);
889 *walk_subtrees = 0;
890 return NULL;
893 /* Map block scope extern declarations to visible declarations with the
894 same name and type in outer scopes if any. */
895 if (cp_function_chain->extern_decl_map
896 && VAR_OR_FUNCTION_DECL_P (stmt)
897 && DECL_EXTERNAL (stmt))
899 struct cxx_int_tree_map *h, in;
900 in.uid = DECL_UID (stmt);
901 h = (struct cxx_int_tree_map *)
902 htab_find_with_hash (cp_function_chain->extern_decl_map,
903 &in, in.uid);
904 if (h)
906 *stmt_p = h->to;
907 *walk_subtrees = 0;
908 return NULL;
912 /* Other than invisiref parms, don't walk the same tree twice. */
913 if (pointer_set_contains (p_set, stmt))
915 *walk_subtrees = 0;
916 return NULL_TREE;
919 if (TREE_CODE (stmt) == ADDR_EXPR
920 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
922 /* If in an OpenMP context, note var uses. */
923 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
924 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
925 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
926 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
927 *walk_subtrees = 0;
929 else if (TREE_CODE (stmt) == RETURN_EXPR
930 && TREE_OPERAND (stmt, 0)
931 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
932 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
933 *walk_subtrees = 0;
934 else if (TREE_CODE (stmt) == OMP_CLAUSE)
935 switch (OMP_CLAUSE_CODE (stmt))
937 case OMP_CLAUSE_LASTPRIVATE:
938 /* Don't dereference an invisiref in OpenMP clauses. */
939 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
941 *walk_subtrees = 0;
942 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
943 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
944 cp_genericize_r, data, NULL);
946 break;
947 case OMP_CLAUSE_PRIVATE:
948 /* Don't dereference an invisiref in OpenMP clauses. */
949 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
950 *walk_subtrees = 0;
951 else if (wtd->omp_ctx != NULL)
953 /* Private clause doesn't cause any references to the
954 var in outer contexts, avoid calling
955 omp_cxx_notice_variable for it. */
956 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
957 wtd->omp_ctx = NULL;
958 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
959 data, NULL);
960 wtd->omp_ctx = old;
961 *walk_subtrees = 0;
963 break;
964 case OMP_CLAUSE_SHARED:
965 case OMP_CLAUSE_FIRSTPRIVATE:
966 case OMP_CLAUSE_COPYIN:
967 case OMP_CLAUSE_COPYPRIVATE:
968 /* Don't dereference an invisiref in OpenMP clauses. */
969 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
970 *walk_subtrees = 0;
971 break;
972 case OMP_CLAUSE_REDUCTION:
973 /* Don't dereference an invisiref in reduction clause's
974 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
975 still needs to be genericized. */
976 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
978 *walk_subtrees = 0;
979 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
980 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
981 cp_genericize_r, data, NULL);
982 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
983 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
984 cp_genericize_r, data, NULL);
986 break;
987 default:
988 break;
990 else if (IS_TYPE_OR_DECL_P (stmt))
991 *walk_subtrees = 0;
993 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
994 to lower this construct before scanning it, so we need to lower these
995 before doing anything else. */
996 else if (TREE_CODE (stmt) == CLEANUP_STMT)
997 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
998 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
999 : TRY_FINALLY_EXPR,
1000 void_type_node,
1001 CLEANUP_BODY (stmt),
1002 CLEANUP_EXPR (stmt));
1004 else if (TREE_CODE (stmt) == IF_STMT)
1006 genericize_if_stmt (stmt_p);
1007 /* *stmt_p has changed, tail recurse to handle it again. */
1008 return cp_genericize_r (stmt_p, walk_subtrees, data);
1011 /* COND_EXPR might have incompatible types in branches if one or both
1012 arms are bitfields. Fix it up now. */
1013 else if (TREE_CODE (stmt) == COND_EXPR)
1015 tree type_left
1016 = (TREE_OPERAND (stmt, 1)
1017 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1018 : NULL_TREE);
1019 tree type_right
1020 = (TREE_OPERAND (stmt, 2)
1021 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1022 : NULL_TREE);
1023 if (type_left
1024 && !useless_type_conversion_p (TREE_TYPE (stmt),
1025 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1027 TREE_OPERAND (stmt, 1)
1028 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1029 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1030 type_left));
1032 if (type_right
1033 && !useless_type_conversion_p (TREE_TYPE (stmt),
1034 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1036 TREE_OPERAND (stmt, 2)
1037 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1038 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1039 type_right));
1043 else if (TREE_CODE (stmt) == BIND_EXPR)
1045 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1047 tree decl;
1048 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1049 if (VAR_P (decl)
1050 && !DECL_EXTERNAL (decl)
1051 && omp_var_to_track (decl))
1053 splay_tree_node n
1054 = splay_tree_lookup (wtd->omp_ctx->variables,
1055 (splay_tree_key) decl);
1056 if (n == NULL)
1057 splay_tree_insert (wtd->omp_ctx->variables,
1058 (splay_tree_key) decl,
1059 TREE_STATIC (decl)
1060 ? OMP_CLAUSE_DEFAULT_SHARED
1061 : OMP_CLAUSE_DEFAULT_PRIVATE);
1064 wtd->bind_expr_stack.safe_push (stmt);
1065 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1066 cp_genericize_r, data, NULL);
1067 wtd->bind_expr_stack.pop ();
1070 else if (TREE_CODE (stmt) == USING_STMT)
1072 tree block = NULL_TREE;
1074 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1075 BLOCK, and append an IMPORTED_DECL to its
1076 BLOCK_VARS chained list. */
1077 if (wtd->bind_expr_stack.exists ())
1079 int i;
1080 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1081 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1082 break;
1084 if (block)
1086 tree using_directive;
1087 gcc_assert (TREE_OPERAND (stmt, 0));
1089 using_directive = make_node (IMPORTED_DECL);
1090 TREE_TYPE (using_directive) = void_type_node;
1092 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1093 = TREE_OPERAND (stmt, 0);
1094 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1095 BLOCK_VARS (block) = using_directive;
1097 /* The USING_STMT won't appear in GENERIC. */
1098 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1099 *walk_subtrees = 0;
1102 else if (TREE_CODE (stmt) == DECL_EXPR
1103 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1105 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1106 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1107 *walk_subtrees = 0;
1109 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1111 struct cp_genericize_omp_taskreg omp_ctx;
1112 tree c, decl;
1113 splay_tree_node n;
1115 *walk_subtrees = 0;
1116 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1117 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1118 omp_ctx.default_shared = omp_ctx.is_parallel;
1119 omp_ctx.outer = wtd->omp_ctx;
1120 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1121 wtd->omp_ctx = &omp_ctx;
1122 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1123 switch (OMP_CLAUSE_CODE (c))
1125 case OMP_CLAUSE_SHARED:
1126 case OMP_CLAUSE_PRIVATE:
1127 case OMP_CLAUSE_FIRSTPRIVATE:
1128 case OMP_CLAUSE_LASTPRIVATE:
1129 decl = OMP_CLAUSE_DECL (c);
1130 if (decl == error_mark_node || !omp_var_to_track (decl))
1131 break;
1132 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1133 if (n != NULL)
1134 break;
1135 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1136 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1137 ? OMP_CLAUSE_DEFAULT_SHARED
1138 : OMP_CLAUSE_DEFAULT_PRIVATE);
1139 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1140 && omp_ctx.outer)
1141 omp_cxx_notice_variable (omp_ctx.outer, decl);
1142 break;
1143 case OMP_CLAUSE_DEFAULT:
1144 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1145 omp_ctx.default_shared = true;
1146 default:
1147 break;
1149 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1150 wtd->omp_ctx = omp_ctx.outer;
1151 splay_tree_delete (omp_ctx.variables);
1153 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1154 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1155 else if (TREE_CODE (stmt) == FOR_STMT)
1156 genericize_for_stmt (stmt_p, walk_subtrees, data);
1157 else if (TREE_CODE (stmt) == WHILE_STMT)
1158 genericize_while_stmt (stmt_p, walk_subtrees, data);
1159 else if (TREE_CODE (stmt) == DO_STMT)
1160 genericize_do_stmt (stmt_p, walk_subtrees, data);
1161 else if (TREE_CODE (stmt) == SWITCH_STMT)
1162 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1163 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1164 genericize_continue_stmt (stmt_p);
1165 else if (TREE_CODE (stmt) == BREAK_STMT)
1166 genericize_break_stmt (stmt_p);
1167 else if (TREE_CODE (stmt) == OMP_FOR
1168 || TREE_CODE (stmt) == OMP_SIMD
1169 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1170 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1171 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1173 if (SIZEOF_EXPR_TYPE_P (stmt))
1174 *stmt_p
1175 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1176 SIZEOF_EXPR, false);
1177 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1178 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1179 SIZEOF_EXPR, false);
1180 else
1181 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1182 SIZEOF_EXPR, false);
1183 if (*stmt_p == error_mark_node)
1184 *stmt_p = size_one_node;
1185 return NULL;
1188 pointer_set_insert (p_set, *stmt_p);
1190 return NULL;
1193 /* Lower C++ front end trees to GENERIC in T_P. */
1195 static void
1196 cp_genericize_tree (tree* t_p)
1198 struct cp_genericize_data wtd;
1200 wtd.p_set = pointer_set_create ();
1201 wtd.bind_expr_stack.create (0);
1202 wtd.omp_ctx = NULL;
1203 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1204 pointer_set_destroy (wtd.p_set);
1205 wtd.bind_expr_stack.release ();
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  /* Nothing to do for void functions, for constructors/destructors
     (whose written return type is void), or when the target hook
     says no fall-off-the-end warning applies to this function.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  /* Walk down the rightmost spine of the saved body, looking for the
     statement that would execute last.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  /* Only the protected body can fall off the end.  */
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* Function obviously ends in a return — nothing to instrument.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the runtime check after the last statement of the
     outermost BIND_EXPR's statement list.  */
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}
/* Convert the saved body of FNDECL from C++ front end trees to
   GENERIC, adjusting parameter and return decls passed by invisible
   reference along the way.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  The named
	     return value optimization may have aliased a local
	     variable to the result decl; find it by name and matching
	     value-expr and redirect it through the new reference.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_enable_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN)
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue labels must have been consumed by the
     loop/switch genericization done above.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the leading parameter type(s) — they correspond to the
     object argument(s) filled in explicitly below; what remains are
     the parameters whose defaults must be supplied.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: emit an element-by-element loop applying FN.  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Descend through (possibly multidimensional) arrays down to
	 the element type, building ARRAY_REFs to element zero.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* End pointer: one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1 (and P2) are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      /* Emit the call on the current elements.  */
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the induction pointers by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop back while elements remain.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the addresses of ARG1/ARG2.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1444 /* Return code to initialize DECL with its default constructor, or
1445 NULL if there's nothing to do. */
1447 tree
1448 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1450 tree info = CP_OMP_CLAUSE_INFO (clause);
1451 tree ret = NULL;
1453 if (info)
1454 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1456 return ret;
1459 /* Return code to initialize DST with a copy constructor from SRC. */
1461 tree
1462 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1464 tree info = CP_OMP_CLAUSE_INFO (clause);
1465 tree ret = NULL;
1467 if (info)
1468 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1469 if (ret == NULL)
1470 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1472 return ret;
1475 /* Similarly, except use an assignment operator instead. */
1477 tree
1478 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1480 tree info = CP_OMP_CLAUSE_INFO (clause);
1481 tree ret = NULL;
1483 if (info)
1484 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1485 if (ret == NULL)
1486 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1488 return ret;
1491 /* Return code to destroy DECL. */
1493 tree
1494 cxx_omp_clause_dtor (tree clause, tree decl)
1496 tree info = CP_OMP_CLAUSE_INFO (clause);
1497 tree ret = NULL;
1499 if (info)
1500 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1502 return ret;
1505 /* True if OpenMP should privatize what this DECL points to rather
1506 than the DECL itself. */
1508 bool
1509 cxx_omp_privatize_by_reference (const_tree decl)
1511 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1512 || is_invisiref_parm (decl));
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only parameters passed by invisible reference may be looked
	 through; other references don't qualify.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Look up the user-declared variable
	     aliased to the result decl (matched by name and main
	     type variant) and use its possibly-const type instead.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1557 /* True if OpenMP sharing attribute of DECL is predetermined. */
1559 enum omp_clause_default_kind
1560 cxx_omp_predetermined_sharing (tree decl)
1562 /* Static data members are predetermined shared. */
1563 if (TREE_STATIC (decl))
1565 tree ctx = CP_DECL_CONTEXT (decl);
1566 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1567 return OMP_CLAUSE_DEFAULT_SHARED;
1570 /* Const qualified vars having no mutable member are predetermined
1571 shared. */
1572 if (cxx_omp_const_qual_no_mutable (decl))
1573 return OMP_CLAUSE_DEFAULT_SHARED;
1575 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only implicit firstprivate clauses need fixing up here.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* An invisible-reference parm really denotes the referenced
	 object; any other reference type is an error for implicit
	 firstprivate.  */
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  /* Anything we could not firstprivatize is demoted to shared.  */
  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}