PR c++/65054
[official-gcc.git] / gcc / cp / cp-gimplify.c
blob4233a64aefe88da857bee8fb6e1c3cd389fbc51b
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "cp-tree.h"
38 #include "c-family/c-common.h"
39 #include "tree-iterator.h"
40 #include "predict.h"
41 #include "hard-reg-set.h"
42 #include "input.h"
43 #include "function.h"
44 #include "basic-block.h"
45 #include "tree-ssa-alias.h"
46 #include "internal-fn.h"
47 #include "gimple-expr.h"
48 #include "is-a.h"
49 #include "gimple.h"
50 #include "gimplify.h"
51 #include "flags.h"
52 #include "splay-tree.h"
53 #include "target.h"
54 #include "c-family/c-ubsan.h"
55 #include "cilk.h"
57 /* Forward declarations. */
59 static tree cp_genericize_r (tree *, int *, void *);
60 static void cp_genericize_tree (tree*);
62 /* Local declarations. */
64 enum bc_t { bc_break = 0, bc_continue = 1 };
66 /* Stack of labels which are targets for "break" or "continue",
67 linked through TREE_CHAIN. */
68 static tree bc_label[2];
70 /* Begin a scope which can be exited by a break or continue statement. BC
71 indicates which.
73 Just creates a label with location LOCATION and pushes it into the current
74 context. */
76 static tree
77 begin_bc_block (enum bc_t bc, location_t location)
79 tree label = create_artificial_label (location);
80 DECL_CHAIN (label) = bc_label[bc];
81 bc_label[bc] = label;
82 if (bc == bc_break)
83 LABEL_DECL_BREAK (label) = true;
84 else
85 LABEL_DECL_CONTINUE (label) = true;
86 return label;
89 /* Finish a scope which can be exited by a break or continue statement.
90 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
91 an expression for the contents of the scope.
93 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
94 BLOCK. Otherwise, just forget the label. */
96 static void
97 finish_bc_block (tree *block, enum bc_t bc, tree label)
99 gcc_assert (label == bc_label[bc]);
101 if (TREE_USED (label))
102 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
103 block);
105 bc_label[bc] = DECL_CHAIN (label);
106 DECL_CHAIN (label) = NULL_TREE;
109 /* Get the LABEL_EXPR to represent a break or continue statement
110 in the current block scope. BC indicates which. */
112 static tree
113 get_bc_label (enum bc_t bc)
115 tree label = bc_label[bc];
117 /* Mark the label used for finish_bc_block. */
118 TREE_USED (label) = 1;
119 return label;
122 /* Genericize a TRY_BLOCK. */
124 static void
125 genericize_try_block (tree *stmt_p)
127 tree body = TRY_STMTS (*stmt_p);
128 tree cleanup = TRY_HANDLERS (*stmt_p);
130 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
133 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
135 static void
136 genericize_catch_block (tree *stmt_p)
138 tree type = HANDLER_TYPE (*stmt_p);
139 tree body = HANDLER_BODY (*stmt_p);
141 /* FIXME should the caught type go in TREE_TYPE? */
142 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
145 /* A terser interface for building a representation of an exception
146 specification. */
148 static tree
149 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
151 tree t;
153 /* FIXME should the allowed types go in TREE_TYPE? */
154 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
155 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
157 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
158 append_to_statement_list (body, &TREE_OPERAND (t, 0));
160 return t;
163 /* Genericize an EH_SPEC_BLOCK by converting it to a
164 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
166 static void
167 genericize_eh_spec_block (tree *stmt_p)
169 tree body = EH_SPEC_STMTS (*stmt_p);
170 tree allowed = EH_SPEC_RAISES (*stmt_p);
171 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
173 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
174 TREE_NO_WARNING (*stmt_p) = true;
175 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
178 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
180 static void
181 genericize_if_stmt (tree *stmt_p)
183 tree stmt, cond, then_, else_;
184 location_t locus = EXPR_LOCATION (*stmt_p);
186 stmt = *stmt_p;
187 cond = IF_COND (stmt);
188 then_ = THEN_CLAUSE (stmt);
189 else_ = ELSE_CLAUSE (stmt);
191 if (!then_)
192 then_ = build_empty_stmt (locus);
193 if (!else_)
194 else_ = build_empty_stmt (locus);
196 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
197 stmt = then_;
198 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
199 stmt = else_;
200 else
201 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
202 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
203 SET_EXPR_LOCATION (stmt, locus);
204 *stmt_p = stmt;
207 /* Build a generic representation of one of the C loop forms. COND is the
208 loop condition or NULL_TREE. BODY is the (possibly compound) statement
209 controlled by the loop. INCR is the increment expression of a for-loop,
210 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
211 evaluated before the loop body as in while and for loops, or after the
212 loop body as in do-while loops. */
214 static void
215 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
216 tree incr, bool cond_is_first, int *walk_subtrees,
217 void *data)
219 tree blab, clab;
220 tree exit = NULL;
221 tree stmt_list = NULL;
223 blab = begin_bc_block (bc_break, start_locus);
224 clab = begin_bc_block (bc_continue, start_locus);
226 if (incr && EXPR_P (incr))
227 SET_EXPR_LOCATION (incr, start_locus);
229 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
230 cp_walk_tree (&body, cp_genericize_r, data, NULL);
231 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
232 *walk_subtrees = 0;
234 if (cond && TREE_CODE (cond) != INTEGER_CST)
236 /* If COND is constant, don't bother building an exit. If it's false,
237 we won't build a loop. If it's true, any exits are in the body. */
238 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
239 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
240 get_bc_label (bc_break));
241 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
242 build_empty_stmt (cloc), exit);
245 if (exit && cond_is_first)
246 append_to_statement_list (exit, &stmt_list);
247 append_to_statement_list (body, &stmt_list);
248 finish_bc_block (&stmt_list, bc_continue, clab);
249 append_to_statement_list (incr, &stmt_list);
250 if (exit && !cond_is_first)
251 append_to_statement_list (exit, &stmt_list);
253 if (!stmt_list)
254 stmt_list = build_empty_stmt (start_locus);
256 tree loop;
257 if (cond && integer_zerop (cond))
259 if (cond_is_first)
260 loop = fold_build3_loc (start_locus, COND_EXPR,
261 void_type_node, cond, stmt_list,
262 build_empty_stmt (start_locus));
263 else
264 loop = stmt_list;
266 else
267 loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);
269 stmt_list = NULL;
270 append_to_statement_list (loop, &stmt_list);
271 finish_bc_block (&stmt_list, bc_break, blab);
272 if (!stmt_list)
273 stmt_list = build_empty_stmt (start_locus);
275 *stmt_p = stmt_list;
278 /* Genericize a FOR_STMT node *STMT_P. */
280 static void
281 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
283 tree stmt = *stmt_p;
284 tree expr = NULL;
285 tree loop;
286 tree init = FOR_INIT_STMT (stmt);
288 if (init)
290 cp_walk_tree (&init, cp_genericize_r, data, NULL);
291 append_to_statement_list (init, &expr);
294 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
295 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
296 append_to_statement_list (loop, &expr);
297 if (expr == NULL_TREE)
298 expr = loop;
299 *stmt_p = expr;
302 /* Genericize a WHILE_STMT node *STMT_P. */
304 static void
305 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
307 tree stmt = *stmt_p;
308 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
309 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
312 /* Genericize a DO_STMT node *STMT_P. */
314 static void
315 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
317 tree stmt = *stmt_p;
318 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
319 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
322 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
324 static void
325 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
327 tree stmt = *stmt_p;
328 tree break_block, body, cond, type;
329 location_t stmt_locus = EXPR_LOCATION (stmt);
331 break_block = begin_bc_block (bc_break, stmt_locus);
333 body = SWITCH_STMT_BODY (stmt);
334 if (!body)
335 body = build_empty_stmt (stmt_locus);
336 cond = SWITCH_STMT_COND (stmt);
337 type = SWITCH_STMT_TYPE (stmt);
339 cp_walk_tree (&body, cp_genericize_r, data, NULL);
340 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
341 cp_walk_tree (&type, cp_genericize_r, data, NULL);
342 *walk_subtrees = 0;
344 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
345 finish_bc_block (stmt_p, bc_break, break_block);
348 /* Genericize a CONTINUE_STMT node *STMT_P. */
350 static void
351 genericize_continue_stmt (tree *stmt_p)
353 tree stmt_list = NULL;
354 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
355 tree label = get_bc_label (bc_continue);
356 location_t location = EXPR_LOCATION (*stmt_p);
357 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
358 append_to_statement_list (pred, &stmt_list);
359 append_to_statement_list (jump, &stmt_list);
360 *stmt_p = stmt_list;
363 /* Genericize a BREAK_STMT node *STMT_P. */
365 static void
366 genericize_break_stmt (tree *stmt_p)
368 tree label = get_bc_label (bc_break);
369 location_t location = EXPR_LOCATION (*stmt_p);
370 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
373 /* Genericize a OMP_FOR node *STMT_P. */
375 static void
376 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
378 tree stmt = *stmt_p;
379 location_t locus = EXPR_LOCATION (stmt);
380 tree clab = begin_bc_block (bc_continue, locus);
382 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
383 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
384 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
385 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
386 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
387 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
388 *walk_subtrees = 0;
390 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
393 /* Hook into the middle of gimplifying an OMP_FOR node. */
395 static enum gimplify_status
396 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
398 tree for_stmt = *expr_p;
399 gimple_seq seq = NULL;
401 /* Protect ourselves from recursion. */
402 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
403 return GS_UNHANDLED;
404 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
406 gimplify_and_add (for_stmt, &seq);
407 gimple_seq_add_seq (pre_p, seq);
409 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
411 return GS_ALL_DONE;
414 /* Gimplify an EXPR_STMT node. */
416 static void
417 gimplify_expr_stmt (tree *stmt_p)
419 tree stmt = EXPR_STMT_EXPR (*stmt_p);
421 if (stmt == error_mark_node)
422 stmt = NULL;
424 /* Gimplification of a statement expression will nullify the
425 statement if all its side effects are moved to *PRE_P and *POST_P.
427 In this case we will not want to emit the gimplified statement.
428 However, we may still want to emit a warning, so we do that before
429 gimplification. */
430 if (stmt && warn_unused_value)
432 if (!TREE_SIDE_EFFECTS (stmt))
434 if (!IS_EMPTY_STMT (stmt)
435 && !VOID_TYPE_P (TREE_TYPE (stmt))
436 && !TREE_NO_WARNING (stmt))
437 warning (OPT_Wunused_value, "statement with no effect");
439 else
440 warn_if_unused_value (stmt, input_location);
443 if (stmt == NULL_TREE)
444 stmt = alloc_stmt_list ();
446 *stmt_p = stmt;
449 /* Gimplify initialization from an AGGR_INIT_EXPR. */
451 static void
452 cp_gimplify_init_expr (tree *expr_p)
454 tree from = TREE_OPERAND (*expr_p, 1);
455 tree to = TREE_OPERAND (*expr_p, 0);
456 tree t;
458 /* What about code that pulls out the temp and uses it elsewhere? I
459 think that such code never uses the TARGET_EXPR as an initializer. If
460 I'm wrong, we'll abort because the temp won't have any RTL. In that
461 case, I guess we'll need to replace references somehow. */
462 if (TREE_CODE (from) == TARGET_EXPR)
463 from = TARGET_EXPR_INITIAL (from);
465 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
466 inside the TARGET_EXPR. */
467 for (t = from; t; )
469 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
471 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
472 replace the slot operand with our target.
474 Should we add a target parm to gimplify_expr instead? No, as in this
475 case we want to replace the INIT_EXPR. */
476 if (TREE_CODE (sub) == AGGR_INIT_EXPR
477 || TREE_CODE (sub) == VEC_INIT_EXPR)
479 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
480 AGGR_INIT_EXPR_SLOT (sub) = to;
481 else
482 VEC_INIT_EXPR_SLOT (sub) = to;
483 *expr_p = from;
485 /* The initialization is now a side-effect, so the container can
486 become void. */
487 if (from != sub)
488 TREE_TYPE (from) = void_type_node;
491 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
492 /* Handle aggregate NSDMI. */
493 replace_placeholders (sub, to);
495 if (t == sub)
496 break;
497 else
498 t = TREE_OPERAND (t, 1);
503 /* Gimplify a MUST_NOT_THROW_EXPR. */
505 static enum gimplify_status
506 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
508 tree stmt = *expr_p;
509 tree temp = voidify_wrapper_expr (stmt, NULL);
510 tree body = TREE_OPERAND (stmt, 0);
511 gimple_seq try_ = NULL;
512 gimple_seq catch_ = NULL;
513 gimple mnt;
515 gimplify_and_add (body, &try_);
516 mnt = gimple_build_eh_must_not_throw (terminate_node);
517 gimple_seq_add_stmt_without_update (&catch_, mnt);
518 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
520 gimple_seq_add_stmt_without_update (pre_p, mnt);
521 if (temp)
523 *expr_p = temp;
524 return GS_OK;
527 *expr_p = NULL;
528 return GS_ALL_DONE;
531 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
534 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
536 int saved_stmts_are_full_exprs_p = 0;
537 enum tree_code code = TREE_CODE (*expr_p);
538 enum gimplify_status ret;
540 if (STATEMENT_CODE_P (code))
542 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
543 current_stmt_tree ()->stmts_are_full_exprs_p
544 = STMT_IS_FULL_EXPR_P (*expr_p);
547 switch (code)
549 case PTRMEM_CST:
550 *expr_p = cplus_expand_constant (*expr_p);
551 ret = GS_OK;
552 break;
554 case AGGR_INIT_EXPR:
555 simplify_aggr_init_expr (expr_p);
556 ret = GS_OK;
557 break;
559 case VEC_INIT_EXPR:
561 location_t loc = input_location;
562 tree init = VEC_INIT_EXPR_INIT (*expr_p);
563 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
564 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
565 input_location = EXPR_LOCATION (*expr_p);
566 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
567 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
568 from_array,
569 tf_warning_or_error);
570 cp_genericize_tree (expr_p);
571 ret = GS_OK;
572 input_location = loc;
574 break;
576 case THROW_EXPR:
577 /* FIXME communicate throw type to back end, probably by moving
578 THROW_EXPR into ../tree.def. */
579 *expr_p = TREE_OPERAND (*expr_p, 0);
580 ret = GS_OK;
581 break;
583 case MUST_NOT_THROW_EXPR:
584 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
585 break;
587 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
588 LHS of an assignment might also be involved in the RHS, as in bug
589 25979. */
590 case INIT_EXPR:
591 if (fn_contains_cilk_spawn_p (cfun)
592 && cilk_detect_spawn_and_unwrap (expr_p)
593 && !seen_error ())
594 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
595 cp_gimplify_init_expr (expr_p);
596 if (TREE_CODE (*expr_p) != INIT_EXPR)
597 return GS_OK;
598 /* Otherwise fall through. */
599 case MODIFY_EXPR:
601 if (fn_contains_cilk_spawn_p (cfun)
602 && cilk_detect_spawn_and_unwrap (expr_p)
603 && !seen_error ())
604 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
606 /* If the back end isn't clever enough to know that the lhs and rhs
607 types are the same, add an explicit conversion. */
608 tree op0 = TREE_OPERAND (*expr_p, 0);
609 tree op1 = TREE_OPERAND (*expr_p, 1);
611 if (!error_operand_p (op0)
612 && !error_operand_p (op1)
613 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
614 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
615 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
616 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
617 TREE_TYPE (op0), op1);
619 else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
620 || (TREE_CODE (op1) == CONSTRUCTOR
621 && CONSTRUCTOR_NELTS (op1) == 0
622 && !TREE_CLOBBER_P (op1))
623 || (TREE_CODE (op1) == CALL_EXPR
624 && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
625 && is_really_empty_class (TREE_TYPE (op0)))
627 /* Remove any copies of empty classes. We check that the RHS
628 has a simple form so that TARGET_EXPRs and non-empty
629 CONSTRUCTORs get reduced properly, and we leave the return
630 slot optimization alone because it isn't a copy (FIXME so it
631 shouldn't be represented as one).
633 Also drop volatile variables on the RHS to avoid infinite
634 recursion from gimplify_expr trying to load the value. */
635 if (!TREE_SIDE_EFFECTS (op1))
636 *expr_p = op0;
637 else if (TREE_THIS_VOLATILE (op1)
638 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
639 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
640 build_fold_addr_expr (op1), op0);
641 else
642 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
643 op0, op1);
646 ret = GS_OK;
647 break;
649 case EMPTY_CLASS_EXPR:
650 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
651 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
652 ret = GS_OK;
653 break;
655 case BASELINK:
656 *expr_p = BASELINK_FUNCTIONS (*expr_p);
657 ret = GS_OK;
658 break;
660 case TRY_BLOCK:
661 genericize_try_block (expr_p);
662 ret = GS_OK;
663 break;
665 case HANDLER:
666 genericize_catch_block (expr_p);
667 ret = GS_OK;
668 break;
670 case EH_SPEC_BLOCK:
671 genericize_eh_spec_block (expr_p);
672 ret = GS_OK;
673 break;
675 case USING_STMT:
676 gcc_unreachable ();
678 case FOR_STMT:
679 case WHILE_STMT:
680 case DO_STMT:
681 case SWITCH_STMT:
682 case CONTINUE_STMT:
683 case BREAK_STMT:
684 gcc_unreachable ();
686 case OMP_FOR:
687 case OMP_SIMD:
688 case OMP_DISTRIBUTE:
689 ret = cp_gimplify_omp_for (expr_p, pre_p);
690 break;
692 case EXPR_STMT:
693 gimplify_expr_stmt (expr_p);
694 ret = GS_OK;
695 break;
697 case UNARY_PLUS_EXPR:
699 tree arg = TREE_OPERAND (*expr_p, 0);
700 tree type = TREE_TYPE (*expr_p);
701 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
702 : arg;
703 ret = GS_OK;
705 break;
707 case CILK_SPAWN_STMT:
708 gcc_assert
709 (fn_contains_cilk_spawn_p (cfun)
710 && cilk_detect_spawn_and_unwrap (expr_p));
712 /* If errors are seen, then just process it as a CALL_EXPR. */
713 if (!seen_error ())
714 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
716 case CALL_EXPR:
717 if (fn_contains_cilk_spawn_p (cfun)
718 && cilk_detect_spawn_and_unwrap (expr_p)
719 && !seen_error ())
720 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
722 /* DR 1030 says that we need to evaluate the elements of an
723 initializer-list in forward order even when it's used as arguments to
724 a constructor. So if the target wants to evaluate them in reverse
725 order and there's more than one argument other than 'this', gimplify
726 them in order. */
727 ret = GS_OK;
728 if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
729 && call_expr_nargs (*expr_p) > 2)
731 int nargs = call_expr_nargs (*expr_p);
732 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
733 for (int i = 1; i < nargs; ++i)
735 enum gimplify_status t
736 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
737 if (t == GS_ERROR)
738 ret = GS_ERROR;
741 break;
743 default:
744 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
745 break;
748 /* Restore saved state. */
749 if (STATEMENT_CODE_P (code))
750 current_stmt_tree ()->stmts_are_full_exprs_p
751 = saved_stmts_are_full_exprs_p;
753 return ret;
756 static inline bool
757 is_invisiref_parm (const_tree t)
759 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
760 && DECL_BY_REFERENCE (t));
763 /* Return true if the uid in both int tree maps are equal. */
765 bool
766 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
768 return (a->uid == b->uid);
771 /* Hash a UID in a cxx_int_tree_map. */
773 unsigned int
774 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
776 return item->uid;
779 /* A stable comparison routine for use with splay trees and DECLs. */
781 static int
782 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
784 tree a = (tree) xa;
785 tree b = (tree) xb;
787 return DECL_UID (a) - DECL_UID (b);
790 /* OpenMP context during genericization. */
792 struct cp_genericize_omp_taskreg
794 bool is_parallel;
795 bool default_shared;
796 struct cp_genericize_omp_taskreg *outer;
797 splay_tree variables;
800 /* Return true if genericization should try to determine if
801 DECL is firstprivate or shared within task regions. */
803 static bool
804 omp_var_to_track (tree decl)
806 tree type = TREE_TYPE (decl);
807 if (is_invisiref_parm (decl))
808 type = TREE_TYPE (type);
809 while (TREE_CODE (type) == ARRAY_TYPE)
810 type = TREE_TYPE (type);
811 if (type == error_mark_node || !CLASS_TYPE_P (type))
812 return false;
813 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
814 return false;
815 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
816 return false;
817 return true;
820 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
822 static void
823 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
825 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
826 (splay_tree_key) decl);
827 if (n == NULL)
829 int flags = OMP_CLAUSE_DEFAULT_SHARED;
830 if (omp_ctx->outer)
831 omp_cxx_notice_variable (omp_ctx->outer, decl);
832 if (!omp_ctx->default_shared)
834 struct cp_genericize_omp_taskreg *octx;
836 for (octx = omp_ctx->outer; octx; octx = octx->outer)
838 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
839 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
841 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
842 break;
844 if (octx->is_parallel)
845 break;
847 if (octx == NULL
848 && (TREE_CODE (decl) == PARM_DECL
849 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
850 && DECL_CONTEXT (decl) == current_function_decl)))
851 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
852 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
854 /* DECL is implicitly determined firstprivate in
855 the current task construct. Ensure copy ctor and
856 dtor are instantiated, because during gimplification
857 it will be already too late. */
858 tree type = TREE_TYPE (decl);
859 if (is_invisiref_parm (decl))
860 type = TREE_TYPE (type);
861 while (TREE_CODE (type) == ARRAY_TYPE)
862 type = TREE_TYPE (type);
863 get_copy_ctor (type, tf_none);
864 get_dtor (type, tf_none);
867 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
871 /* Genericization context. */
873 struct cp_genericize_data
875 hash_set<tree> *p_set;
876 vec<tree> bind_expr_stack;
877 struct cp_genericize_omp_taskreg *omp_ctx;
880 /* Perform any pre-gimplification lowering of C++ front end trees to
881 GENERIC. */
883 static tree
884 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
886 tree stmt = *stmt_p;
887 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
888 hash_set<tree> *p_set = wtd->p_set;
890 /* If in an OpenMP context, note var uses. */
891 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
892 && (VAR_P (stmt)
893 || TREE_CODE (stmt) == PARM_DECL
894 || TREE_CODE (stmt) == RESULT_DECL)
895 && omp_var_to_track (stmt))
896 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
898 if (is_invisiref_parm (stmt)
899 /* Don't dereference parms in a thunk, pass the references through. */
900 && !(DECL_THUNK_P (current_function_decl)
901 && TREE_CODE (stmt) == PARM_DECL))
903 *stmt_p = convert_from_reference (stmt);
904 *walk_subtrees = 0;
905 return NULL;
908 /* Map block scope extern declarations to visible declarations with the
909 same name and type in outer scopes if any. */
910 if (cp_function_chain->extern_decl_map
911 && VAR_OR_FUNCTION_DECL_P (stmt)
912 && DECL_EXTERNAL (stmt))
914 struct cxx_int_tree_map *h, in;
915 in.uid = DECL_UID (stmt);
916 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
917 if (h)
919 *stmt_p = h->to;
920 *walk_subtrees = 0;
921 return NULL;
925 /* Other than invisiref parms, don't walk the same tree twice. */
926 if (p_set->contains (stmt))
928 *walk_subtrees = 0;
929 return NULL_TREE;
932 if (TREE_CODE (stmt) == ADDR_EXPR
933 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
935 /* If in an OpenMP context, note var uses. */
936 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
937 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
938 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
939 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
940 *walk_subtrees = 0;
942 else if (TREE_CODE (stmt) == RETURN_EXPR
943 && TREE_OPERAND (stmt, 0)
944 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
945 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
946 *walk_subtrees = 0;
947 else if (TREE_CODE (stmt) == OMP_CLAUSE)
948 switch (OMP_CLAUSE_CODE (stmt))
950 case OMP_CLAUSE_LASTPRIVATE:
951 /* Don't dereference an invisiref in OpenMP clauses. */
952 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
954 *walk_subtrees = 0;
955 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
956 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
957 cp_genericize_r, data, NULL);
959 break;
960 case OMP_CLAUSE_PRIVATE:
961 /* Don't dereference an invisiref in OpenMP clauses. */
962 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
963 *walk_subtrees = 0;
964 else if (wtd->omp_ctx != NULL)
966 /* Private clause doesn't cause any references to the
967 var in outer contexts, avoid calling
968 omp_cxx_notice_variable for it. */
969 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
970 wtd->omp_ctx = NULL;
971 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
972 data, NULL);
973 wtd->omp_ctx = old;
974 *walk_subtrees = 0;
976 break;
977 case OMP_CLAUSE_SHARED:
978 case OMP_CLAUSE_FIRSTPRIVATE:
979 case OMP_CLAUSE_COPYIN:
980 case OMP_CLAUSE_COPYPRIVATE:
981 /* Don't dereference an invisiref in OpenMP clauses. */
982 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
983 *walk_subtrees = 0;
984 break;
985 case OMP_CLAUSE_REDUCTION:
986 /* Don't dereference an invisiref in reduction clause's
987 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
988 still needs to be genericized. */
989 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
991 *walk_subtrees = 0;
992 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
993 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
994 cp_genericize_r, data, NULL);
995 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
996 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
997 cp_genericize_r, data, NULL);
999 break;
1000 default:
1001 break;
1003 else if (IS_TYPE_OR_DECL_P (stmt))
1004 *walk_subtrees = 0;
1006 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1007 to lower this construct before scanning it, so we need to lower these
1008 before doing anything else. */
1009 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1010 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1011 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1012 : TRY_FINALLY_EXPR,
1013 void_type_node,
1014 CLEANUP_BODY (stmt),
1015 CLEANUP_EXPR (stmt));
1017 else if (TREE_CODE (stmt) == IF_STMT)
1019 genericize_if_stmt (stmt_p);
1020 /* *stmt_p has changed, tail recurse to handle it again. */
1021 return cp_genericize_r (stmt_p, walk_subtrees, data);
1024 /* COND_EXPR might have incompatible types in branches if one or both
1025 arms are bitfields. Fix it up now. */
1026 else if (TREE_CODE (stmt) == COND_EXPR)
1028 tree type_left
1029 = (TREE_OPERAND (stmt, 1)
1030 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1031 : NULL_TREE);
1032 tree type_right
1033 = (TREE_OPERAND (stmt, 2)
1034 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1035 : NULL_TREE);
1036 if (type_left
1037 && !useless_type_conversion_p (TREE_TYPE (stmt),
1038 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1040 TREE_OPERAND (stmt, 1)
1041 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1042 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1043 type_left));
1045 if (type_right
1046 && !useless_type_conversion_p (TREE_TYPE (stmt),
1047 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1049 TREE_OPERAND (stmt, 2)
1050 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1051 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1052 type_right));
1056 else if (TREE_CODE (stmt) == BIND_EXPR)
1058 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1060 tree decl;
1061 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1062 if (VAR_P (decl)
1063 && !DECL_EXTERNAL (decl)
1064 && omp_var_to_track (decl))
1066 splay_tree_node n
1067 = splay_tree_lookup (wtd->omp_ctx->variables,
1068 (splay_tree_key) decl);
1069 if (n == NULL)
1070 splay_tree_insert (wtd->omp_ctx->variables,
1071 (splay_tree_key) decl,
1072 TREE_STATIC (decl)
1073 ? OMP_CLAUSE_DEFAULT_SHARED
1074 : OMP_CLAUSE_DEFAULT_PRIVATE);
1077 wtd->bind_expr_stack.safe_push (stmt);
1078 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1079 cp_genericize_r, data, NULL);
1080 wtd->bind_expr_stack.pop ();
1083 else if (TREE_CODE (stmt) == USING_STMT)
1085 tree block = NULL_TREE;
1087 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1088 BLOCK, and append an IMPORTED_DECL to its
1089 BLOCK_VARS chained list. */
1090 if (wtd->bind_expr_stack.exists ())
1092 int i;
1093 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1094 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1095 break;
1097 if (block)
1099 tree using_directive;
1100 gcc_assert (TREE_OPERAND (stmt, 0));
1102 using_directive = make_node (IMPORTED_DECL);
1103 TREE_TYPE (using_directive) = void_type_node;
1105 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1106 = TREE_OPERAND (stmt, 0);
1107 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1108 BLOCK_VARS (block) = using_directive;
1110 /* The USING_STMT won't appear in GENERIC. */
1111 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1112 *walk_subtrees = 0;
1115 else if (TREE_CODE (stmt) == DECL_EXPR
1116 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1118 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1119 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1120 *walk_subtrees = 0;
1122 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1124 struct cp_genericize_omp_taskreg omp_ctx;
1125 tree c, decl;
1126 splay_tree_node n;
1128 *walk_subtrees = 0;
1129 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1130 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1131 omp_ctx.default_shared = omp_ctx.is_parallel;
1132 omp_ctx.outer = wtd->omp_ctx;
1133 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1134 wtd->omp_ctx = &omp_ctx;
1135 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1136 switch (OMP_CLAUSE_CODE (c))
1138 case OMP_CLAUSE_SHARED:
1139 case OMP_CLAUSE_PRIVATE:
1140 case OMP_CLAUSE_FIRSTPRIVATE:
1141 case OMP_CLAUSE_LASTPRIVATE:
1142 decl = OMP_CLAUSE_DECL (c);
1143 if (decl == error_mark_node || !omp_var_to_track (decl))
1144 break;
1145 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1146 if (n != NULL)
1147 break;
1148 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1149 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1150 ? OMP_CLAUSE_DEFAULT_SHARED
1151 : OMP_CLAUSE_DEFAULT_PRIVATE);
1152 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1153 && omp_ctx.outer)
1154 omp_cxx_notice_variable (omp_ctx.outer, decl);
1155 break;
1156 case OMP_CLAUSE_DEFAULT:
1157 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1158 omp_ctx.default_shared = true;
1159 default:
1160 break;
1162 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1163 wtd->omp_ctx = omp_ctx.outer;
1164 splay_tree_delete (omp_ctx.variables);
1166 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1167 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1168 else if (TREE_CODE (stmt) == FOR_STMT)
1169 genericize_for_stmt (stmt_p, walk_subtrees, data);
1170 else if (TREE_CODE (stmt) == WHILE_STMT)
1171 genericize_while_stmt (stmt_p, walk_subtrees, data);
1172 else if (TREE_CODE (stmt) == DO_STMT)
1173 genericize_do_stmt (stmt_p, walk_subtrees, data);
1174 else if (TREE_CODE (stmt) == SWITCH_STMT)
1175 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1176 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1177 genericize_continue_stmt (stmt_p);
1178 else if (TREE_CODE (stmt) == BREAK_STMT)
1179 genericize_break_stmt (stmt_p);
1180 else if (TREE_CODE (stmt) == OMP_FOR
1181 || TREE_CODE (stmt) == OMP_SIMD
1182 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1183 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1184 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1186 if (SIZEOF_EXPR_TYPE_P (stmt))
1187 *stmt_p
1188 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1189 SIZEOF_EXPR, false);
1190 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1191 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1192 SIZEOF_EXPR, false);
1193 else
1194 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1195 SIZEOF_EXPR, false);
1196 if (*stmt_p == error_mark_node)
1197 *stmt_p = size_one_node;
1198 return NULL;
1200 else if (flag_sanitize
1201 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1203 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1204 && TREE_CODE (stmt) == NOP_EXPR
1205 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1206 ubsan_maybe_instrument_reference (stmt);
1207 else if (TREE_CODE (stmt) == CALL_EXPR)
1209 tree fn = CALL_EXPR_FN (stmt);
1210 if (fn != NULL_TREE
1211 && !error_operand_p (fn)
1212 && POINTER_TYPE_P (TREE_TYPE (fn))
1213 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1215 bool is_ctor
1216 = TREE_CODE (fn) == ADDR_EXPR
1217 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1218 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1219 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1220 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1221 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1222 cp_ubsan_maybe_instrument_member_call (stmt);
1227 p_set->add (*stmt_p);
1229 return NULL;
1232 /* Lower C++ front end trees to GENERIC in T_P. */
1234 static void
1235 cp_genericize_tree (tree* t_p)
1237 struct cp_genericize_data wtd;
1239 wtd.p_set = new hash_set<tree>;
1240 wtd.bind_expr_stack.create (0);
1241 wtd.omp_ctx = NULL;
1242 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1243 delete wtd.p_set;
1244 wtd.bind_expr_stack.release ();
1245 if (flag_sanitize & SANITIZE_VPTR)
1246 cp_ubsan_instrument_member_accesses (t_p);
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  /* Nothing to do for void returns, constructors/destructors, or when
     the target suppresses the missing-return warning for FNDECL.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  /* Walk down to the last statement of the body, looking through
     BIND_EXPRs, TRY_FINALLY_EXPRs and STATEMENT_LISTs.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* The body already obviously ends with a return: no
	     instrumentation needed.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the runtime check after the last statement of the
     outermost BIND_EXPR's STATEMENT_LIST.  */
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}
/* Genericize FNDECL: adjust parms and return value passed by invisible
   reference, then lower DECL_SAVED_TREE to GENERIC.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* Switch the parm to its actual (reference) ABI type and
	   recompute its layout.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var: find the named
	     variable in the outermost user block whose value expr is
	     this result decl, and make it dereference the reference.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue label stacks must have been unwound by now.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Default arguments start after the parm(s) bound to ARG1 (and
     ARG2, when present).  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build a loop that applies FN to each element of
	 ARG1 (and the corresponding element of ARG2, if present).  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  /* Strip nested array types down to the element type,
	     indexing element 0 at each level.  */
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 points one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1 (and P2) are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      /* The call itself, wrapped in a cleanup point.  */
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance P1 (and P2) by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Jump back to the loop head while P1 != END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the addresses of ARG1/ARG2.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1486 /* Return code to initialize DECL with its default constructor, or
1487 NULL if there's nothing to do. */
1489 tree
1490 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1492 tree info = CP_OMP_CLAUSE_INFO (clause);
1493 tree ret = NULL;
1495 if (info)
1496 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1498 return ret;
1501 /* Return code to initialize DST with a copy constructor from SRC. */
1503 tree
1504 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1506 tree info = CP_OMP_CLAUSE_INFO (clause);
1507 tree ret = NULL;
1509 if (info)
1510 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1511 if (ret == NULL)
1512 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1514 return ret;
1517 /* Similarly, except use an assignment operator instead. */
1519 tree
1520 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1522 tree info = CP_OMP_CLAUSE_INFO (clause);
1523 tree ret = NULL;
1525 if (info)
1526 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1527 if (ret == NULL)
1528 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1530 return ret;
1533 /* Return code to destroy DECL. */
1535 tree
1536 cxx_omp_clause_dtor (tree clause, tree decl)
1538 tree info = CP_OMP_CLAUSE_INFO (clause);
1539 tree ret = NULL;
1541 if (info)
1542 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1544 return ret;
1547 /* True if OpenMP should privatize what this DECL points to rather
1548 than the DECL itself. */
1550 bool
1551 cxx_omp_privatize_by_reference (const_tree decl)
1553 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1554 || is_invisiref_parm (decl));
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only invisible reference parms may be looked through; any
	 other reference-typed decl is not considered here.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Recover it from the like-named,
	     like-typed variable in the outermost user block.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1599 /* True if OpenMP sharing attribute of DECL is predetermined. */
1601 enum omp_clause_default_kind
1602 cxx_omp_predetermined_sharing (tree decl)
1604 /* Static data members are predetermined shared. */
1605 if (TREE_STATIC (decl))
1607 tree ctx = CP_DECL_CONTEXT (decl);
1608 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1609 return OMP_CLAUSE_DEFAULT_SHARED;
1612 /* Const qualified vars having no mutable member are predetermined
1613 shared. */
1614 if (cxx_omp_const_qual_no_mutable (decl))
1615 return OMP_CLAUSE_DEFAULT_SHARED;
1617 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only implicitly determined firstprivate clauses need fixing up.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* Invisible reference parms are looked through; any other
	 reference type is an error and is demoted to shared.  */
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}