fix ChangeLog entry for r227407
[official-gcc.git] / gcc / cp / cp-gimplify.c
blob5ab060431a3f3965dc1448d00278a441c72bf7af
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "function.h"
27 #include "predict.h"
28 #include "basic-block.h"
29 #include "tree.h"
30 #include "cp-tree.h"
31 #include "gimple.h"
32 #include "hard-reg-set.h"
33 #include "alias.h"
34 #include "stor-layout.h"
35 #include "c-family/c-common.h"
36 #include "tree-iterator.h"
37 #include "internal-fn.h"
38 #include "gimplify.h"
39 #include "flags.h"
40 #include "splay-tree.h"
41 #include "target.h"
42 #include "c-family/c-ubsan.h"
43 #include "cilk.h"
44 #include "gimplify.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static void cp_genericize_tree (tree*);

/* Local declarations.  */

/* Index into BC_LABEL: 0 for "break" targets, 1 for "continue" targets.  */
enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  One stack per kind of jump.  */
static tree bc_label[2];
59 /* Begin a scope which can be exited by a break or continue statement. BC
60 indicates which.
62 Just creates a label with location LOCATION and pushes it into the current
63 context. */
65 static tree
66 begin_bc_block (enum bc_t bc, location_t location)
68 tree label = create_artificial_label (location);
69 DECL_CHAIN (label) = bc_label[bc];
70 bc_label[bc] = label;
71 if (bc == bc_break)
72 LABEL_DECL_BREAK (label) = true;
73 else
74 LABEL_DECL_CONTINUE (label) = true;
75 return label;
78 /* Finish a scope which can be exited by a break or continue statement.
79 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
80 an expression for the contents of the scope.
82 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
83 BLOCK. Otherwise, just forget the label. */
85 static void
86 finish_bc_block (tree *block, enum bc_t bc, tree label)
88 gcc_assert (label == bc_label[bc]);
90 if (TREE_USED (label))
91 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
92 block);
94 bc_label[bc] = DECL_CHAIN (label);
95 DECL_CHAIN (label) = NULL_TREE;
98 /* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
99 *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
100 TARGET_EXPR. *PRE_P and *POST_P are gimple sequences from the caller
101 of gimplify_cilk_spawn. */
103 static void
104 cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
105 gimple_seq *post_p)
107 int ii = 0;
109 cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
110 if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
111 for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
112 gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
113 is_gimple_reg, fb_rvalue);
117 /* Get the LABEL_EXPR to represent a break or continue statement
118 in the current block scope. BC indicates which. */
120 static tree
121 get_bc_label (enum bc_t bc)
123 tree label = bc_label[bc];
125 /* Mark the label used for finish_bc_block. */
126 TREE_USED (label) = 1;
127 return label;
130 /* Genericize a TRY_BLOCK. */
132 static void
133 genericize_try_block (tree *stmt_p)
135 tree body = TRY_STMTS (*stmt_p);
136 tree cleanup = TRY_HANDLERS (*stmt_p);
138 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
141 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
143 static void
144 genericize_catch_block (tree *stmt_p)
146 tree type = HANDLER_TYPE (*stmt_p);
147 tree body = HANDLER_BODY (*stmt_p);
149 /* FIXME should the caught type go in TREE_TYPE? */
150 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
153 /* A terser interface for building a representation of an exception
154 specification. */
156 static tree
157 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
159 tree t;
161 /* FIXME should the allowed types go in TREE_TYPE? */
162 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
163 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
165 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
166 append_to_statement_list (body, &TREE_OPERAND (t, 0));
168 return t;
171 /* Genericize an EH_SPEC_BLOCK by converting it to a
172 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
174 static void
175 genericize_eh_spec_block (tree *stmt_p)
177 tree body = EH_SPEC_STMTS (*stmt_p);
178 tree allowed = EH_SPEC_RAISES (*stmt_p);
179 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
181 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
182 TREE_NO_WARNING (*stmt_p) = true;
183 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
186 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
188 static void
189 genericize_if_stmt (tree *stmt_p)
191 tree stmt, cond, then_, else_;
192 location_t locus = EXPR_LOCATION (*stmt_p);
194 stmt = *stmt_p;
195 cond = IF_COND (stmt);
196 then_ = THEN_CLAUSE (stmt);
197 else_ = ELSE_CLAUSE (stmt);
199 if (!then_)
200 then_ = build_empty_stmt (locus);
201 if (!else_)
202 else_ = build_empty_stmt (locus);
204 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
205 stmt = then_;
206 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
207 stmt = else_;
208 else
209 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
210 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
211 SET_EXPR_LOCATION (stmt, locus);
212 *stmt_p = stmt;
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.

   The result, a LOOP_EXPR (or the folded remains of one), is stored in
   *STMT_P.  WALK_SUBTREES and DATA are as for cp_genericize_r.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  /* Scopes for "break" and "continue" jumps out of this loop.  */
  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  if (incr && EXPR_P (incr))
    SET_EXPR_LOCATION (incr, start_locus);

  /* Genericize the loop pieces ourselves, then tell the tree walk not to
     descend into this statement again.  */
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  /* Lay out the loop body: [exit-test] body continue-label [incr]
     [exit-test], with the test placed according to COND_IS_FIRST.  */
  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      /* A constant-false condition: a while/for body is wrapped in a
	 COND_EXPR (so it is never entered); a do-while body runs once
	 with no back edge.  */
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);

  /* Emit the break label, if used, after the loop.  */
  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
286 /* Genericize a FOR_STMT node *STMT_P. */
288 static void
289 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
291 tree stmt = *stmt_p;
292 tree expr = NULL;
293 tree loop;
294 tree init = FOR_INIT_STMT (stmt);
296 if (init)
298 cp_walk_tree (&init, cp_genericize_r, data, NULL);
299 append_to_statement_list (init, &expr);
302 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
303 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
304 append_to_statement_list (loop, &expr);
305 if (expr == NULL_TREE)
306 expr = loop;
307 *stmt_p = expr;
310 /* Genericize a WHILE_STMT node *STMT_P. */
312 static void
313 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
315 tree stmt = *stmt_p;
316 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
317 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
320 /* Genericize a DO_STMT node *STMT_P. */
322 static void
323 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
325 tree stmt = *stmt_p;
326 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
327 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  /* A "break" inside the switch jumps to a label emitted after the
     SWITCH_EXPR.  */
  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  /* Genericize the pieces ourselves and suppress the generic walk.  */
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}
356 /* Genericize a CONTINUE_STMT node *STMT_P. */
358 static void
359 genericize_continue_stmt (tree *stmt_p)
361 tree stmt_list = NULL;
362 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
363 tree label = get_bc_label (bc_continue);
364 location_t location = EXPR_LOCATION (*stmt_p);
365 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
366 append_to_statement_list (pred, &stmt_list);
367 append_to_statement_list (jump, &stmt_list);
368 *stmt_p = stmt_list;
371 /* Genericize a BREAK_STMT node *STMT_P. */
373 static void
374 genericize_break_stmt (tree *stmt_p)
376 tree label = get_bc_label (bc_break);
377 location_t location = EXPR_LOCATION (*stmt_p);
378 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
/* Genericize a OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  /* Only a "continue" target is created here; the OMP_FOR node itself
     survives, so no break label is needed at this point.  */
  tree clab = begin_bc_block (bc_continue, locus);

  /* Genericize every piece of the directive ourselves, then suppress
     the generic walk.  */
  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  /* Emit the continue label, if used, at the end of the loop body.  */
  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
401 /* Hook into the middle of gimplifying an OMP_FOR node. */
403 static enum gimplify_status
404 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
406 tree for_stmt = *expr_p;
407 gimple_seq seq = NULL;
409 /* Protect ourselves from recursion. */
410 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
411 return GS_UNHANDLED;
412 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
414 gimplify_and_add (for_stmt, &seq);
415 gimple_seq_add_seq (pre_p, seq);
417 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
419 return GS_ALL_DONE;
422 /* Gimplify an EXPR_STMT node. */
424 static void
425 gimplify_expr_stmt (tree *stmt_p)
427 tree stmt = EXPR_STMT_EXPR (*stmt_p);
429 if (stmt == error_mark_node)
430 stmt = NULL;
432 /* Gimplification of a statement expression will nullify the
433 statement if all its side effects are moved to *PRE_P and *POST_P.
435 In this case we will not want to emit the gimplified statement.
436 However, we may still want to emit a warning, so we do that before
437 gimplification. */
438 if (stmt && warn_unused_value)
440 if (!TREE_SIDE_EFFECTS (stmt))
442 if (!IS_EMPTY_STMT (stmt)
443 && !VOID_TYPE_P (TREE_TYPE (stmt))
444 && !TREE_NO_WARNING (stmt))
445 warning (OPT_Wunused_value, "statement with no effect");
447 else
448 warn_if_unused_value (stmt, input_location);
451 if (stmt == NULL_TREE)
452 stmt = alloc_stmt_list ();
454 *stmt_p = stmt;
/* Gimplify initialization from an AGGR_INIT_EXPR.  *EXPR_P is an
   INIT_EXPR; rewrite it in place so the initializer constructs directly
   into the target object.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      /* SUB is the current element of the chain; the final element is
	 the one that is not itself a COMPOUND_EXPR.  */
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
	/* Handle aggregate NSDMI.  */
	replace_placeholders (sub, to);

      /* Stop once the last element of the COMPOUND_EXPR chain has been
	 examined.  */
      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
/* Gimplify a MUST_NOT_THROW_EXPR.  The body is wrapped in a
   GIMPLE_TRY whose handler is an EH_MUST_NOT_THROW calling the
   terminate routine.  Returns GS_OK if a value-producing temporary was
   created, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  /* If the expression produces a value, voidify_wrapper_expr gives us a
     temporary to hold it.  */
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  /* The handler: any exception reaching here calls terminate.  */
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
539 /* Return TRUE if an operand (OP) of a given TYPE being copied is
540 really just an empty class copy.
542 Check that the operand has a simple form so that TARGET_EXPRs and
543 non-empty CONSTRUCTORs get reduced properly, and we leave the
544 return slot optimization alone because it isn't a copy. */
546 static bool
547 simple_empty_class_p (tree type, tree op)
549 return
550 ((TREE_CODE (op) == COMPOUND_EXPR
551 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
552 || is_gimple_lvalue (op)
553 || INDIRECT_REF_P (op)
554 || (TREE_CODE (op) == CONSTRUCTOR
555 && CONSTRUCTOR_NELTS (op) == 0
556 && !TREE_CLOBBER_P (op))
557 || (TREE_CODE (op) == CALL_EXPR
558 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
559 && is_really_empty_class (type);
/* Do C++-specific gimplification.  Args are as for gimplify_expr.
   Returns a gimplify_status (as an int) describing how much of *EXPR_P
   was handled.  NOTE(review): the return-type line was lost in this
   excerpt's extraction; restored as "int" per the values returned —
   confirm against cp-tree.h.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* For statement trees, temporarily adopt the statement's own
     full-expression flag; restored at the end.  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	cp_genericize_tree (expr_p);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	{
	  cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	}
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	if (fn_contains_cilk_spawn_p (cfun)
	    && cilk_detect_spawn_and_unwrap (expr_p)
	    && !seen_error ())
	  {
	    cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
	    return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	  }

	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    /* These statement forms are lowered during cp_genericize and must
       not survive to gimplification.  */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert
	(fn_contains_cilk_spawn_p (cfun)
	 && cilk_detect_spawn_and_unwrap (expr_p));

      /* If errors are seen, then just process it as a CALL_EXPR.  */
      if (!seen_error ())
	{
	  cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	}
      /* Fall through (on error) to handle as a plain call.  */
    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	{
	  cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
	}

      /* DR 1030 says that we need to evaluate the elements of an
	 initializer-list in forward order even when it's used as arguments to
	 a constructor.  So if the target wants to evaluate them in reverse
	 order and there's more than one argument other than 'this', gimplify
	 them in order.  */
      ret = GS_OK;
      if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
	  && call_expr_nargs (*expr_p) > 2)
	{
	  int nargs = call_expr_nargs (*expr_p);
	  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
	  for (int i = 1; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
801 static inline bool
802 is_invisiref_parm (const_tree t)
804 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
805 && DECL_BY_REFERENCE (t));
808 /* Return true if the uid in both int tree maps are equal. */
810 bool
811 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
813 return (a->uid == b->uid);
816 /* Hash a UID in a cxx_int_tree_map. */
818 unsigned int
819 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
821 return item->uid;
824 /* A stable comparison routine for use with splay trees and DECLs. */
826 static int
827 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
829 tree a = (tree) xa;
830 tree b = (tree) xb;
832 return DECL_UID (a) - DECL_UID (b);
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  /* True for a parallel region; stops the outer-context walk in
     omp_cxx_notice_variable.  */
  bool is_parallel;
  /* True when the region's default data-sharing is "shared".  */
  bool default_shared;
  /* Enclosing OpenMP task/parallel region, or NULL.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Splay tree mapping DECLs to their OMP_CLAUSE_DEFAULT_* sharing
     flag within this region.  */
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  /* For an invisible-reference parm, look at the referenced type.  */
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  /* Strip array dimensions down to the element type.  */
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  /* Only class-type variables are worth tracking (they may need copy
     ctor/dtor instantiation; see omp_cxx_notice_variable).  */
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  /* Thread-local variables are not tracked.  */
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  /* Neither are variables whose sharing is already predetermined.  */
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  On
   first sight of DECL in the region, decide whether it is implicitly
   shared or firstprivate and record that in OMP_CTX->variables.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      /* Propagate the use outward first, so outer regions record DECL
	 before we inspect them below.  */
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  /* Scan outward (stopping at the nearest parallel) for a
	     context where DECL is already known non-shared.  */
	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  /* If the scan ran out of contexts, function-local decls and
	     parms are firstprivate.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
/* Genericization context.  */

struct cp_genericize_data
{
  /* Trees already walked; used by cp_genericize_r to avoid processing
     the same tree twice.  */
  hash_set<tree> *p_set;
  /* Stack of enclosing BIND_EXPRs, innermost last; consulted when
     lowering USING_STMTs.  */
  vec<tree> bind_expr_stack;
  /* Innermost OpenMP task/parallel region, or NULL.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
  /* NOTE(review): maintained by genericization code outside this
     excerpt — presumably the enclosing TRY_BLOCK; confirm.  */
  tree try_block;
  /* True while walking static initializers, which must not be
     sanitized.  */
  bool no_sanitize_p;
};
927 /* Perform any pre-gimplification lowering of C++ front end trees to
928 GENERIC. */
930 static tree
931 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
933 tree stmt = *stmt_p;
934 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
935 hash_set<tree> *p_set = wtd->p_set;
937 /* If in an OpenMP context, note var uses. */
938 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
939 && (VAR_P (stmt)
940 || TREE_CODE (stmt) == PARM_DECL
941 || TREE_CODE (stmt) == RESULT_DECL)
942 && omp_var_to_track (stmt))
943 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
945 if (is_invisiref_parm (stmt)
946 /* Don't dereference parms in a thunk, pass the references through. */
947 && !(DECL_THUNK_P (current_function_decl)
948 && TREE_CODE (stmt) == PARM_DECL))
950 *stmt_p = convert_from_reference (stmt);
951 *walk_subtrees = 0;
952 return NULL;
955 /* Map block scope extern declarations to visible declarations with the
956 same name and type in outer scopes if any. */
957 if (cp_function_chain->extern_decl_map
958 && VAR_OR_FUNCTION_DECL_P (stmt)
959 && DECL_EXTERNAL (stmt))
961 struct cxx_int_tree_map *h, in;
962 in.uid = DECL_UID (stmt);
963 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
964 if (h)
966 *stmt_p = h->to;
967 *walk_subtrees = 0;
968 return NULL;
972 /* Other than invisiref parms, don't walk the same tree twice. */
973 if (p_set->contains (stmt))
975 *walk_subtrees = 0;
976 return NULL_TREE;
979 if (TREE_CODE (stmt) == ADDR_EXPR
980 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
982 /* If in an OpenMP context, note var uses. */
983 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
984 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
985 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
986 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
987 *walk_subtrees = 0;
989 else if (TREE_CODE (stmt) == RETURN_EXPR
990 && TREE_OPERAND (stmt, 0)
991 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
992 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
993 *walk_subtrees = 0;
994 else if (TREE_CODE (stmt) == OMP_CLAUSE)
995 switch (OMP_CLAUSE_CODE (stmt))
997 case OMP_CLAUSE_LASTPRIVATE:
998 /* Don't dereference an invisiref in OpenMP clauses. */
999 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1001 *walk_subtrees = 0;
1002 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1003 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1004 cp_genericize_r, data, NULL);
1006 break;
1007 case OMP_CLAUSE_PRIVATE:
1008 /* Don't dereference an invisiref in OpenMP clauses. */
1009 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1010 *walk_subtrees = 0;
1011 else if (wtd->omp_ctx != NULL)
1013 /* Private clause doesn't cause any references to the
1014 var in outer contexts, avoid calling
1015 omp_cxx_notice_variable for it. */
1016 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1017 wtd->omp_ctx = NULL;
1018 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1019 data, NULL);
1020 wtd->omp_ctx = old;
1021 *walk_subtrees = 0;
1023 break;
1024 case OMP_CLAUSE_SHARED:
1025 case OMP_CLAUSE_FIRSTPRIVATE:
1026 case OMP_CLAUSE_COPYIN:
1027 case OMP_CLAUSE_COPYPRIVATE:
1028 /* Don't dereference an invisiref in OpenMP clauses. */
1029 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1030 *walk_subtrees = 0;
1031 break;
1032 case OMP_CLAUSE_REDUCTION:
1033 /* Don't dereference an invisiref in reduction clause's
1034 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1035 still needs to be genericized. */
1036 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1038 *walk_subtrees = 0;
1039 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1040 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1041 cp_genericize_r, data, NULL);
1042 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1043 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1044 cp_genericize_r, data, NULL);
1046 break;
1047 default:
1048 break;
1050 else if (IS_TYPE_OR_DECL_P (stmt))
1051 *walk_subtrees = 0;
1053 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1054 to lower this construct before scanning it, so we need to lower these
1055 before doing anything else. */
1056 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1057 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1058 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1059 : TRY_FINALLY_EXPR,
1060 void_type_node,
1061 CLEANUP_BODY (stmt),
1062 CLEANUP_EXPR (stmt));
1064 else if (TREE_CODE (stmt) == IF_STMT)
1066 genericize_if_stmt (stmt_p);
1067 /* *stmt_p has changed, tail recurse to handle it again. */
1068 return cp_genericize_r (stmt_p, walk_subtrees, data);
1071 /* COND_EXPR might have incompatible types in branches if one or both
1072 arms are bitfields. Fix it up now. */
1073 else if (TREE_CODE (stmt) == COND_EXPR)
1075 tree type_left
1076 = (TREE_OPERAND (stmt, 1)
1077 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1078 : NULL_TREE);
1079 tree type_right
1080 = (TREE_OPERAND (stmt, 2)
1081 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1082 : NULL_TREE);
1083 if (type_left
1084 && !useless_type_conversion_p (TREE_TYPE (stmt),
1085 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1087 TREE_OPERAND (stmt, 1)
1088 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1089 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1090 type_left));
1092 if (type_right
1093 && !useless_type_conversion_p (TREE_TYPE (stmt),
1094 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1096 TREE_OPERAND (stmt, 2)
1097 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1098 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1099 type_right));
1103 else if (TREE_CODE (stmt) == BIND_EXPR)
1105 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1107 tree decl;
1108 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1109 if (VAR_P (decl)
1110 && !DECL_EXTERNAL (decl)
1111 && omp_var_to_track (decl))
1113 splay_tree_node n
1114 = splay_tree_lookup (wtd->omp_ctx->variables,
1115 (splay_tree_key) decl);
1116 if (n == NULL)
1117 splay_tree_insert (wtd->omp_ctx->variables,
1118 (splay_tree_key) decl,
1119 TREE_STATIC (decl)
1120 ? OMP_CLAUSE_DEFAULT_SHARED
1121 : OMP_CLAUSE_DEFAULT_PRIVATE);
1124 if (flag_sanitize
1125 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1127 /* The point here is to not sanitize static initializers. */
1128 bool no_sanitize_p = wtd->no_sanitize_p;
1129 wtd->no_sanitize_p = true;
1130 for (tree decl = BIND_EXPR_VARS (stmt);
1131 decl;
1132 decl = DECL_CHAIN (decl))
1133 if (VAR_P (decl)
1134 && TREE_STATIC (decl)
1135 && DECL_INITIAL (decl))
1136 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1137 wtd->no_sanitize_p = no_sanitize_p;
1139 wtd->bind_expr_stack.safe_push (stmt);
1140 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1141 cp_genericize_r, data, NULL);
1142 wtd->bind_expr_stack.pop ();
1145 else if (TREE_CODE (stmt) == USING_STMT)
1147 tree block = NULL_TREE;
1149 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1150 BLOCK, and append an IMPORTED_DECL to its
1151 BLOCK_VARS chained list. */
1152 if (wtd->bind_expr_stack.exists ())
1154 int i;
1155 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1156 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1157 break;
1159 if (block)
1161 tree using_directive;
1162 gcc_assert (TREE_OPERAND (stmt, 0));
1164 using_directive = make_node (IMPORTED_DECL);
1165 TREE_TYPE (using_directive) = void_type_node;
1167 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1168 = TREE_OPERAND (stmt, 0);
1169 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1170 BLOCK_VARS (block) = using_directive;
1172 /* The USING_STMT won't appear in GENERIC. */
1173 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1174 *walk_subtrees = 0;
1177 else if (TREE_CODE (stmt) == DECL_EXPR
1178 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1180 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1181 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1182 *walk_subtrees = 0;
1184 else if (TREE_CODE (stmt) == DECL_EXPR)
1186 tree d = DECL_EXPR_DECL (stmt);
1187 if (TREE_CODE (d) == VAR_DECL)
1188 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1190 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1192 struct cp_genericize_omp_taskreg omp_ctx;
1193 tree c, decl;
1194 splay_tree_node n;
1196 *walk_subtrees = 0;
1197 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1198 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1199 omp_ctx.default_shared = omp_ctx.is_parallel;
1200 omp_ctx.outer = wtd->omp_ctx;
1201 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1202 wtd->omp_ctx = &omp_ctx;
1203 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1204 switch (OMP_CLAUSE_CODE (c))
1206 case OMP_CLAUSE_SHARED:
1207 case OMP_CLAUSE_PRIVATE:
1208 case OMP_CLAUSE_FIRSTPRIVATE:
1209 case OMP_CLAUSE_LASTPRIVATE:
1210 decl = OMP_CLAUSE_DECL (c);
1211 if (decl == error_mark_node || !omp_var_to_track (decl))
1212 break;
1213 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1214 if (n != NULL)
1215 break;
1216 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1217 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1218 ? OMP_CLAUSE_DEFAULT_SHARED
1219 : OMP_CLAUSE_DEFAULT_PRIVATE);
1220 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1221 && omp_ctx.outer)
1222 omp_cxx_notice_variable (omp_ctx.outer, decl);
1223 break;
1224 case OMP_CLAUSE_DEFAULT:
1225 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1226 omp_ctx.default_shared = true;
1227 default:
1228 break;
1230 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1231 wtd->omp_ctx = omp_ctx.outer;
1232 splay_tree_delete (omp_ctx.variables);
1234 else if (TREE_CODE (stmt) == TRY_BLOCK)
1236 *walk_subtrees = 0;
1237 tree try_block = wtd->try_block;
1238 wtd->try_block = stmt;
1239 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1240 wtd->try_block = try_block;
1241 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1243 else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1245 /* MUST_NOT_THROW_COND might be something else with TM. */
1246 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1248 *walk_subtrees = 0;
1249 tree try_block = wtd->try_block;
1250 wtd->try_block = stmt;
1251 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1252 wtd->try_block = try_block;
1255 else if (TREE_CODE (stmt) == THROW_EXPR)
1257 location_t loc = location_of (stmt);
1258 if (TREE_NO_WARNING (stmt))
1259 /* Never mind. */;
1260 else if (wtd->try_block)
1262 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1263 && warning_at (loc, OPT_Wterminate,
1264 "throw will always call terminate()")
1265 && cxx_dialect >= cxx11
1266 && DECL_DESTRUCTOR_P (current_function_decl))
1267 inform (loc, "in C++11 destructors default to noexcept");
1269 else
1271 if (warn_cxx11_compat && cxx_dialect < cxx11
1272 && DECL_DESTRUCTOR_P (current_function_decl)
1273 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1274 == NULL_TREE)
1275 && (get_defaulted_eh_spec (current_function_decl)
1276 == empty_except_spec))
1277 warning_at (loc, OPT_Wc__11_compat,
1278 "in C++11 this throw will terminate because "
1279 "destructors default to noexcept");
1282 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1283 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1284 else if (TREE_CODE (stmt) == FOR_STMT)
1285 genericize_for_stmt (stmt_p, walk_subtrees, data);
1286 else if (TREE_CODE (stmt) == WHILE_STMT)
1287 genericize_while_stmt (stmt_p, walk_subtrees, data);
1288 else if (TREE_CODE (stmt) == DO_STMT)
1289 genericize_do_stmt (stmt_p, walk_subtrees, data);
1290 else if (TREE_CODE (stmt) == SWITCH_STMT)
1291 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1292 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1293 genericize_continue_stmt (stmt_p);
1294 else if (TREE_CODE (stmt) == BREAK_STMT)
1295 genericize_break_stmt (stmt_p);
1296 else if (TREE_CODE (stmt) == OMP_FOR
1297 || TREE_CODE (stmt) == OMP_SIMD
1298 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1299 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1300 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1302 if (SIZEOF_EXPR_TYPE_P (stmt))
1303 *stmt_p
1304 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1305 SIZEOF_EXPR, false);
1306 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1307 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1308 SIZEOF_EXPR, false);
1309 else
1310 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1311 SIZEOF_EXPR, false);
1312 if (*stmt_p == error_mark_node)
1313 *stmt_p = size_one_node;
1314 return NULL;
1316 else if ((flag_sanitize
1317 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1318 && !wtd->no_sanitize_p)
1320 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1321 && TREE_CODE (stmt) == NOP_EXPR
1322 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1323 ubsan_maybe_instrument_reference (stmt);
1324 else if (TREE_CODE (stmt) == CALL_EXPR)
1326 tree fn = CALL_EXPR_FN (stmt);
1327 if (fn != NULL_TREE
1328 && !error_operand_p (fn)
1329 && POINTER_TYPE_P (TREE_TYPE (fn))
1330 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1332 bool is_ctor
1333 = TREE_CODE (fn) == ADDR_EXPR
1334 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1335 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1336 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1337 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1338 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1339 cp_ubsan_maybe_instrument_member_call (stmt);
1344 p_set->add (*stmt_p);
1346 return NULL;
1349 /* Lower C++ front end trees to GENERIC in T_P. */
1351 static void
1352 cp_genericize_tree (tree* t_p)
1354 struct cp_genericize_data wtd;
1356 wtd.p_set = new hash_set<tree>;
1357 wtd.bind_expr_stack.create (0);
1358 wtd.omp_ctx = NULL;
1359 wtd.try_block = NULL_TREE;
1360 wtd.no_sanitize_p = false;
1361 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1362 delete wtd.p_set;
1363 wtd.bind_expr_stack.release ();
1364 if (flag_sanitize & SANITIZE_VPTR)
1365 cp_ubsan_instrument_member_accesses (t_p);
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  /* Nothing to do for void functions, for ctors/dtors (no user-visible
     return value), or when the target hook already declines to warn
     about a missing return for this function.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  /* Walk to the last statement of the body, looking through BIND_EXPRs,
     TRY_FINALLY_EXPRs and STATEMENT_LISTs.  If that statement is a
     RETURN_EXPR the function obviously returns and we are done.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* Function ends with an explicit return; no check needed.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the runtime check after the last statement of the outermost
     BIND_EXPR's STATEMENT_LIST, using the function's own location.  */
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}
/* Lower the body of FNDECL from C++ front-end trees to GENERIC:
   fix up invisible-reference parameters and return value, expand
   Cilk Plus array notation, run cp_genericize_tree, optionally add
   -fsanitize=return instrumentation, and finish with c_genericize.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  The named
	     return value (NVR) variable is found by name in the
	     outermost curly-brace block.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue scopes must have been closed by now.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip past the `this' parameter (and ARG2's slot, if present) to
     reach the default-argument portion of FN's parameter list.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build an explicit loop that applies FN to each
	 element, walking ARG1 (and ARG2 in lock-step, if present)
	 with pointer increments of the element size.  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Strip all array dimensions to find the element type, and
	 form references to the first element of each array.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* End pointer: one past the last byte of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* p1 = start1;  */
      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  /* p2 = start2;  */
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      /* FN (p1[, p2], defaults...);  */
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* p1 += sizeof (element);  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  /* p2 += sizeof (element);  */
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* if (p1 != end1) goto lab;  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call FN (&arg1[, &arg2], defaults...).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1605 /* Return code to initialize DECL with its default constructor, or
1606 NULL if there's nothing to do. */
1608 tree
1609 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1611 tree info = CP_OMP_CLAUSE_INFO (clause);
1612 tree ret = NULL;
1614 if (info)
1615 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1617 return ret;
1620 /* Return code to initialize DST with a copy constructor from SRC. */
1622 tree
1623 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1625 tree info = CP_OMP_CLAUSE_INFO (clause);
1626 tree ret = NULL;
1628 if (info)
1629 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1630 if (ret == NULL)
1631 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1633 return ret;
1636 /* Similarly, except use an assignment operator instead. */
1638 tree
1639 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1641 tree info = CP_OMP_CLAUSE_INFO (clause);
1642 tree ret = NULL;
1644 if (info)
1645 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1646 if (ret == NULL)
1647 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1649 return ret;
1652 /* Return code to destroy DECL. */
1654 tree
1655 cxx_omp_clause_dtor (tree clause, tree decl)
1657 tree info = CP_OMP_CLAUSE_INFO (clause);
1658 tree ret = NULL;
1660 if (info)
1661 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1663 return ret;
1666 /* True if OpenMP should privatize what this DECL points to rather
1667 than the DECL itself. */
1669 bool
1670 cxx_omp_privatize_by_reference (const_tree decl)
1672 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1673 || is_invisiref_parm (decl));
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only invisible-reference parms are looked through; any other
	 reference-typed entity is never predetermined shared here.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Recover it from the original variable
	     with the same name in the outermost block, if any.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1718 /* True if OpenMP sharing attribute of DECL is predetermined. */
1720 enum omp_clause_default_kind
1721 cxx_omp_predetermined_sharing (tree decl)
1723 /* Static data members are predetermined shared. */
1724 if (TREE_STATIC (decl))
1726 tree ctx = CP_DECL_CONTEXT (decl);
1727 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1728 return OMP_CLAUSE_DEFAULT_SHARED;
1731 /* Const qualified vars having no mutable member are predetermined
1732 shared. */
1733 if (cxx_omp_const_qual_no_mutable (decl))
1734 return OMP_CLAUSE_DEFAULT_SHARED;
1736 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only implicitly determined firstprivate clauses need work here.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* Invisible-reference parms are looked through; any other
	 reference type cannot be implicitly firstprivate.  */
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  /* On any failure above, demote the clause to shared.  */
  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}