/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "function.h"
#include "predict.h"
#include "basic-block.h"
#include "tree.h"
#include "cp-tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "alias.h"
#include "stor-layout.h"
#include "c-family/c-common.h"
#include "tree-iterator.h"
#include "internal-fn.h"
#include "gimplify.h"
#include "flags.h"
#include "splay-tree.h"
#include "target.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "gimplify.h"
46 /* Forward declarations. */
48 static tree cp_genericize_r (tree *, int *, void *);
49 static void cp_genericize_tree (tree*);
51 /* Local declarations. */
53 enum bc_t { bc_break = 0, bc_continue = 1 };
55 /* Stack of labels which are targets for "break" or "continue",
56 linked through TREE_CHAIN. */
57 static tree bc_label[2];
59 /* Begin a scope which can be exited by a break or continue statement. BC
60 indicates which.
62 Just creates a label with location LOCATION and pushes it into the current
63 context. */
65 static tree
66 begin_bc_block (enum bc_t bc, location_t location)
68 tree label = create_artificial_label (location);
69 DECL_CHAIN (label) = bc_label[bc];
70 bc_label[bc] = label;
71 if (bc == bc_break)
72 LABEL_DECL_BREAK (label) = true;
73 else
74 LABEL_DECL_CONTINUE (label) = true;
75 return label;
78 /* Finish a scope which can be exited by a break or continue statement.
79 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
80 an expression for the contents of the scope.
82 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
83 BLOCK. Otherwise, just forget the label. */
85 static void
86 finish_bc_block (tree *block, enum bc_t bc, tree label)
88 gcc_assert (label == bc_label[bc]);
90 if (TREE_USED (label))
91 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
92 block);
94 bc_label[bc] = DECL_CHAIN (label);
95 DECL_CHAIN (label) = NULL_TREE;
98 /* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
99 *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
100 TARGET_EXPR. *PRE_P and *POST_P are gimple sequences from the caller
101 of gimplify_cilk_spawn. */
103 static void
104 cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
105 gimple_seq *post_p)
107 int ii = 0;
109 cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
110 if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
111 for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
112 gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
113 is_gimple_reg, fb_rvalue);
117 /* Get the LABEL_EXPR to represent a break or continue statement
118 in the current block scope. BC indicates which. */
120 static tree
121 get_bc_label (enum bc_t bc)
123 tree label = bc_label[bc];
125 /* Mark the label used for finish_bc_block. */
126 TREE_USED (label) = 1;
127 return label;
130 /* Genericize a TRY_BLOCK. */
132 static void
133 genericize_try_block (tree *stmt_p)
135 tree body = TRY_STMTS (*stmt_p);
136 tree cleanup = TRY_HANDLERS (*stmt_p);
138 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
141 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
143 static void
144 genericize_catch_block (tree *stmt_p)
146 tree type = HANDLER_TYPE (*stmt_p);
147 tree body = HANDLER_BODY (*stmt_p);
149 /* FIXME should the caught type go in TREE_TYPE? */
150 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
153 /* A terser interface for building a representation of an exception
154 specification. */
156 static tree
157 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
159 tree t;
161 /* FIXME should the allowed types go in TREE_TYPE? */
162 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
163 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
165 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
166 append_to_statement_list (body, &TREE_OPERAND (t, 0));
168 return t;
171 /* Genericize an EH_SPEC_BLOCK by converting it to a
172 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
174 static void
175 genericize_eh_spec_block (tree *stmt_p)
177 tree body = EH_SPEC_STMTS (*stmt_p);
178 tree allowed = EH_SPEC_RAISES (*stmt_p);
179 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
181 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
182 TREE_NO_WARNING (*stmt_p) = true;
183 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
186 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
188 static void
189 genericize_if_stmt (tree *stmt_p)
191 tree stmt, cond, then_, else_;
192 location_t locus = EXPR_LOCATION (*stmt_p);
194 stmt = *stmt_p;
195 cond = IF_COND (stmt);
196 then_ = THEN_CLAUSE (stmt);
197 else_ = ELSE_CLAUSE (stmt);
199 if (!then_)
200 then_ = build_empty_stmt (locus);
201 if (!else_)
202 else_ = build_empty_stmt (locus);
204 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
205 stmt = then_;
206 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
207 stmt = else_;
208 else
209 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
210 if (!EXPR_HAS_LOCATION (stmt))
211 protected_set_expr_location (stmt, locus);
212 *stmt_p = stmt;
215 /* Build a generic representation of one of the C loop forms. COND is the
216 loop condition or NULL_TREE. BODY is the (possibly compound) statement
217 controlled by the loop. INCR is the increment expression of a for-loop,
218 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
219 evaluated before the loop body as in while and for loops, or after the
220 loop body as in do-while loops. */
222 static void
223 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
224 tree incr, bool cond_is_first, int *walk_subtrees,
225 void *data)
227 tree blab, clab;
228 tree exit = NULL;
229 tree stmt_list = NULL;
231 blab = begin_bc_block (bc_break, start_locus);
232 clab = begin_bc_block (bc_continue, start_locus);
234 protected_set_expr_location (incr, start_locus);
236 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
237 cp_walk_tree (&body, cp_genericize_r, data, NULL);
238 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
239 *walk_subtrees = 0;
241 if (cond && TREE_CODE (cond) != INTEGER_CST)
243 /* If COND is constant, don't bother building an exit. If it's false,
244 we won't build a loop. If it's true, any exits are in the body. */
245 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
246 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
247 get_bc_label (bc_break));
248 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
249 build_empty_stmt (cloc), exit);
252 if (exit && cond_is_first)
253 append_to_statement_list (exit, &stmt_list);
254 append_to_statement_list (body, &stmt_list);
255 finish_bc_block (&stmt_list, bc_continue, clab);
256 append_to_statement_list (incr, &stmt_list);
257 if (exit && !cond_is_first)
258 append_to_statement_list (exit, &stmt_list);
260 if (!stmt_list)
261 stmt_list = build_empty_stmt (start_locus);
263 tree loop;
264 if (cond && integer_zerop (cond))
266 if (cond_is_first)
267 loop = fold_build3_loc (start_locus, COND_EXPR,
268 void_type_node, cond, stmt_list,
269 build_empty_stmt (start_locus));
270 else
271 loop = stmt_list;
273 else
274 loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);
276 stmt_list = NULL;
277 append_to_statement_list (loop, &stmt_list);
278 finish_bc_block (&stmt_list, bc_break, blab);
279 if (!stmt_list)
280 stmt_list = build_empty_stmt (start_locus);
282 *stmt_p = stmt_list;
285 /* Genericize a FOR_STMT node *STMT_P. */
287 static void
288 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
290 tree stmt = *stmt_p;
291 tree expr = NULL;
292 tree loop;
293 tree init = FOR_INIT_STMT (stmt);
295 if (init)
297 cp_walk_tree (&init, cp_genericize_r, data, NULL);
298 append_to_statement_list (init, &expr);
301 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
302 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
303 append_to_statement_list (loop, &expr);
304 if (expr == NULL_TREE)
305 expr = loop;
306 *stmt_p = expr;
309 /* Genericize a WHILE_STMT node *STMT_P. */
311 static void
312 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
314 tree stmt = *stmt_p;
315 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
316 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
319 /* Genericize a DO_STMT node *STMT_P. */
321 static void
322 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
324 tree stmt = *stmt_p;
325 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
326 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
329 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
331 static void
332 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
334 tree stmt = *stmt_p;
335 tree break_block, body, cond, type;
336 location_t stmt_locus = EXPR_LOCATION (stmt);
338 break_block = begin_bc_block (bc_break, stmt_locus);
340 body = SWITCH_STMT_BODY (stmt);
341 if (!body)
342 body = build_empty_stmt (stmt_locus);
343 cond = SWITCH_STMT_COND (stmt);
344 type = SWITCH_STMT_TYPE (stmt);
346 cp_walk_tree (&body, cp_genericize_r, data, NULL);
347 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
348 cp_walk_tree (&type, cp_genericize_r, data, NULL);
349 *walk_subtrees = 0;
351 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
352 finish_bc_block (stmt_p, bc_break, break_block);
355 /* Genericize a CONTINUE_STMT node *STMT_P. */
357 static void
358 genericize_continue_stmt (tree *stmt_p)
360 tree stmt_list = NULL;
361 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
362 tree label = get_bc_label (bc_continue);
363 location_t location = EXPR_LOCATION (*stmt_p);
364 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
365 append_to_statement_list (pred, &stmt_list);
366 append_to_statement_list (jump, &stmt_list);
367 *stmt_p = stmt_list;
370 /* Genericize a BREAK_STMT node *STMT_P. */
372 static void
373 genericize_break_stmt (tree *stmt_p)
375 tree label = get_bc_label (bc_break);
376 location_t location = EXPR_LOCATION (*stmt_p);
377 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
380 /* Genericize a OMP_FOR node *STMT_P. */
382 static void
383 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
385 tree stmt = *stmt_p;
386 location_t locus = EXPR_LOCATION (stmt);
387 tree clab = begin_bc_block (bc_continue, locus);
389 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
390 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
391 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
392 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
393 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
394 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
395 *walk_subtrees = 0;
397 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
400 /* Hook into the middle of gimplifying an OMP_FOR node. */
402 static enum gimplify_status
403 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
405 tree for_stmt = *expr_p;
406 gimple_seq seq = NULL;
408 /* Protect ourselves from recursion. */
409 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
410 return GS_UNHANDLED;
411 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
413 gimplify_and_add (for_stmt, &seq);
414 gimple_seq_add_seq (pre_p, seq);
416 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
418 return GS_ALL_DONE;
421 /* Gimplify an EXPR_STMT node. */
423 static void
424 gimplify_expr_stmt (tree *stmt_p)
426 tree stmt = EXPR_STMT_EXPR (*stmt_p);
428 if (stmt == error_mark_node)
429 stmt = NULL;
431 /* Gimplification of a statement expression will nullify the
432 statement if all its side effects are moved to *PRE_P and *POST_P.
434 In this case we will not want to emit the gimplified statement.
435 However, we may still want to emit a warning, so we do that before
436 gimplification. */
437 if (stmt && warn_unused_value)
439 if (!TREE_SIDE_EFFECTS (stmt))
441 if (!IS_EMPTY_STMT (stmt)
442 && !VOID_TYPE_P (TREE_TYPE (stmt))
443 && !TREE_NO_WARNING (stmt))
444 warning (OPT_Wunused_value, "statement with no effect");
446 else
447 warn_if_unused_value (stmt, input_location);
450 if (stmt == NULL_TREE)
451 stmt = alloc_stmt_list ();
453 *stmt_p = stmt;
456 /* Gimplify initialization from an AGGR_INIT_EXPR. */
458 static void
459 cp_gimplify_init_expr (tree *expr_p)
461 tree from = TREE_OPERAND (*expr_p, 1);
462 tree to = TREE_OPERAND (*expr_p, 0);
463 tree t;
465 /* What about code that pulls out the temp and uses it elsewhere? I
466 think that such code never uses the TARGET_EXPR as an initializer. If
467 I'm wrong, we'll abort because the temp won't have any RTL. In that
468 case, I guess we'll need to replace references somehow. */
469 if (TREE_CODE (from) == TARGET_EXPR)
470 from = TARGET_EXPR_INITIAL (from);
472 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
473 inside the TARGET_EXPR. */
474 for (t = from; t; )
476 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
478 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
479 replace the slot operand with our target.
481 Should we add a target parm to gimplify_expr instead? No, as in this
482 case we want to replace the INIT_EXPR. */
483 if (TREE_CODE (sub) == AGGR_INIT_EXPR
484 || TREE_CODE (sub) == VEC_INIT_EXPR)
486 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
487 AGGR_INIT_EXPR_SLOT (sub) = to;
488 else
489 VEC_INIT_EXPR_SLOT (sub) = to;
490 *expr_p = from;
492 /* The initialization is now a side-effect, so the container can
493 become void. */
494 if (from != sub)
495 TREE_TYPE (from) = void_type_node;
498 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
499 /* Handle aggregate NSDMI. */
500 replace_placeholders (sub, to);
502 if (t == sub)
503 break;
504 else
505 t = TREE_OPERAND (t, 1);
510 /* Gimplify a MUST_NOT_THROW_EXPR. */
512 static enum gimplify_status
513 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
515 tree stmt = *expr_p;
516 tree temp = voidify_wrapper_expr (stmt, NULL);
517 tree body = TREE_OPERAND (stmt, 0);
518 gimple_seq try_ = NULL;
519 gimple_seq catch_ = NULL;
520 gimple *mnt;
522 gimplify_and_add (body, &try_);
523 mnt = gimple_build_eh_must_not_throw (terminate_node);
524 gimple_seq_add_stmt_without_update (&catch_, mnt);
525 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
527 gimple_seq_add_stmt_without_update (pre_p, mnt);
528 if (temp)
530 *expr_p = temp;
531 return GS_OK;
534 *expr_p = NULL;
535 return GS_ALL_DONE;
538 /* Return TRUE if an operand (OP) of a given TYPE being copied is
539 really just an empty class copy.
541 Check that the operand has a simple form so that TARGET_EXPRs and
542 non-empty CONSTRUCTORs get reduced properly, and we leave the
543 return slot optimization alone because it isn't a copy. */
545 static bool
546 simple_empty_class_p (tree type, tree op)
548 return
549 ((TREE_CODE (op) == COMPOUND_EXPR
550 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
551 || is_gimple_lvalue (op)
552 || INDIRECT_REF_P (op)
553 || (TREE_CODE (op) == CONSTRUCTOR
554 && CONSTRUCTOR_NELTS (op) == 0
555 && !TREE_CLOBBER_P (op))
556 || (TREE_CODE (op) == CALL_EXPR
557 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
558 && is_really_empty_class (type);
561 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
564 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
566 int saved_stmts_are_full_exprs_p = 0;
567 enum tree_code code = TREE_CODE (*expr_p);
568 enum gimplify_status ret;
570 if (STATEMENT_CODE_P (code))
572 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
573 current_stmt_tree ()->stmts_are_full_exprs_p
574 = STMT_IS_FULL_EXPR_P (*expr_p);
577 switch (code)
579 case PTRMEM_CST:
580 *expr_p = cplus_expand_constant (*expr_p);
581 ret = GS_OK;
582 break;
584 case AGGR_INIT_EXPR:
585 simplify_aggr_init_expr (expr_p);
586 ret = GS_OK;
587 break;
589 case VEC_INIT_EXPR:
591 location_t loc = input_location;
592 tree init = VEC_INIT_EXPR_INIT (*expr_p);
593 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
594 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
595 input_location = EXPR_LOCATION (*expr_p);
596 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
597 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
598 from_array,
599 tf_warning_or_error);
600 cp_genericize_tree (expr_p);
601 ret = GS_OK;
602 input_location = loc;
604 break;
606 case THROW_EXPR:
607 /* FIXME communicate throw type to back end, probably by moving
608 THROW_EXPR into ../tree.def. */
609 *expr_p = TREE_OPERAND (*expr_p, 0);
610 ret = GS_OK;
611 break;
613 case MUST_NOT_THROW_EXPR:
614 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
615 break;
617 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
618 LHS of an assignment might also be involved in the RHS, as in bug
619 25979. */
620 case INIT_EXPR:
621 if (fn_contains_cilk_spawn_p (cfun)
622 && cilk_detect_spawn_and_unwrap (expr_p)
623 && !seen_error ())
625 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
626 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
628 cp_gimplify_init_expr (expr_p);
629 if (TREE_CODE (*expr_p) != INIT_EXPR)
630 return GS_OK;
631 /* Otherwise fall through. */
632 case MODIFY_EXPR:
633 modify_expr_case:
635 if (fn_contains_cilk_spawn_p (cfun)
636 && cilk_detect_spawn_and_unwrap (expr_p)
637 && !seen_error ())
639 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
640 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
642 /* If the back end isn't clever enough to know that the lhs and rhs
643 types are the same, add an explicit conversion. */
644 tree op0 = TREE_OPERAND (*expr_p, 0);
645 tree op1 = TREE_OPERAND (*expr_p, 1);
647 if (!error_operand_p (op0)
648 && !error_operand_p (op1)
649 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
650 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
651 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
652 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
653 TREE_TYPE (op0), op1);
655 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
657 /* Remove any copies of empty classes. Also drop volatile
658 variables on the RHS to avoid infinite recursion from
659 gimplify_expr trying to load the value. */
660 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
661 is_gimple_lvalue, fb_lvalue);
662 if (TREE_SIDE_EFFECTS (op1))
664 if (TREE_THIS_VOLATILE (op1)
665 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
666 op1 = build_fold_addr_expr (op1);
668 gimplify_and_add (op1, pre_p);
670 *expr_p = TREE_OPERAND (*expr_p, 0);
673 ret = GS_OK;
674 break;
676 case EMPTY_CLASS_EXPR:
677 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
678 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
679 ret = GS_OK;
680 break;
682 case BASELINK:
683 *expr_p = BASELINK_FUNCTIONS (*expr_p);
684 ret = GS_OK;
685 break;
687 case TRY_BLOCK:
688 genericize_try_block (expr_p);
689 ret = GS_OK;
690 break;
692 case HANDLER:
693 genericize_catch_block (expr_p);
694 ret = GS_OK;
695 break;
697 case EH_SPEC_BLOCK:
698 genericize_eh_spec_block (expr_p);
699 ret = GS_OK;
700 break;
702 case USING_STMT:
703 gcc_unreachable ();
705 case FOR_STMT:
706 case WHILE_STMT:
707 case DO_STMT:
708 case SWITCH_STMT:
709 case CONTINUE_STMT:
710 case BREAK_STMT:
711 gcc_unreachable ();
713 case OMP_FOR:
714 case OMP_SIMD:
715 case OMP_DISTRIBUTE:
716 case OMP_TASKLOOP:
717 ret = cp_gimplify_omp_for (expr_p, pre_p);
718 break;
720 case EXPR_STMT:
721 gimplify_expr_stmt (expr_p);
722 ret = GS_OK;
723 break;
725 case UNARY_PLUS_EXPR:
727 tree arg = TREE_OPERAND (*expr_p, 0);
728 tree type = TREE_TYPE (*expr_p);
729 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
730 : arg;
731 ret = GS_OK;
733 break;
735 case CILK_SPAWN_STMT:
736 gcc_assert
737 (fn_contains_cilk_spawn_p (cfun)
738 && cilk_detect_spawn_and_unwrap (expr_p));
740 /* If errors are seen, then just process it as a CALL_EXPR. */
741 if (!seen_error ())
743 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
744 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
746 case CALL_EXPR:
747 if (fn_contains_cilk_spawn_p (cfun)
748 && cilk_detect_spawn_and_unwrap (expr_p)
749 && !seen_error ())
751 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
752 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
754 /* DR 1030 says that we need to evaluate the elements of an
755 initializer-list in forward order even when it's used as arguments to
756 a constructor. So if the target wants to evaluate them in reverse
757 order and there's more than one argument other than 'this', gimplify
758 them in order. */
759 ret = GS_OK;
760 if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
761 && call_expr_nargs (*expr_p) > 2)
763 int nargs = call_expr_nargs (*expr_p);
764 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
765 for (int i = 1; i < nargs; ++i)
767 enum gimplify_status t
768 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
769 if (t == GS_ERROR)
770 ret = GS_ERROR;
773 break;
775 case RETURN_EXPR:
776 if (TREE_OPERAND (*expr_p, 0)
777 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
778 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
780 expr_p = &TREE_OPERAND (*expr_p, 0);
781 code = TREE_CODE (*expr_p);
782 /* Avoid going through the INIT_EXPR case, which can
783 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
784 goto modify_expr_case;
786 /* Fall through. */
788 default:
789 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
790 break;
793 /* Restore saved state. */
794 if (STATEMENT_CODE_P (code))
795 current_stmt_tree ()->stmts_are_full_exprs_p
796 = saved_stmts_are_full_exprs_p;
798 return ret;
801 static inline bool
802 is_invisiref_parm (const_tree t)
804 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
805 && DECL_BY_REFERENCE (t));
808 /* Return true if the uid in both int tree maps are equal. */
810 bool
811 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
813 return (a->uid == b->uid);
816 /* Hash a UID in a cxx_int_tree_map. */
818 unsigned int
819 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
821 return item->uid;
824 /* A stable comparison routine for use with splay trees and DECLs. */
826 static int
827 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
829 tree a = (tree) xa;
830 tree b = (tree) xb;
832 return DECL_UID (a) - DECL_UID (b);
835 /* OpenMP context during genericization. */
837 struct cp_genericize_omp_taskreg
839 bool is_parallel;
840 bool default_shared;
841 struct cp_genericize_omp_taskreg *outer;
842 splay_tree variables;
845 /* Return true if genericization should try to determine if
846 DECL is firstprivate or shared within task regions. */
848 static bool
849 omp_var_to_track (tree decl)
851 tree type = TREE_TYPE (decl);
852 if (is_invisiref_parm (decl))
853 type = TREE_TYPE (type);
854 while (TREE_CODE (type) == ARRAY_TYPE)
855 type = TREE_TYPE (type);
856 if (type == error_mark_node || !CLASS_TYPE_P (type))
857 return false;
858 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
859 return false;
860 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
861 return false;
862 return true;
865 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
867 static void
868 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
870 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
871 (splay_tree_key) decl);
872 if (n == NULL)
874 int flags = OMP_CLAUSE_DEFAULT_SHARED;
875 if (omp_ctx->outer)
876 omp_cxx_notice_variable (omp_ctx->outer, decl);
877 if (!omp_ctx->default_shared)
879 struct cp_genericize_omp_taskreg *octx;
881 for (octx = omp_ctx->outer; octx; octx = octx->outer)
883 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
884 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
886 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
887 break;
889 if (octx->is_parallel)
890 break;
892 if (octx == NULL
893 && (TREE_CODE (decl) == PARM_DECL
894 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
895 && DECL_CONTEXT (decl) == current_function_decl)))
896 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
897 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
899 /* DECL is implicitly determined firstprivate in
900 the current task construct. Ensure copy ctor and
901 dtor are instantiated, because during gimplification
902 it will be already too late. */
903 tree type = TREE_TYPE (decl);
904 if (is_invisiref_parm (decl))
905 type = TREE_TYPE (type);
906 while (TREE_CODE (type) == ARRAY_TYPE)
907 type = TREE_TYPE (type);
908 get_copy_ctor (type, tf_none);
909 get_dtor (type, tf_none);
912 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
916 /* Genericization context. */
918 struct cp_genericize_data
920 hash_set<tree> *p_set;
921 vec<tree> bind_expr_stack;
922 struct cp_genericize_omp_taskreg *omp_ctx;
923 tree try_block;
924 bool no_sanitize_p;
927 /* Perform any pre-gimplification lowering of C++ front end trees to
928 GENERIC. */
930 static tree
931 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
933 tree stmt = *stmt_p;
934 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
935 hash_set<tree> *p_set = wtd->p_set;
937 /* If in an OpenMP context, note var uses. */
938 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
939 && (VAR_P (stmt)
940 || TREE_CODE (stmt) == PARM_DECL
941 || TREE_CODE (stmt) == RESULT_DECL)
942 && omp_var_to_track (stmt))
943 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
945 if (is_invisiref_parm (stmt)
946 /* Don't dereference parms in a thunk, pass the references through. */
947 && !(DECL_THUNK_P (current_function_decl)
948 && TREE_CODE (stmt) == PARM_DECL))
950 *stmt_p = convert_from_reference (stmt);
951 *walk_subtrees = 0;
952 return NULL;
955 /* Map block scope extern declarations to visible declarations with the
956 same name and type in outer scopes if any. */
957 if (cp_function_chain->extern_decl_map
958 && VAR_OR_FUNCTION_DECL_P (stmt)
959 && DECL_EXTERNAL (stmt))
961 struct cxx_int_tree_map *h, in;
962 in.uid = DECL_UID (stmt);
963 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
964 if (h)
966 *stmt_p = h->to;
967 *walk_subtrees = 0;
968 return NULL;
972 /* Other than invisiref parms, don't walk the same tree twice. */
973 if (p_set->contains (stmt))
975 *walk_subtrees = 0;
976 return NULL_TREE;
979 if (TREE_CODE (stmt) == ADDR_EXPR
980 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
982 /* If in an OpenMP context, note var uses. */
983 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
984 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
985 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
986 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
987 *walk_subtrees = 0;
989 else if (TREE_CODE (stmt) == RETURN_EXPR
990 && TREE_OPERAND (stmt, 0)
991 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
992 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
993 *walk_subtrees = 0;
994 else if (TREE_CODE (stmt) == OMP_CLAUSE)
995 switch (OMP_CLAUSE_CODE (stmt))
997 case OMP_CLAUSE_LASTPRIVATE:
998 /* Don't dereference an invisiref in OpenMP clauses. */
999 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1001 *walk_subtrees = 0;
1002 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1003 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1004 cp_genericize_r, data, NULL);
1006 break;
1007 case OMP_CLAUSE_PRIVATE:
1008 /* Don't dereference an invisiref in OpenMP clauses. */
1009 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1010 *walk_subtrees = 0;
1011 else if (wtd->omp_ctx != NULL)
1013 /* Private clause doesn't cause any references to the
1014 var in outer contexts, avoid calling
1015 omp_cxx_notice_variable for it. */
1016 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1017 wtd->omp_ctx = NULL;
1018 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1019 data, NULL);
1020 wtd->omp_ctx = old;
1021 *walk_subtrees = 0;
1023 break;
1024 case OMP_CLAUSE_SHARED:
1025 case OMP_CLAUSE_FIRSTPRIVATE:
1026 case OMP_CLAUSE_COPYIN:
1027 case OMP_CLAUSE_COPYPRIVATE:
1028 /* Don't dereference an invisiref in OpenMP clauses. */
1029 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1030 *walk_subtrees = 0;
1031 break;
1032 case OMP_CLAUSE_REDUCTION:
1033 /* Don't dereference an invisiref in reduction clause's
1034 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1035 still needs to be genericized. */
1036 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1038 *walk_subtrees = 0;
1039 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1040 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1041 cp_genericize_r, data, NULL);
1042 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1043 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1044 cp_genericize_r, data, NULL);
1046 break;
1047 default:
1048 break;
1050 else if (IS_TYPE_OR_DECL_P (stmt))
1051 *walk_subtrees = 0;
1053 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1054 to lower this construct before scanning it, so we need to lower these
1055 before doing anything else. */
1056 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1057 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1058 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1059 : TRY_FINALLY_EXPR,
1060 void_type_node,
1061 CLEANUP_BODY (stmt),
1062 CLEANUP_EXPR (stmt));
1064 else if (TREE_CODE (stmt) == IF_STMT)
1066 genericize_if_stmt (stmt_p);
1067 /* *stmt_p has changed, tail recurse to handle it again. */
1068 return cp_genericize_r (stmt_p, walk_subtrees, data);
1071 /* COND_EXPR might have incompatible types in branches if one or both
1072 arms are bitfields. Fix it up now. */
1073 else if (TREE_CODE (stmt) == COND_EXPR)
1075 tree type_left
1076 = (TREE_OPERAND (stmt, 1)
1077 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1078 : NULL_TREE);
1079 tree type_right
1080 = (TREE_OPERAND (stmt, 2)
1081 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1082 : NULL_TREE);
1083 if (type_left
1084 && !useless_type_conversion_p (TREE_TYPE (stmt),
1085 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1087 TREE_OPERAND (stmt, 1)
1088 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1089 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1090 type_left));
1092 if (type_right
1093 && !useless_type_conversion_p (TREE_TYPE (stmt),
1094 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1096 TREE_OPERAND (stmt, 2)
1097 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1098 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1099 type_right));
1103 else if (TREE_CODE (stmt) == BIND_EXPR)
1105 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1107 tree decl;
1108 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1109 if (VAR_P (decl)
1110 && !DECL_EXTERNAL (decl)
1111 && omp_var_to_track (decl))
1113 splay_tree_node n
1114 = splay_tree_lookup (wtd->omp_ctx->variables,
1115 (splay_tree_key) decl);
1116 if (n == NULL)
1117 splay_tree_insert (wtd->omp_ctx->variables,
1118 (splay_tree_key) decl,
1119 TREE_STATIC (decl)
1120 ? OMP_CLAUSE_DEFAULT_SHARED
1121 : OMP_CLAUSE_DEFAULT_PRIVATE);
1124 if (flag_sanitize
1125 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1127 /* The point here is to not sanitize static initializers. */
1128 bool no_sanitize_p = wtd->no_sanitize_p;
1129 wtd->no_sanitize_p = true;
1130 for (tree decl = BIND_EXPR_VARS (stmt);
1131 decl;
1132 decl = DECL_CHAIN (decl))
1133 if (VAR_P (decl)
1134 && TREE_STATIC (decl)
1135 && DECL_INITIAL (decl))
1136 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1137 wtd->no_sanitize_p = no_sanitize_p;
1139 wtd->bind_expr_stack.safe_push (stmt);
1140 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1141 cp_genericize_r, data, NULL);
1142 wtd->bind_expr_stack.pop ();
1145 else if (TREE_CODE (stmt) == USING_STMT)
1147 tree block = NULL_TREE;
1149 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1150 BLOCK, and append an IMPORTED_DECL to its
1151 BLOCK_VARS chained list. */
1152 if (wtd->bind_expr_stack.exists ())
1154 int i;
1155 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1156 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1157 break;
1159 if (block)
1161 tree using_directive;
1162 gcc_assert (TREE_OPERAND (stmt, 0));
1164 using_directive = make_node (IMPORTED_DECL);
1165 TREE_TYPE (using_directive) = void_type_node;
1167 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1168 = TREE_OPERAND (stmt, 0);
1169 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1170 BLOCK_VARS (block) = using_directive;
1172 /* The USING_STMT won't appear in GENERIC. */
1173 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1174 *walk_subtrees = 0;
1177 else if (TREE_CODE (stmt) == DECL_EXPR
1178 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1180 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1181 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1182 *walk_subtrees = 0;
1184 else if (TREE_CODE (stmt) == DECL_EXPR)
1186 tree d = DECL_EXPR_DECL (stmt);
1187 if (TREE_CODE (d) == VAR_DECL)
1188 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1190 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1192 struct cp_genericize_omp_taskreg omp_ctx;
1193 tree c, decl;
1194 splay_tree_node n;
1196 *walk_subtrees = 0;
1197 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1198 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1199 omp_ctx.default_shared = omp_ctx.is_parallel;
1200 omp_ctx.outer = wtd->omp_ctx;
1201 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1202 wtd->omp_ctx = &omp_ctx;
1203 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1204 switch (OMP_CLAUSE_CODE (c))
1206 case OMP_CLAUSE_SHARED:
1207 case OMP_CLAUSE_PRIVATE:
1208 case OMP_CLAUSE_FIRSTPRIVATE:
1209 case OMP_CLAUSE_LASTPRIVATE:
1210 decl = OMP_CLAUSE_DECL (c);
1211 if (decl == error_mark_node || !omp_var_to_track (decl))
1212 break;
1213 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1214 if (n != NULL)
1215 break;
1216 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1217 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1218 ? OMP_CLAUSE_DEFAULT_SHARED
1219 : OMP_CLAUSE_DEFAULT_PRIVATE);
1220 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1221 && omp_ctx.outer)
1222 omp_cxx_notice_variable (omp_ctx.outer, decl);
1223 break;
1224 case OMP_CLAUSE_DEFAULT:
1225 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1226 omp_ctx.default_shared = true;
1227 default:
1228 break;
1230 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1231 wtd->omp_ctx = omp_ctx.outer;
1232 splay_tree_delete (omp_ctx.variables);
1234 else if (TREE_CODE (stmt) == TRY_BLOCK)
1236 *walk_subtrees = 0;
1237 tree try_block = wtd->try_block;
1238 wtd->try_block = stmt;
1239 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1240 wtd->try_block = try_block;
1241 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1243 else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1245 /* MUST_NOT_THROW_COND might be something else with TM. */
1246 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1248 *walk_subtrees = 0;
1249 tree try_block = wtd->try_block;
1250 wtd->try_block = stmt;
1251 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1252 wtd->try_block = try_block;
1255 else if (TREE_CODE (stmt) == THROW_EXPR)
1257 location_t loc = location_of (stmt);
1258 if (TREE_NO_WARNING (stmt))
1259 /* Never mind. */;
1260 else if (wtd->try_block)
1262 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1263 && warning_at (loc, OPT_Wterminate,
1264 "throw will always call terminate()")
1265 && cxx_dialect >= cxx11
1266 && DECL_DESTRUCTOR_P (current_function_decl))
1267 inform (loc, "in C++11 destructors default to noexcept");
1269 else
1271 if (warn_cxx11_compat && cxx_dialect < cxx11
1272 && DECL_DESTRUCTOR_P (current_function_decl)
1273 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1274 == NULL_TREE)
1275 && (get_defaulted_eh_spec (current_function_decl)
1276 == empty_except_spec))
1277 warning_at (loc, OPT_Wc__11_compat,
1278 "in C++11 this throw will terminate because "
1279 "destructors default to noexcept");
1282 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1283 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1284 else if (TREE_CODE (stmt) == FOR_STMT)
1285 genericize_for_stmt (stmt_p, walk_subtrees, data);
1286 else if (TREE_CODE (stmt) == WHILE_STMT)
1287 genericize_while_stmt (stmt_p, walk_subtrees, data);
1288 else if (TREE_CODE (stmt) == DO_STMT)
1289 genericize_do_stmt (stmt_p, walk_subtrees, data);
1290 else if (TREE_CODE (stmt) == SWITCH_STMT)
1291 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1292 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1293 genericize_continue_stmt (stmt_p);
1294 else if (TREE_CODE (stmt) == BREAK_STMT)
1295 genericize_break_stmt (stmt_p);
1296 else if (TREE_CODE (stmt) == OMP_FOR
1297 || TREE_CODE (stmt) == OMP_SIMD
1298 || TREE_CODE (stmt) == OMP_DISTRIBUTE
1299 || TREE_CODE (stmt) == OMP_TASKLOOP)
1300 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1301 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1303 if (SIZEOF_EXPR_TYPE_P (stmt))
1304 *stmt_p
1305 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1306 SIZEOF_EXPR, false);
1307 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1308 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1309 SIZEOF_EXPR, false);
1310 else
1311 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1312 SIZEOF_EXPR, false);
1313 if (*stmt_p == error_mark_node)
1314 *stmt_p = size_one_node;
1315 return NULL;
1317 else if ((flag_sanitize
1318 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1319 && !wtd->no_sanitize_p)
1321 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1322 && TREE_CODE (stmt) == NOP_EXPR
1323 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1324 ubsan_maybe_instrument_reference (stmt);
1325 else if (TREE_CODE (stmt) == CALL_EXPR)
1327 tree fn = CALL_EXPR_FN (stmt);
1328 if (fn != NULL_TREE
1329 && !error_operand_p (fn)
1330 && POINTER_TYPE_P (TREE_TYPE (fn))
1331 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1333 bool is_ctor
1334 = TREE_CODE (fn) == ADDR_EXPR
1335 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1336 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1337 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1338 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1339 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1340 cp_ubsan_maybe_instrument_member_call (stmt);
1345 p_set->add (*stmt_p);
1347 return NULL;
1350 /* Lower C++ front end trees to GENERIC in T_P. */
1352 static void
1353 cp_genericize_tree (tree* t_p)
1355 struct cp_genericize_data wtd;
1357 wtd.p_set = new hash_set<tree>;
1358 wtd.bind_expr_stack.create (0);
1359 wtd.omp_ctx = NULL;
1360 wtd.try_block = NULL_TREE;
1361 wtd.no_sanitize_p = false;
1362 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1363 delete wtd.p_set;
1364 wtd.bind_expr_stack.release ();
1365 if (flag_sanitize & SANITIZE_VPTR)
1366 cp_ubsan_instrument_member_accesses (t_p);
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  /* Only value-returning functions need the check; constructors and
     destructors are handled specially by the front end, and the target
     can veto the diagnostic entirely.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  /* Walk down the "last statement" spine of the saved body, looking for
     an obvious trailing RETURN_EXPR.  Each case either descends one
     level (continue) or terminates the scan (break/return).  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                /* Descend into the last statement of the list.  */
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          /* The body obviously ends with a return; nothing to do.  */
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  /* No trailing return was found: append the runtime check built by
     ubsan_instrument_return after the last statement of the outermost
     BIND_EXPR's statement list.  */
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}
/* Genericize FNDECL: lower the C++ front end's representation of its body
   to GENERIC so the middle end can gimplify it.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  The named
             return value variable's value expr still refers to the
             result decl directly; rewrite it through
             convert_from_reference now that the result is passed by
             reference.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* Optionally verify at runtime that control does not fall off the end
     of a value-returning function.  */
  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* Every break/continue scope opened during genericization must have
     been closed again by now.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the implicit `this' argument (and the second object argument,
     if present) to reach the default-argument part of FN's type.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: emit a pointer-walking loop that applies FN to each
         base element of ARG1 (and the corresponding element of ARG2).  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Descend through all array dimensions to the base element.  */
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 is the one-past-the-end address of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1/P2 are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      /* Call FN on the current element, wrapped in a cleanup point.  */
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the induction pointers by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      /* Back-edge: jump to LAB while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call to FN on the address of ARG1 (and
         ARG2 if given).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1606 /* Return code to initialize DECL with its default constructor, or
1607 NULL if there's nothing to do. */
1609 tree
1610 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1612 tree info = CP_OMP_CLAUSE_INFO (clause);
1613 tree ret = NULL;
1615 if (info)
1616 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1618 return ret;
1621 /* Return code to initialize DST with a copy constructor from SRC. */
1623 tree
1624 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1626 tree info = CP_OMP_CLAUSE_INFO (clause);
1627 tree ret = NULL;
1629 if (info)
1630 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1631 if (ret == NULL)
1632 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1634 return ret;
1637 /* Similarly, except use an assignment operator instead. */
1639 tree
1640 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1642 tree info = CP_OMP_CLAUSE_INFO (clause);
1643 tree ret = NULL;
1645 if (info)
1646 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1647 if (ret == NULL)
1648 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1650 return ret;
1653 /* Return code to destroy DECL. */
1655 tree
1656 cxx_omp_clause_dtor (tree clause, tree decl)
1658 tree info = CP_OMP_CLAUSE_INFO (clause);
1659 tree ret = NULL;
1661 if (info)
1662 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1664 return ret;
1667 /* True if OpenMP should privatize what this DECL points to rather
1668 than the DECL itself. */
1670 bool
1671 cxx_omp_privatize_by_reference (const_tree decl)
1673 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1674 || is_invisiref_parm (decl));
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* References only qualify when they stand for an invisible
         reference parameter; look through to the referenced type.  */
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  /* Recover the const qualifier from the original
                     named variable, if it had one.  */
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }
  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1719 /* True if OpenMP sharing attribute of DECL is predetermined. */
1721 enum omp_clause_default_kind
1722 cxx_omp_predetermined_sharing (tree decl)
1724 /* Static data members are predetermined shared. */
1725 if (TREE_STATIC (decl))
1727 tree ctx = CP_DECL_CONTEXT (decl);
1728 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1729 return OMP_CLAUSE_DEFAULT_SHARED;
1732 /* Const qualified vars having no mutable member are predetermined
1733 shared. */
1734 if (cxx_omp_const_qual_no_mutable (decl))
1735 return OMP_CLAUSE_DEFAULT_SHARED;
1737 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1740 /* Finalize an implicitly determined clause. */
1742 void
1743 cxx_omp_finish_clause (tree c, gimple_seq *)
1745 tree decl, inner_type;
1746 bool make_shared = false;
1748 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1749 return;
1751 decl = OMP_CLAUSE_DECL (c);
1752 decl = require_complete_type (decl);
1753 inner_type = TREE_TYPE (decl);
1754 if (decl == error_mark_node)
1755 make_shared = true;
1756 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1757 inner_type = TREE_TYPE (inner_type);
1759 /* We're interested in the base element, not arrays. */
1760 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1761 inner_type = TREE_TYPE (inner_type);
1763 /* Check for special function availability by building a call to one.
1764 Save the results, because later we won't be in the right context
1765 for making these queries. */
1766 if (!make_shared
1767 && CLASS_TYPE_P (inner_type)
1768 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1769 make_shared = true;
1771 if (make_shared)
1772 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
1775 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1776 disregarded in OpenMP construct, because it is going to be
1777 remapped during OpenMP lowering. SHARED is true if DECL
1778 is going to be shared, false if it is going to be privatized. */
1780 bool
1781 cxx_omp_disregard_value_expr (tree decl, bool shared)
1783 return !shared
1784 && VAR_P (decl)
1785 && DECL_HAS_VALUE_EXPR_P (decl)
1786 && DECL_ARTIFICIAL (decl)
1787 && DECL_LANG_SPECIFIC (decl)
1788 && DECL_OMP_PRIVATIZED_MEMBER (decl);