1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2018 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "basic-block.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
33 #include "c-family/c-ubsan.h"
34 #include "stringpool.h"
38 /* Forward declarations. */
40 static tree
cp_genericize_r (tree
*, int *, void *);
41 static tree
cp_fold_r (tree
*, int *, void *);
42 static void cp_genericize_tree (tree
*, bool);
43 static tree
cp_fold (tree
);
45 /* Local declarations. */
47 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
49 /* Stack of labels which are targets for "break" or "continue",
50 linked through TREE_CHAIN. */
51 static tree bc_label
[2];
53 /* Begin a scope which can be exited by a break or continue statement. BC
56 Just creates a label with location LOCATION and pushes it into the current
60 begin_bc_block (enum bc_t bc
, location_t location
)
62 tree label
= create_artificial_label (location
);
63 DECL_CHAIN (label
) = bc_label
[bc
];
66 LABEL_DECL_BREAK (label
) = true;
68 LABEL_DECL_CONTINUE (label
) = true;
72 /* Finish a scope which can be exited by a break or continue statement.
73 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
74 an expression for the contents of the scope.
76 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
77 BLOCK. Otherwise, just forget the label. */
80 finish_bc_block (tree
*block
, enum bc_t bc
, tree label
)
82 gcc_assert (label
== bc_label
[bc
]);
84 if (TREE_USED (label
))
85 append_to_statement_list (build1 (LABEL_EXPR
, void_type_node
, label
),
88 bc_label
[bc
] = DECL_CHAIN (label
);
89 DECL_CHAIN (label
) = NULL_TREE
;
92 /* Get the LABEL_EXPR to represent a break or continue statement
93 in the current block scope. BC indicates which. */
96 get_bc_label (enum bc_t bc
)
98 tree label
= bc_label
[bc
];
100 /* Mark the label used for finish_bc_block. */
101 TREE_USED (label
) = 1;
105 /* Genericize a TRY_BLOCK. */
108 genericize_try_block (tree
*stmt_p
)
110 tree body
= TRY_STMTS (*stmt_p
);
111 tree cleanup
= TRY_HANDLERS (*stmt_p
);
113 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
116 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
119 genericize_catch_block (tree
*stmt_p
)
121 tree type
= HANDLER_TYPE (*stmt_p
);
122 tree body
= HANDLER_BODY (*stmt_p
);
124 /* FIXME should the caught type go in TREE_TYPE? */
125 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
128 /* A terser interface for building a representation of an exception
132 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
136 /* FIXME should the allowed types go in TREE_TYPE? */
137 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
138 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
140 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
141 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
146 /* Genericize an EH_SPEC_BLOCK by converting it to a
147 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
150 genericize_eh_spec_block (tree
*stmt_p
)
152 tree body
= EH_SPEC_STMTS (*stmt_p
);
153 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
154 tree failure
= build_call_n (call_unexpected_fn
, 1, build_exc_ptr ());
156 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
157 TREE_NO_WARNING (*stmt_p
) = true;
158 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
161 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
164 genericize_if_stmt (tree
*stmt_p
)
166 tree stmt
, cond
, then_
, else_
;
167 location_t locus
= EXPR_LOCATION (*stmt_p
);
170 cond
= IF_COND (stmt
);
171 then_
= THEN_CLAUSE (stmt
);
172 else_
= ELSE_CLAUSE (stmt
);
175 then_
= build_empty_stmt (locus
);
177 else_
= build_empty_stmt (locus
);
179 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
181 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
184 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
185 if (!EXPR_HAS_LOCATION (stmt
))
186 protected_set_expr_location (stmt
, locus
);
190 /* Build a generic representation of one of the C loop forms. COND is the
191 loop condition or NULL_TREE. BODY is the (possibly compound) statement
192 controlled by the loop. INCR is the increment expression of a for-loop,
193 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
194 evaluated before the loop body as in while and for loops, or after the
195 loop body as in do-while loops. */
198 genericize_cp_loop (tree
*stmt_p
, location_t start_locus
, tree cond
, tree body
,
199 tree incr
, bool cond_is_first
, int *walk_subtrees
,
204 tree stmt_list
= NULL
;
206 blab
= begin_bc_block (bc_break
, start_locus
);
207 clab
= begin_bc_block (bc_continue
, start_locus
);
209 protected_set_expr_location (incr
, start_locus
);
211 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
212 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
213 cp_walk_tree (&incr
, cp_genericize_r
, data
, NULL
);
216 if (cond
&& TREE_CODE (cond
) != INTEGER_CST
)
218 /* If COND is constant, don't bother building an exit. If it's false,
219 we won't build a loop. If it's true, any exits are in the body. */
220 location_t cloc
= EXPR_LOC_OR_LOC (cond
, start_locus
);
221 exit
= build1_loc (cloc
, GOTO_EXPR
, void_type_node
,
222 get_bc_label (bc_break
));
223 exit
= fold_build3_loc (cloc
, COND_EXPR
, void_type_node
, cond
,
224 build_empty_stmt (cloc
), exit
);
227 if (exit
&& cond_is_first
)
228 append_to_statement_list (exit
, &stmt_list
);
229 append_to_statement_list (body
, &stmt_list
);
230 finish_bc_block (&stmt_list
, bc_continue
, clab
);
231 append_to_statement_list (incr
, &stmt_list
);
232 if (exit
&& !cond_is_first
)
233 append_to_statement_list (exit
, &stmt_list
);
236 stmt_list
= build_empty_stmt (start_locus
);
239 if (cond
&& integer_zerop (cond
))
242 loop
= fold_build3_loc (start_locus
, COND_EXPR
,
243 void_type_node
, cond
, stmt_list
,
244 build_empty_stmt (start_locus
));
250 location_t loc
= start_locus
;
251 if (!cond
|| integer_nonzerop (cond
))
252 loc
= EXPR_LOCATION (expr_first (body
));
253 if (loc
== UNKNOWN_LOCATION
)
255 loop
= build1_loc (loc
, LOOP_EXPR
, void_type_node
, stmt_list
);
259 append_to_statement_list (loop
, &stmt_list
);
260 finish_bc_block (&stmt_list
, bc_break
, blab
);
262 stmt_list
= build_empty_stmt (start_locus
);
267 /* Genericize a FOR_STMT node *STMT_P. */
270 genericize_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
275 tree init
= FOR_INIT_STMT (stmt
);
279 cp_walk_tree (&init
, cp_genericize_r
, data
, NULL
);
280 append_to_statement_list (init
, &expr
);
283 genericize_cp_loop (&loop
, EXPR_LOCATION (stmt
), FOR_COND (stmt
),
284 FOR_BODY (stmt
), FOR_EXPR (stmt
), 1, walk_subtrees
, data
);
285 append_to_statement_list (loop
, &expr
);
286 if (expr
== NULL_TREE
)
291 /* Genericize a WHILE_STMT node *STMT_P. */
294 genericize_while_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
297 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), WHILE_COND (stmt
),
298 WHILE_BODY (stmt
), NULL_TREE
, 1, walk_subtrees
, data
);
301 /* Genericize a DO_STMT node *STMT_P. */
304 genericize_do_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
307 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), DO_COND (stmt
),
308 DO_BODY (stmt
), NULL_TREE
, 0, walk_subtrees
, data
);
311 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
314 genericize_switch_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
317 tree break_block
, body
, cond
, type
;
318 location_t stmt_locus
= EXPR_LOCATION (stmt
);
320 break_block
= begin_bc_block (bc_break
, stmt_locus
);
322 body
= SWITCH_STMT_BODY (stmt
);
324 body
= build_empty_stmt (stmt_locus
);
325 cond
= SWITCH_STMT_COND (stmt
);
326 type
= SWITCH_STMT_TYPE (stmt
);
328 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
329 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
330 cp_walk_tree (&type
, cp_genericize_r
, data
, NULL
);
333 if (TREE_USED (break_block
))
334 SWITCH_BREAK_LABEL_P (break_block
) = 1;
335 finish_bc_block (&body
, bc_break
, break_block
);
336 *stmt_p
= build2_loc (stmt_locus
, SWITCH_EXPR
, type
, cond
, body
);
337 SWITCH_ALL_CASES_P (*stmt_p
) = SWITCH_STMT_ALL_CASES_P (stmt
);
338 gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt
)
339 || !TREE_USED (break_block
));
342 /* Genericize a CONTINUE_STMT node *STMT_P. */
345 genericize_continue_stmt (tree
*stmt_p
)
347 tree stmt_list
= NULL
;
348 tree pred
= build_predict_expr (PRED_CONTINUE
, NOT_TAKEN
);
349 tree label
= get_bc_label (bc_continue
);
350 location_t location
= EXPR_LOCATION (*stmt_p
);
351 tree jump
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
352 append_to_statement_list_force (pred
, &stmt_list
);
353 append_to_statement_list (jump
, &stmt_list
);
357 /* Genericize a BREAK_STMT node *STMT_P. */
360 genericize_break_stmt (tree
*stmt_p
)
362 tree label
= get_bc_label (bc_break
);
363 location_t location
= EXPR_LOCATION (*stmt_p
);
364 *stmt_p
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
367 /* Genericize a OMP_FOR node *STMT_P. */
370 genericize_omp_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
373 location_t locus
= EXPR_LOCATION (stmt
);
374 tree clab
= begin_bc_block (bc_continue
, locus
);
376 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_genericize_r
, data
, NULL
);
377 if (TREE_CODE (stmt
) != OMP_TASKLOOP
)
378 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
379 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_genericize_r
, data
, NULL
);
380 cp_walk_tree (&OMP_FOR_COND (stmt
), cp_genericize_r
, data
, NULL
);
381 cp_walk_tree (&OMP_FOR_INCR (stmt
), cp_genericize_r
, data
, NULL
);
382 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_genericize_r
, data
, NULL
);
385 finish_bc_block (&OMP_FOR_BODY (stmt
), bc_continue
, clab
);
388 /* Hook into the middle of gimplifying an OMP_FOR node. */
390 static enum gimplify_status
391 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
393 tree for_stmt
= *expr_p
;
394 gimple_seq seq
= NULL
;
396 /* Protect ourselves from recursion. */
397 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
399 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
401 gimplify_and_add (for_stmt
, &seq
);
402 gimple_seq_add_seq (pre_p
, seq
);
404 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
409 /* Gimplify an EXPR_STMT node. */
412 gimplify_expr_stmt (tree
*stmt_p
)
414 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
416 if (stmt
== error_mark_node
)
419 /* Gimplification of a statement expression will nullify the
420 statement if all its side effects are moved to *PRE_P and *POST_P.
422 In this case we will not want to emit the gimplified statement.
423 However, we may still want to emit a warning, so we do that before
425 if (stmt
&& warn_unused_value
)
427 if (!TREE_SIDE_EFFECTS (stmt
))
429 if (!IS_EMPTY_STMT (stmt
)
430 && !VOID_TYPE_P (TREE_TYPE (stmt
))
431 && !TREE_NO_WARNING (stmt
))
432 warning (OPT_Wunused_value
, "statement with no effect");
435 warn_if_unused_value (stmt
, input_location
);
438 if (stmt
== NULL_TREE
)
439 stmt
= alloc_stmt_list ();
444 /* Gimplify initialization from an AGGR_INIT_EXPR. */
447 cp_gimplify_init_expr (tree
*expr_p
)
449 tree from
= TREE_OPERAND (*expr_p
, 1);
450 tree to
= TREE_OPERAND (*expr_p
, 0);
453 /* What about code that pulls out the temp and uses it elsewhere? I
454 think that such code never uses the TARGET_EXPR as an initializer. If
455 I'm wrong, we'll abort because the temp won't have any RTL. In that
456 case, I guess we'll need to replace references somehow. */
457 if (TREE_CODE (from
) == TARGET_EXPR
)
458 from
= TARGET_EXPR_INITIAL (from
);
460 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
461 inside the TARGET_EXPR. */
464 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
466 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
467 replace the slot operand with our target.
469 Should we add a target parm to gimplify_expr instead? No, as in this
470 case we want to replace the INIT_EXPR. */
471 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
472 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
474 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
475 AGGR_INIT_EXPR_SLOT (sub
) = to
;
477 VEC_INIT_EXPR_SLOT (sub
) = to
;
480 /* The initialization is now a side-effect, so the container can
483 TREE_TYPE (from
) = void_type_node
;
486 /* Handle aggregate NSDMI. */
487 replace_placeholders (sub
, to
);
492 t
= TREE_OPERAND (t
, 1);
497 /* Gimplify a MUST_NOT_THROW_EXPR. */
499 static enum gimplify_status
500 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
503 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
504 tree body
= TREE_OPERAND (stmt
, 0);
505 gimple_seq try_
= NULL
;
506 gimple_seq catch_
= NULL
;
509 gimplify_and_add (body
, &try_
);
510 mnt
= gimple_build_eh_must_not_throw (terminate_fn
);
511 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
512 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
514 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
525 /* Return TRUE if an operand (OP) of a given TYPE being copied is
526 really just an empty class copy.
528 Check that the operand has a simple form so that TARGET_EXPRs and
529 non-empty CONSTRUCTORs get reduced properly, and we leave the
530 return slot optimization alone because it isn't a copy. */
533 simple_empty_class_p (tree type
, tree op
)
536 ((TREE_CODE (op
) == COMPOUND_EXPR
537 && simple_empty_class_p (type
, TREE_OPERAND (op
, 1)))
538 || TREE_CODE (op
) == EMPTY_CLASS_EXPR
539 || is_gimple_lvalue (op
)
540 || INDIRECT_REF_P (op
)
541 || (TREE_CODE (op
) == CONSTRUCTOR
542 && CONSTRUCTOR_NELTS (op
) == 0
543 && !TREE_CLOBBER_P (op
))
544 || (TREE_CODE (op
) == CALL_EXPR
545 && !CALL_EXPR_RETURN_SLOT_OPT (op
)))
546 && is_really_empty_class (type
);
549 /* Returns true if evaluating E as an lvalue has side-effects;
550 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
551 have side-effects until there is a read or write through it. */
554 lvalue_has_side_effects (tree e
)
556 if (!TREE_SIDE_EFFECTS (e
))
558 while (handled_component_p (e
))
560 if (TREE_CODE (e
) == ARRAY_REF
561 && TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 1)))
563 e
= TREE_OPERAND (e
, 0);
566 /* Just naming a variable has no side-effects. */
568 else if (INDIRECT_REF_P (e
))
569 /* Similarly, indirection has no side-effects. */
570 return TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 0));
572 /* For anything else, trust TREE_SIDE_EFFECTS. */
573 return TREE_SIDE_EFFECTS (e
);
576 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
579 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
581 int saved_stmts_are_full_exprs_p
= 0;
582 location_t loc
= EXPR_LOC_OR_LOC (*expr_p
, input_location
);
583 enum tree_code code
= TREE_CODE (*expr_p
);
584 enum gimplify_status ret
;
586 if (STATEMENT_CODE_P (code
))
588 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
589 current_stmt_tree ()->stmts_are_full_exprs_p
590 = STMT_IS_FULL_EXPR_P (*expr_p
);
596 simplify_aggr_init_expr (expr_p
);
602 location_t loc
= input_location
;
603 tree init
= VEC_INIT_EXPR_INIT (*expr_p
);
604 int from_array
= (init
&& TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
605 gcc_assert (EXPR_HAS_LOCATION (*expr_p
));
606 input_location
= EXPR_LOCATION (*expr_p
);
607 *expr_p
= build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p
), NULL_TREE
,
608 init
, VEC_INIT_EXPR_VALUE_INIT (*expr_p
),
610 tf_warning_or_error
);
612 cp_walk_tree (expr_p
, cp_fold_r
, &pset
, NULL
);
613 cp_genericize_tree (expr_p
, false);
615 input_location
= loc
;
620 /* FIXME communicate throw type to back end, probably by moving
621 THROW_EXPR into ../tree.def. */
622 *expr_p
= TREE_OPERAND (*expr_p
, 0);
626 case MUST_NOT_THROW_EXPR
:
627 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
630 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
631 LHS of an assignment might also be involved in the RHS, as in bug
634 cp_gimplify_init_expr (expr_p
);
635 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
641 /* If the back end isn't clever enough to know that the lhs and rhs
642 types are the same, add an explicit conversion. */
643 tree op0
= TREE_OPERAND (*expr_p
, 0);
644 tree op1
= TREE_OPERAND (*expr_p
, 1);
646 if (!error_operand_p (op0
)
647 && !error_operand_p (op1
)
648 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
649 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
650 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
651 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
652 TREE_TYPE (op0
), op1
);
654 else if (simple_empty_class_p (TREE_TYPE (op0
), op1
))
656 /* Remove any copies of empty classes. Also drop volatile
657 variables on the RHS to avoid infinite recursion from
658 gimplify_expr trying to load the value. */
659 if (TREE_SIDE_EFFECTS (op1
))
661 if (TREE_THIS_VOLATILE (op1
)
662 && (REFERENCE_CLASS_P (op1
) || DECL_P (op1
)))
663 op1
= build_fold_addr_expr (op1
);
665 gimplify_and_add (op1
, pre_p
);
667 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
668 is_gimple_lvalue
, fb_lvalue
);
669 *expr_p
= TREE_OPERAND (*expr_p
, 0);
671 /* P0145 says that the RHS is sequenced before the LHS.
672 gimplify_modify_expr gimplifies the RHS before the LHS, but that
673 isn't quite strong enough in two cases:
675 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
676 mean it's evaluated after the LHS.
678 2) the value calculation of the RHS is also sequenced before the
679 LHS, so for scalar assignment we need to preevaluate if the
680 RHS could be affected by LHS side-effects even if it has no
681 side-effects of its own. We don't need this for classes because
682 class assignment takes its RHS by reference. */
683 else if (flag_strong_eval_order
> 1
684 && TREE_CODE (*expr_p
) == MODIFY_EXPR
685 && lvalue_has_side_effects (op0
)
686 && (TREE_CODE (op1
) == CALL_EXPR
687 || (SCALAR_TYPE_P (TREE_TYPE (op1
))
688 && !TREE_CONSTANT (op1
))))
689 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (op1
, pre_p
);
694 case EMPTY_CLASS_EXPR
:
695 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
696 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
701 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
706 genericize_try_block (expr_p
);
711 genericize_catch_block (expr_p
);
716 genericize_eh_spec_block (expr_p
);
735 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
739 gimplify_expr_stmt (expr_p
);
743 case UNARY_PLUS_EXPR
:
745 tree arg
= TREE_OPERAND (*expr_p
, 0);
746 tree type
= TREE_TYPE (*expr_p
);
747 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
755 if (!CALL_EXPR_FN (*expr_p
))
756 /* Internal function call. */;
757 else if (CALL_EXPR_REVERSE_ARGS (*expr_p
))
759 /* This is a call to a (compound) assignment operator that used
760 the operator syntax; gimplify the RHS first. */
761 gcc_assert (call_expr_nargs (*expr_p
) == 2);
762 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p
));
763 enum gimplify_status t
764 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 1), pre_p
, loc
);
768 else if (CALL_EXPR_ORDERED_ARGS (*expr_p
))
770 /* Leave the last argument for gimplify_call_expr, to avoid problems
771 with __builtin_va_arg_pack(). */
772 int nargs
= call_expr_nargs (*expr_p
) - 1;
773 for (int i
= 0; i
< nargs
; ++i
)
775 enum gimplify_status t
776 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
, loc
);
781 else if (flag_strong_eval_order
782 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p
))
784 /* If flag_strong_eval_order, evaluate the object argument first. */
785 tree fntype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
786 if (INDIRECT_TYPE_P (fntype
))
787 fntype
= TREE_TYPE (fntype
);
788 if (TREE_CODE (fntype
) == METHOD_TYPE
)
790 enum gimplify_status t
791 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 0), pre_p
, loc
);
799 if (TREE_OPERAND (*expr_p
, 0)
800 && (TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == INIT_EXPR
801 || TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == MODIFY_EXPR
))
803 expr_p
= &TREE_OPERAND (*expr_p
, 0);
804 code
= TREE_CODE (*expr_p
);
805 /* Avoid going through the INIT_EXPR case, which can
806 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
807 goto modify_expr_case
;
812 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
816 /* Restore saved state. */
817 if (STATEMENT_CODE_P (code
))
818 current_stmt_tree ()->stmts_are_full_exprs_p
819 = saved_stmts_are_full_exprs_p
;
825 is_invisiref_parm (const_tree t
)
827 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
828 && DECL_BY_REFERENCE (t
));
831 /* Return true if the uid in both int tree maps are equal. */
834 cxx_int_tree_map_hasher::equal (cxx_int_tree_map
*a
, cxx_int_tree_map
*b
)
836 return (a
->uid
== b
->uid
);
839 /* Hash a UID in a cxx_int_tree_map. */
842 cxx_int_tree_map_hasher::hash (cxx_int_tree_map
*item
)
847 /* A stable comparison routine for use with splay trees and DECLs. */
850 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
855 return DECL_UID (a
) - DECL_UID (b
);
858 /* OpenMP context during genericization. */
860 struct cp_genericize_omp_taskreg
864 struct cp_genericize_omp_taskreg
*outer
;
865 splay_tree variables
;
868 /* Return true if genericization should try to determine if
869 DECL is firstprivate or shared within task regions. */
872 omp_var_to_track (tree decl
)
874 tree type
= TREE_TYPE (decl
);
875 if (is_invisiref_parm (decl
))
876 type
= TREE_TYPE (type
);
877 else if (TYPE_REF_P (type
))
878 type
= TREE_TYPE (type
);
879 while (TREE_CODE (type
) == ARRAY_TYPE
)
880 type
= TREE_TYPE (type
);
881 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
883 if (VAR_P (decl
) && CP_DECL_THREAD_LOCAL_P (decl
))
885 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
890 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
893 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
895 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
896 (splay_tree_key
) decl
);
899 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
901 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
902 if (!omp_ctx
->default_shared
)
904 struct cp_genericize_omp_taskreg
*octx
;
906 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
908 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
909 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
911 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
914 if (octx
->is_parallel
)
918 && (TREE_CODE (decl
) == PARM_DECL
919 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
920 && DECL_CONTEXT (decl
) == current_function_decl
)))
921 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
922 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
924 /* DECL is implicitly determined firstprivate in
925 the current task construct. Ensure copy ctor and
926 dtor are instantiated, because during gimplification
927 it will be already too late. */
928 tree type
= TREE_TYPE (decl
);
929 if (is_invisiref_parm (decl
))
930 type
= TREE_TYPE (type
);
931 else if (TYPE_REF_P (type
))
932 type
= TREE_TYPE (type
);
933 while (TREE_CODE (type
) == ARRAY_TYPE
)
934 type
= TREE_TYPE (type
);
935 get_copy_ctor (type
, tf_none
);
936 get_dtor (type
, tf_none
);
939 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
943 /* Genericization context. */
945 struct cp_genericize_data
947 hash_set
<tree
> *p_set
;
948 vec
<tree
> bind_expr_stack
;
949 struct cp_genericize_omp_taskreg
*omp_ctx
;
952 bool handle_invisiref_parm_p
;
955 /* Perform any pre-gimplification folding of C++ front end trees to
957 Note: The folding of none-omp cases is something to move into
958 the middle-end. As for now we have most foldings only on GENERIC
959 in fold-const, we need to perform this before transformation to
963 cp_fold_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
968 *stmt_p
= stmt
= cp_fold (*stmt_p
);
970 if (((hash_set
<tree
> *) data
)->add (stmt
))
972 /* Don't walk subtrees of stmts we've already walked once, otherwise
973 we can have exponential complexity with e.g. lots of nested
974 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
975 always the same tree, which the first time cp_fold_r has been
976 called on it had the subtrees walked. */
981 code
= TREE_CODE (stmt
);
982 if (code
== OMP_FOR
|| code
== OMP_SIMD
|| code
== OMP_DISTRIBUTE
983 || code
== OMP_TASKLOOP
|| code
== OACC_LOOP
)
988 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_fold_r
, data
, NULL
);
989 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_fold_r
, data
, NULL
);
990 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_fold_r
, data
, NULL
);
991 x
= OMP_FOR_COND (stmt
);
992 if (x
&& TREE_CODE_CLASS (TREE_CODE (x
)) == tcc_comparison
)
994 cp_walk_tree (&TREE_OPERAND (x
, 0), cp_fold_r
, data
, NULL
);
995 cp_walk_tree (&TREE_OPERAND (x
, 1), cp_fold_r
, data
, NULL
);
997 else if (x
&& TREE_CODE (x
) == TREE_VEC
)
999 n
= TREE_VEC_LENGTH (x
);
1000 for (i
= 0; i
< n
; i
++)
1002 tree o
= TREE_VEC_ELT (x
, i
);
1003 if (o
&& TREE_CODE_CLASS (TREE_CODE (o
)) == tcc_comparison
)
1004 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1007 x
= OMP_FOR_INCR (stmt
);
1008 if (x
&& TREE_CODE (x
) == TREE_VEC
)
1010 n
= TREE_VEC_LENGTH (x
);
1011 for (i
= 0; i
< n
; i
++)
1013 tree o
= TREE_VEC_ELT (x
, i
);
1014 if (o
&& TREE_CODE (o
) == MODIFY_EXPR
)
1015 o
= TREE_OPERAND (o
, 1);
1016 if (o
&& (TREE_CODE (o
) == PLUS_EXPR
|| TREE_CODE (o
) == MINUS_EXPR
1017 || TREE_CODE (o
) == POINTER_PLUS_EXPR
))
1019 cp_walk_tree (&TREE_OPERAND (o
, 0), cp_fold_r
, data
, NULL
);
1020 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1024 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_fold_r
, data
, NULL
);
1031 /* Fold ALL the trees! FIXME we should be able to remove this, but
1032 apparently that still causes optimization regressions. */
1035 cp_fold_function (tree fndecl
)
1037 hash_set
<tree
> pset
;
1038 cp_walk_tree (&DECL_SAVED_TREE (fndecl
), cp_fold_r
, &pset
, NULL
);
1041 /* Perform any pre-gimplification lowering of C++ front end trees to
1045 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
1047 tree stmt
= *stmt_p
;
1048 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
1049 hash_set
<tree
> *p_set
= wtd
->p_set
;
1051 /* If in an OpenMP context, note var uses. */
1052 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
1054 || TREE_CODE (stmt
) == PARM_DECL
1055 || TREE_CODE (stmt
) == RESULT_DECL
)
1056 && omp_var_to_track (stmt
))
1057 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
1059 /* Don't dereference parms in a thunk, pass the references through. */
1060 if ((TREE_CODE (stmt
) == CALL_EXPR
&& CALL_FROM_THUNK_P (stmt
))
1061 || (TREE_CODE (stmt
) == AGGR_INIT_EXPR
&& AGGR_INIT_FROM_THUNK_P (stmt
)))
1067 /* Dereference invisible reference parms. */
1068 if (wtd
->handle_invisiref_parm_p
&& is_invisiref_parm (stmt
))
1070 *stmt_p
= convert_from_reference (stmt
);
1071 p_set
->add (*stmt_p
);
1076 /* Map block scope extern declarations to visible declarations with the
1077 same name and type in outer scopes if any. */
1078 if (cp_function_chain
->extern_decl_map
1079 && VAR_OR_FUNCTION_DECL_P (stmt
)
1080 && DECL_EXTERNAL (stmt
))
1082 struct cxx_int_tree_map
*h
, in
;
1083 in
.uid
= DECL_UID (stmt
);
1084 h
= cp_function_chain
->extern_decl_map
->find_with_hash (&in
, in
.uid
);
1093 if (TREE_CODE (stmt
) == INTEGER_CST
1094 && TYPE_REF_P (TREE_TYPE (stmt
))
1095 && (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1096 && !wtd
->no_sanitize_p
)
1098 ubsan_maybe_instrument_reference (stmt_p
);
1099 if (*stmt_p
!= stmt
)
1106 /* Other than invisiref parms, don't walk the same tree twice. */
1107 if (p_set
->contains (stmt
))
1113 switch (TREE_CODE (stmt
))
1116 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1118 /* If in an OpenMP context, note var uses. */
1119 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
1120 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
1121 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
1122 *stmt_p
= fold_convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
1128 if (TREE_OPERAND (stmt
, 0) && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1129 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1134 switch (OMP_CLAUSE_CODE (stmt
))
1136 case OMP_CLAUSE_LASTPRIVATE
:
1137 /* Don't dereference an invisiref in OpenMP clauses. */
1138 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1141 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
1142 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
1143 cp_genericize_r
, data
, NULL
);
1146 case OMP_CLAUSE_PRIVATE
:
1147 /* Don't dereference an invisiref in OpenMP clauses. */
1148 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1150 else if (wtd
->omp_ctx
!= NULL
)
1152 /* Private clause doesn't cause any references to the
1153 var in outer contexts, avoid calling
1154 omp_cxx_notice_variable for it. */
1155 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
1156 wtd
->omp_ctx
= NULL
;
1157 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1163 case OMP_CLAUSE_SHARED
:
1164 case OMP_CLAUSE_FIRSTPRIVATE
:
1165 case OMP_CLAUSE_COPYIN
:
1166 case OMP_CLAUSE_COPYPRIVATE
:
1167 /* Don't dereference an invisiref in OpenMP clauses. */
1168 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1171 case OMP_CLAUSE_REDUCTION
:
1172 /* Don't dereference an invisiref in reduction clause's
1173 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1174 still needs to be genericized. */
1175 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1178 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1179 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1180 cp_genericize_r
, data
, NULL
);
1181 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1182 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1183 cp_genericize_r
, data
, NULL
);
1191 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1192 to lower this construct before scanning it, so we need to lower these
1193 before doing anything else. */
1195 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1196 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1199 CLEANUP_BODY (stmt
),
1200 CLEANUP_EXPR (stmt
));
1204 genericize_if_stmt (stmt_p
);
1205 /* *stmt_p has changed, tail recurse to handle it again. */
1206 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1208 /* COND_EXPR might have incompatible types in branches if one or both
1209 arms are bitfields. Fix it up now. */
1213 = (TREE_OPERAND (stmt
, 1)
1214 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1217 = (TREE_OPERAND (stmt
, 2)
1218 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1221 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1222 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1224 TREE_OPERAND (stmt
, 1)
1225 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1226 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1230 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1231 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1233 TREE_OPERAND (stmt
, 2)
1234 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1235 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1242 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0))
1245 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1247 && !DECL_EXTERNAL (decl
)
1248 && omp_var_to_track (decl
))
1251 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1252 (splay_tree_key
) decl
);
1254 splay_tree_insert (wtd
->omp_ctx
->variables
,
1255 (splay_tree_key
) decl
,
1257 ? OMP_CLAUSE_DEFAULT_SHARED
1258 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1261 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1263 /* The point here is to not sanitize static initializers. */
1264 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1265 wtd
->no_sanitize_p
= true;
1266 for (tree decl
= BIND_EXPR_VARS (stmt
);
1268 decl
= DECL_CHAIN (decl
))
1270 && TREE_STATIC (decl
)
1271 && DECL_INITIAL (decl
))
1272 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1273 wtd
->no_sanitize_p
= no_sanitize_p
;
1275 wtd
->bind_expr_stack
.safe_push (stmt
);
1276 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1277 cp_genericize_r
, data
, NULL
);
1278 wtd
->bind_expr_stack
.pop ();
1283 tree block
= NULL_TREE
;
1285 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1286 BLOCK, and append an IMPORTED_DECL to its
1287 BLOCK_VARS chained list. */
1288 if (wtd
->bind_expr_stack
.exists ())
1291 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1292 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1297 tree decl
= TREE_OPERAND (stmt
, 0);
1300 if (undeduced_auto_decl (decl
))
1301 /* Omit from the GENERIC, the back-end can't handle it. */;
1304 tree using_directive
= make_node (IMPORTED_DECL
);
1305 TREE_TYPE (using_directive
) = void_type_node
;
1307 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
) = decl
;
1308 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1309 BLOCK_VARS (block
) = using_directive
;
1312 /* The USING_STMT won't appear in GENERIC. */
1313 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1319 if (TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1321 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1322 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1327 tree d
= DECL_EXPR_DECL (stmt
);
1329 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1337 struct cp_genericize_omp_taskreg omp_ctx
;
1342 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1343 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1344 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1345 omp_ctx
.outer
= wtd
->omp_ctx
;
1346 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1347 wtd
->omp_ctx
= &omp_ctx
;
1348 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1349 switch (OMP_CLAUSE_CODE (c
))
1351 case OMP_CLAUSE_SHARED
:
1352 case OMP_CLAUSE_PRIVATE
:
1353 case OMP_CLAUSE_FIRSTPRIVATE
:
1354 case OMP_CLAUSE_LASTPRIVATE
:
1355 decl
= OMP_CLAUSE_DECL (c
);
1356 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1358 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1361 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1362 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1363 ? OMP_CLAUSE_DEFAULT_SHARED
1364 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1365 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
&& omp_ctx
.outer
)
1366 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1368 case OMP_CLAUSE_DEFAULT
:
1369 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1370 omp_ctx
.default_shared
= true;
1374 if (TREE_CODE (stmt
) == OMP_TASKLOOP
)
1375 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1377 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1378 wtd
->omp_ctx
= omp_ctx
.outer
;
1379 splay_tree_delete (omp_ctx
.variables
);
1386 tree try_block
= wtd
->try_block
;
1387 wtd
->try_block
= stmt
;
1388 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1389 wtd
->try_block
= try_block
;
1390 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1394 case MUST_NOT_THROW_EXPR
:
1395 /* MUST_NOT_THROW_COND might be something else with TM. */
1396 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
1399 tree try_block
= wtd
->try_block
;
1400 wtd
->try_block
= stmt
;
1401 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1402 wtd
->try_block
= try_block
;
1408 location_t loc
= location_of (stmt
);
1409 if (TREE_NO_WARNING (stmt
))
1411 else if (wtd
->try_block
)
1413 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
1414 && warning_at (loc
, OPT_Wterminate
,
1415 "throw will always call terminate()")
1416 && cxx_dialect
>= cxx11
1417 && DECL_DESTRUCTOR_P (current_function_decl
))
1418 inform (loc
, "in C++11 destructors default to noexcept");
1422 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
1423 && DECL_DESTRUCTOR_P (current_function_decl
)
1424 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
1426 && (get_defaulted_eh_spec (current_function_decl
)
1427 == empty_except_spec
))
1428 warning_at (loc
, OPT_Wc__11_compat
,
1429 "in C++11 this throw will terminate because "
1430 "destructors default to noexcept");
1436 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1440 genericize_for_stmt (stmt_p
, walk_subtrees
, data
);
1444 genericize_while_stmt (stmt_p
, walk_subtrees
, data
);
1448 genericize_do_stmt (stmt_p
, walk_subtrees
, data
);
1452 genericize_switch_stmt (stmt_p
, walk_subtrees
, data
);
1456 genericize_continue_stmt (stmt_p
);
1460 genericize_break_stmt (stmt_p
);
1465 case OMP_DISTRIBUTE
:
1467 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1471 /* By the time we get here we're handing off to the back end, so we don't
1472 need or want to preserve PTRMEM_CST anymore. */
1473 *stmt_p
= cplus_expand_constant (stmt
);
1478 /* For MEM_REF, make sure not to sanitize the second operand even
1479 if it has reference type. It is just an offset with a type
1480 holding other information. There is no other processing we
1481 need to do for INTEGER_CSTs, so just ignore the second argument
1483 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1488 if (!wtd
->no_sanitize_p
1489 && sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
)
1490 && TYPE_REF_P (TREE_TYPE (stmt
)))
1491 ubsan_maybe_instrument_reference (stmt_p
);
1495 if (!wtd
->no_sanitize_p
1496 && sanitize_flags_p ((SANITIZE_NULL
1497 | SANITIZE_ALIGNMENT
| SANITIZE_VPTR
)))
1499 tree fn
= CALL_EXPR_FN (stmt
);
1501 && !error_operand_p (fn
)
1502 && INDIRECT_TYPE_P (TREE_TYPE (fn
))
1503 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) == METHOD_TYPE
)
1506 = TREE_CODE (fn
) == ADDR_EXPR
1507 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
1508 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn
, 0));
1509 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1510 ubsan_maybe_instrument_member_call (stmt
, is_ctor
);
1511 if (sanitize_flags_p (SANITIZE_VPTR
) && !is_ctor
)
1512 cp_ubsan_maybe_instrument_member_call (stmt
);
1514 else if (fn
== NULL_TREE
1515 && CALL_EXPR_IFN (stmt
) == IFN_UBSAN_NULL
1516 && TREE_CODE (CALL_EXPR_ARG (stmt
, 0)) == INTEGER_CST
1517 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt
, 0))))
1521 case AGGR_INIT_EXPR
:
1522 /* For calls to a multi-versioned function, overload resolution
1523 returns the function with the highest target priority, that is,
1524 the version that will checked for dispatching first. If this
1525 version is inlinable, a direct call to this version can be made
1526 otherwise the call should go through the dispatcher. */
1528 tree fn
= cp_get_callee_fndecl_nofold (stmt
);
1529 if (fn
&& DECL_FUNCTION_VERSIONED (fn
)
1530 && (current_function_decl
== NULL
1531 || !targetm
.target_option
.can_inline_p (current_function_decl
,
1533 if (tree dis
= get_function_version_dispatcher (fn
))
1535 mark_versions_used (dis
);
1536 dis
= build_address (dis
);
1537 if (TREE_CODE (stmt
) == CALL_EXPR
)
1538 CALL_EXPR_FN (stmt
) = dis
;
1540 AGGR_INIT_EXPR_FN (stmt
) = dis
;
1546 if (TARGET_EXPR_INITIAL (stmt
)
1547 && TREE_CODE (TARGET_EXPR_INITIAL (stmt
)) == CONSTRUCTOR
1548 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt
)))
1549 TARGET_EXPR_NO_ELIDE (stmt
) = 1;
1553 if (IS_TYPE_OR_DECL_P (stmt
))
1558 p_set
->add (*stmt_p
);
1563 /* Lower C++ front end trees to GENERIC in T_P. */
/* NOTE(review): this chunk is a damaged extraction -- each original source
   line is split across several physical lines, and gaps in the embedded
   original line numbers show that some lines are missing entirely.
   Comments below document only what the surviving tokens establish.  */
/* HANDLE_INVISIREF_PARM_P is forwarded into the walk state; presumably it
   tells cp_genericize_r whether to rewrite invisible-reference parameters
   -- TODO confirm against cp_genericize_r.  */
1566 cp_genericize_tree (tree
* t_p
, bool handle_invisiref_parm_p
)
/* Walk state shared by every cp_genericize_r callback invocation.  */
1568 struct cp_genericize_data wtd
;
1570 wtd
.p_set
= new hash_set
<tree
>;
1571 wtd
.bind_expr_stack
.create (0);
1573 wtd
.try_block
= NULL_TREE
;
1574 wtd
.no_sanitize_p
= false;
1575 wtd
.handle_invisiref_parm_p
= handle_invisiref_parm_p
;
/* Do the actual lowering walk over *T_P.  */
1576 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1578 wtd
.bind_expr_stack
.release ();
/* With -fsanitize=vptr, instrument member accesses after lowering.  */
1579 if (sanitize_flags_p (SANITIZE_VPTR
))
1580 cp_ubsan_instrument_member_accesses (t_p
);
1583 /* If a function that should end with a return in non-void
1584 function doesn't obviously end with return, add ubsan
1585 instrumentation code to verify it at runtime. If -fsanitize=return
1586 is not enabled, instrument __builtin_unreachable. */
/* NOTE(review): extraction damage -- original lines are fragmented and
   several are elided (visible as gaps in the embedded line numbers),
   including parts of the early-return logic and the switch body.  */
1589 cp_maybe_instrument_return (tree fndecl
)
/* Nothing to do for void-returning functions, constructors, destructors,
   or when the target says not to warn about missing returns.  */
1591 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
1592 || DECL_CONSTRUCTOR_P (fndecl
)
1593 || DECL_DESTRUCTOR_P (fndecl
)
1594 || !targetm
.warn_func_return (fndecl
))
1597 if (!sanitize_flags_p (SANITIZE_RETURN
, fndecl
)
1598 /* Don't add __builtin_unreachable () if not optimizing, it will not
1599 improve any optimizations in that case, just break UB code.
1600 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
1601 UBSan covers this with ubsan_instrument_return above where sufficient
1602 information is provided, while the __builtin_unreachable () below
1603 if return sanitization is disabled will just result in hard to
1604 understand runtime error without location. */
1606 || sanitize_flags_p (SANITIZE_UNREACHABLE
, fndecl
)))
/* Peel wrappers off the saved body to find the trailing statement.  */
1609 tree t
= DECL_SAVED_TREE (fndecl
)
;
1612 switch (TREE_CODE (t
))
1615 t
= BIND_EXPR_BODY (t
);
1617 case TRY_FINALLY_EXPR
:
1618 case CLEANUP_POINT_EXPR
:
1619 t
= TREE_OPERAND (t
, 0);
1621 case STATEMENT_LIST
:
1623 tree_stmt_iterator i
= tsi_last (t
);
/* Append the instrumentation to the (possibly BIND_EXPR-wrapped) body.  */
1640 tree
*p
= &DECL_SAVED_TREE (fndecl
);
1641 if (TREE_CODE (*p
) == BIND_EXPR
)
1642 p
= &BIND_EXPR_BODY (*p
);
1644 location_t loc
= DECL_SOURCE_LOCATION (fndecl
);
/* Prefer the UBSan runtime check when -fsanitize=return is active;
   otherwise fall back to a bare __builtin_unreachable () call.  */
1645 if (sanitize_flags_p (SANITIZE_RETURN
, fndecl
))
1646 t
= ubsan_instrument_return (loc
);
1649 tree fndecl
= builtin_decl_explicit (BUILT_IN_UNREACHABLE
);
1650 t
= build_call_expr_loc (BUILTINS_LOCATION
, fndecl
, 0);
1653 append_to_statement_list (t
, p
);
/* Main entry point: genericize the body of FNDECL.
   NOTE(review): extraction damage -- lines are fragmented and several
   originals are elided; comments only state what surviving tokens show.  */
1657 cp_genericize (tree fndecl
)
1661 /* Fix up the types of parms passed by invisible reference. */
1662 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= DECL_CHAIN (t
))
1663 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
1665 /* If a function's arguments are copied to create a thunk,
1666 then DECL_BY_REFERENCE will be set -- but the type of the
1667 argument will be a pointer type, so we will never get
1669 gcc_assert (!DECL_BY_REFERENCE (t
));
1670 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
/* Rewrite the parm to its by-reference ABI type and mark it so.  */
1671 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
1672 DECL_BY_REFERENCE (t
) = 1;
1673 TREE_ADDRESSABLE (t
) = 0;
1677 /* Do the same for the return value. */
1678 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
1680 t
= DECL_RESULT (fndecl
);
1681 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
1682 DECL_BY_REFERENCE (t
) = 1;
1683 TREE_ADDRESSABLE (t
) = 0;
1687 /* Adjust DECL_VALUE_EXPR of the original var. */
1688 tree outer
= outer_curly_brace_block (current_function_decl
);
/* Find the named-return-value variable matching the result decl and
   re-point its DECL_VALUE_EXPR through the new reference type.  */
1692 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1694 && DECL_NAME (t
) == DECL_NAME (var
)
1695 && DECL_HAS_VALUE_EXPR_P (var
)
1696 && DECL_VALUE_EXPR (var
) == t
)
1698 tree val
= convert_from_reference (t
);
1699 SET_DECL_VALUE_EXPR (var
, val
);
1705 /* If we're a clone, the body is already GIMPLE. */
1706 if (DECL_CLONED_FUNCTION_P (fndecl
))
1709 /* Allow cp_genericize calls to be nested. */
/* Save and clear the break/continue label stack around this function so a
   nested genericization cannot see our labels.  */
1710 tree save_bc_label
[2];
1711 save_bc_label
[bc_break
] = bc_label
[bc_break
];
1712 save_bc_label
[bc_continue
] = bc_label
[bc_continue
];
1713 bc_label
[bc_break
] = NULL_TREE
;
1714 bc_label
[bc_continue
] = NULL_TREE
;
1716 /* We do want to see every occurrence of the parms, so we can't just use
1717 walk_tree's hash functionality. */
1718 cp_genericize_tree (&DECL_SAVED_TREE (fndecl
), true);
1720 cp_maybe_instrument_return (fndecl
);
1722 /* Do everything else. */
1723 c_genericize (fndecl
);
/* The walk must have consumed every break/continue label it pushed.  */
1725 gcc_assert (bc_label
[bc_break
] == NULL
);
1726 gcc_assert (bc_label
[bc_continue
] == NULL
);
1727 bc_label
[bc_break
] = save_bc_label
[bc_break
];
1728 bc_label
[bc_continue
] = save_bc_label
[bc_continue
];
1731 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1732 NULL if there is in fact nothing to do. ARG2 may be null if FN
1733 actually only takes one argument. */
/* NOTE(review): extraction damage -- fragmented lines with elided
   originals (early-return checks, braces, loop headers are missing).  */
1736 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
1738 tree defparm
, parm
, t
;
/* Build the argument vector for a direct call to FN.  */
1746 nargs
= list_length (DECL_ARGUMENTS (fn
));
1747 argarray
= XALLOCAVEC (tree
, nargs
);
/* DEFPARM walks the parameter-type list past the `this' slot.  */
1749 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
1751 defparm
= TREE_CHAIN (defparm
);
1753 bool is_method
= TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
;
/* Array case: emit an explicit element-by-element loop over ARG1 (and
   ARG2 in lock-step when present).  */
1754 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
1756 tree inner_type
= TREE_TYPE (arg1
);
1757 tree start1
, end1
, p1
;
1758 tree start2
= NULL
, p2
= NULL
;
1759 tree ret
= NULL
, lab
;
/* Strip nested array types down to the element type, indexing element 0
   at each level to form the loop start addresses.  */
1765 inner_type
= TREE_TYPE (inner_type
);
1766 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
1767 size_zero_node
, NULL
, NULL
);
1769 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
1770 size_zero_node
, NULL
, NULL
);
1772 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
1773 start1
= build_fold_addr_expr_loc (input_location
, start1
);
1775 start2
= build_fold_addr_expr_loc (input_location
, start2
);
/* END1 = START1 + sizeof (ARG1): one past the last element.  */
1777 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
1778 end1
= fold_build_pointer_plus (start1
, end1
);
/* P1/P2 are the loop induction pointers, initialized to the starts.  */
1780 p1
= create_tmp_var (TREE_TYPE (start1
));
1781 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
1782 append_to_statement_list (t
, &ret
);
1786 p2
= create_tmp_var (TREE_TYPE (start2
));
1787 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
1788 append_to_statement_list (t
, &ret
);
/* Loop head label; the back-edge below jumps here.  */
1791 lab
= create_artificial_label (input_location
);
1792 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
1793 append_to_statement_list (t
, &ret
);
1798 /* Handle default arguments. */
1799 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1800 parm
= TREE_CHAIN (parm
), i
++)
1801 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1802 TREE_PURPOSE (parm
), fn
,
1803 i
- is_method
, tf_warning_or_error
);
/* Call FN on the current element(s), discarding the result.  */
1804 t
= build_call_a (fn
, i
, argarray
);
1805 t
= fold_convert (void_type_node
, t
);
1806 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1807 append_to_statement_list (t
, &ret
);
/* Advance P1 (and P2) by one element.  */
1809 t
= fold_build_pointer_plus (p1
, TYPE_SIZE_UNIT (inner_type
));
1810 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
1811 append_to_statement_list (t
, &ret
);
1815 t
= fold_build_pointer_plus (p2
, TYPE_SIZE_UNIT (inner_type
));
1816 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
1817 append_to_statement_list (t
, &ret
);
/* Loop back while P1 != END1.  */
1820 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
1821 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
1822 append_to_statement_list (t
, &ret
);
/* Scalar case: a single call with &ARG1 (and &ARG2 if present).  */
1828 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
1830 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
1831 /* Handle default arguments. */
1832 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1833 parm
= TREE_CHAIN (parm
), i
++)
1834 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1835 TREE_PURPOSE (parm
), fn
,
1836 i
- is_method
, tf_warning_or_error
);
1837 t
= build_call_a (fn
, i
, argarray
);
1838 t
= fold_convert (void_type_node
, t
);
1839 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1843 /* Return code to initialize DECL with its default constructor, or
1844 NULL if there's nothing to do. */
1847 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
/* The clause's info vector caches the special-member helpers; slot 0 is
   the constructor (see cxx_omp_clause_copy_ctor, which also uses 0).  */
1849 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1853 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
1858 /* Return code to initialize DST with a copy constructor from SRC. */
1861 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
/* Slot 0 of the clause info is the (copy) constructor helper.  */
1863 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1867 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
/* Fallback: plain bitwise/scalar assignment when no ctor is needed.  */
1869 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1874 /* Similarly, except use an assignment operator instead. */
1877 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
/* Slot 2 of the clause info is the copy-assignment helper.  */
1879 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1883 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
/* Fallback: plain assignment when no operator= call is needed.  */
1885 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1890 /* Return code to destroy DECL. */
1893 cxx_omp_clause_dtor (tree clause
, tree decl
)
/* Slot 1 of the clause info is the destructor helper.  */
1895 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1899 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
1904 /* True if OpenMP should privatize what this DECL points to rather
1905 than the DECL itself. */
1908 cxx_omp_privatize_by_reference (const_tree decl
)
/* Reference-typed decls and invisible-reference parms are privatized
   through the pointee, not the handle.  */
1910 return (TYPE_REF_P (TREE_TYPE (decl
))
1911 || is_invisiref_parm (decl
));
1914 /* Return true if DECL is const qualified var having no mutable member. */
/* NOTE(review): extraction damage -- some original lines are elided here
   (gaps in the embedded numbering), including branch bodies.  */
1916 cxx_omp_const_qual_no_mutable (tree decl
)
1918 tree type
= TREE_TYPE (decl
);
/* For reference types (invisible-reference parms) look through to the
   referent type.  */
1919 if (TYPE_REF_P (type
))
1921 if (!is_invisiref_parm (decl
))
1923 type
= TREE_TYPE (type
);
1925 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
1927 /* NVR doesn't preserve const qualification of the
/* Scan the outer block for the named-return-value variable matching
   DECL and recover its (possibly const-qualified) declared type.  */
1929 tree outer
= outer_curly_brace_block (current_function_decl
);
1933 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1935 && DECL_NAME (decl
) == DECL_NAME (var
)
1936 && (TYPE_MAIN_VARIANT (type
)
1937 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
1939 if (TYPE_READONLY (TREE_TYPE (var
)))
1940 type
= TREE_TYPE (var
);
1946 if (type
== error_mark_node
)
1949 /* Variables with const-qualified type having no mutable member
1950 are predetermined shared. */
1951 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
1957 /* True if OpenMP sharing attribute of DECL is predetermined. */
1959 enum omp_clause_default_kind
1960 cxx_omp_predetermined_sharing_1 (tree decl
)
1962 /* Static data members are predetermined shared. */
1963 if (TREE_STATIC (decl
))
/* Only a static whose context is a class counts as a static data
   member here.  */
1965 tree ctx
= CP_DECL_CONTEXT (decl
);
1966 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1967 return OMP_CLAUSE_DEFAULT_SHARED
;
1970 /* Const qualified vars having no mutable member are predetermined
1972 if (cxx_omp_const_qual_no_mutable (decl
))
1973 return OMP_CLAUSE_DEFAULT_SHARED
;
/* Anything else: sharing is not predetermined by the C++ rules.  */
1975 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1978 /* Likewise, but also include the artificial vars. We don't want to
1979 disallow the artificial vars being mentioned in explicit clauses,
1980 as we use artificial vars e.g. for loop constructs with random
1981 access iterators other than pointers, but during gimplification
1982 we want to treat them as predetermined. */
1984 enum omp_clause_default_kind
1985 cxx_omp_predetermined_sharing (tree decl
)
/* First defer to the strict language rules.  */
1987 enum omp_clause_default_kind ret
= cxx_omp_predetermined_sharing_1 (decl
);
1988 if (ret
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
1991 /* Predetermine artificial variables holding integral values, those
1992 are usually result of gimplify_one_sizepos or SAVE_EXPR
/* ... but never OMP-privatized member proxies, which must keep their
   unspecified status.  */
1995 && DECL_ARTIFICIAL (decl
)
1996 && INTEGRAL_TYPE_P (TREE_TYPE (decl
))
1997 && !(DECL_LANG_SPECIFIC (decl
)
1998 && DECL_OMP_PRIVATIZED_MEMBER (decl
)))
1999 return OMP_CLAUSE_DEFAULT_SHARED
;
2001 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
2004 /* Finalize an implicitly determined clause. */
/* NOTE(review): extraction damage -- several original lines are elided
   (gaps in the embedded numbering), including the condition guarding the
   make_shared path.  */
2007 cxx_omp_finish_clause (tree c
, gimple_seq
*)
2009 tree decl
, inner_type
;
2010 bool make_shared
= false;
/* Only implicit firstprivate clauses need work here.  */
2012 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
2015 decl
= OMP_CLAUSE_DECL (c
);
2016 decl
= require_complete_type (decl
);
2017 inner_type
= TREE_TYPE (decl
);
2018 if (decl
== error_mark_node
)
2020 else if (TYPE_REF_P (TREE_TYPE (decl
)))
2021 inner_type
= TREE_TYPE (inner_type
);
2023 /* We're interested in the base element, not arrays. */
2024 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
2025 inner_type
= TREE_TYPE (inner_type
);
2027 /* Check for special function availability by building a call to one.
2028 Save the results, because later we won't be in the right context
2029 for making these queries. */
2031 && CLASS_TYPE_P (inner_type
)
2032 && cxx_omp_create_clause_info (c
, inner_type
, false, true, false, true))
/* Demote the clause to plain shared when the needed special members
   are unavailable.  */
2037 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;
2038 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
) = 0;
2039 OMP_CLAUSE_SHARED_READONLY (c
) = 0;
2043 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2044 disregarded in OpenMP construct, because it is going to be
2045 remapped during OpenMP lowering. SHARED is true if DECL
2046 is going to be shared, false if it is going to be privatized. */
2049 cxx_omp_disregard_value_expr (tree decl
, bool shared
)
/* Only artificial OMP-privatized member proxies that actually carry a
   DECL_VALUE_EXPR qualify.  */
2053 && DECL_HAS_VALUE_EXPR_P (decl
)
2054 && DECL_ARTIFICIAL (decl
)
2055 && DECL_LANG_SPECIFIC (decl
)
2056 && DECL_OMP_PRIVATIZED_MEMBER (decl
);
2059 /* Fold expression X which is used as an rvalue if RVAL is true. */
/* NOTE(review): extraction damage -- the surrounding loop/return lines
   are elided; only the decl-constant-value substitution survives.  */
2062 cp_fold_maybe_rvalue (tree x
, bool rval
)
/* In rvalue context, a non-reference DECL may be replaced by its known
   constant value.  */
2067 if (rval
&& DECL_P (x
)
2068 && !TYPE_REF_P (TREE_TYPE (x
)))
2070 tree v
= decl_constant_value (x
);
2071 if (v
!= x
&& v
!= error_mark_node
)
2082 /* Fold expression X which is used as an rvalue. */
/* Thin convenience wrapper: cp_fold_maybe_rvalue with RVAL == true.  */
2085 cp_fold_rvalue (tree x
)
2087 return cp_fold_maybe_rvalue (x
, true);
2090 /* Perform folding on expression X. */
/* NOTE(review): extraction damage -- some lines elided (e.g. the body of
   the processing_template_decl early exit).  */
2093 cp_fully_fold (tree x
)
/* No folding inside templates.  */
2095 if (processing_template_decl
)
2097 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2098 have to call both. */
2099 if (cxx_dialect
>= cxx11
)
2101 x
= maybe_constant_value (x
);
2102 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2103 a TARGET_EXPR; undo that here. */
2104 if (TREE_CODE (x
) == TARGET_EXPR
)
2105 x
= TARGET_EXPR_INITIAL (x
);
/* Likewise strip a same-type VIEW_CONVERT_EXPR wrapped around a
   CONSTRUCTOR.  */
2106 else if (TREE_CODE (x
) == VIEW_CONVERT_EXPR
2107 && TREE_CODE (TREE_OPERAND (x
, 0)) == CONSTRUCTOR
2108 && TREE_TYPE (TREE_OPERAND (x
, 0)) == TREE_TYPE (x
))
2109 x
= TREE_OPERAND (x
, 0);
2111 return cp_fold_rvalue (x
);
2114 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2115 and certain changes are made to the folding done. Or should be (FIXME). We
2116 never touch maybe_const, as it is only used for the C front-end
2117 C_MAYBE_CONST_EXPR. */
2120 c_fully_fold (tree x
, bool /*in_init*/, bool */
*maybe_const*/
, bool lval
)
/* LVAL is inverted: rvalue folding is wanted exactly when not an lvalue.  */
2122 return cp_fold_maybe_rvalue (x
, !lval
);
/* Memoization table for cp_fold results; GTY((deletable)) lets the GC
   discard it wholesale between collections.  */
2125 static GTY((deletable
)) hash_map
<tree
, tree
> *fold_cache
;
2127 /* Dispose of the whole FOLD_CACHE. */
2130 clear_fold_cache (void)
/* Empty rather than delete: the map object is reused by later folds.  */
2132 if (fold_cache
!= NULL
)
2133 fold_cache
->empty ();
2136 /* This function tries to fold an expression X.
2137 To avoid combinatorial explosion, folding results are kept in fold_cache.
2138 If X is invalid, we don't fold at all.
2139 For performance reasons we don't cache expressions representing a
2140 declaration or constant.
2141 Function returns X or its folded variant. */
2146 tree op0
, op1
, op2
, op3
;
2147 tree org_x
= x
, r
= NULL_TREE
;
2148 enum tree_code code
;
2150 bool rval_ops
= true;
2152 if (!x
|| x
== error_mark_node
)
2155 if (EXPR_P (x
) && (!TREE_TYPE (x
) || TREE_TYPE (x
) == error_mark_node
))
2158 /* Don't bother to cache DECLs or constants. */
2159 if (DECL_P (x
) || CONSTANT_CLASS_P (x
))
2162 if (fold_cache
== NULL
)
2163 fold_cache
= hash_map
<tree
, tree
>::create_ggc (101);
2165 if (tree
*cached
= fold_cache
->get (x
))
2168 code
= TREE_CODE (x
);
2171 case CLEANUP_POINT_EXPR
:
2172 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2174 r
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2175 if (!TREE_SIDE_EFFECTS (r
))
2180 x
= fold_sizeof_expr (x
);
2183 case VIEW_CONVERT_EXPR
:
2188 case NON_LVALUE_EXPR
:
2190 if (VOID_TYPE_P (TREE_TYPE (x
)))
2192 /* This is just to make sure we don't end up with casts to
2193 void from error_mark_node. If we just return x, then
2194 cp_fold_r might fold the operand into error_mark_node and
2195 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2196 during gimplification doesn't like such casts.
2197 Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2198 folding of the operand should be in the caches and if in cp_fold_r
2199 it will modify it in place. */
2200 op0
= cp_fold (TREE_OPERAND (x
, 0));
2201 if (op0
== error_mark_node
)
2202 x
= error_mark_node
;
2206 loc
= EXPR_LOCATION (x
);
2207 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2209 if (code
== CONVERT_EXPR
2210 && SCALAR_TYPE_P (TREE_TYPE (x
))
2211 && op0
!= void_node
)
2212 /* During parsing we used convert_to_*_nofold; re-convert now using the
2213 folding variants, since fold() doesn't do those transformations. */
2214 x
= fold (convert (TREE_TYPE (x
), op0
));
2215 else if (op0
!= TREE_OPERAND (x
, 0))
2217 if (op0
== error_mark_node
)
2218 x
= error_mark_node
;
2220 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
2225 /* Conversion of an out-of-range value has implementation-defined
2226 behavior; the language considers it different from arithmetic
2227 overflow, which is undefined. */
2228 if (TREE_CODE (op0
) == INTEGER_CST
2229 && TREE_OVERFLOW_P (x
) && !TREE_OVERFLOW_P (op0
))
2230 TREE_OVERFLOW (x
) = false;
2235 /* We don't need the decltype(auto) obfuscation anymore. */
2236 if (REF_PARENTHESIZED_P (x
))
2238 tree p
= maybe_undo_parenthesized_ref (x
);
2244 loc
= EXPR_LOCATION (x
);
2245 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), false);
2247 /* Cope with user tricks that amount to offsetof. */
2248 if (op0
!= error_mark_node
2249 && TREE_CODE (TREE_TYPE (op0
)) != FUNCTION_TYPE
2250 && TREE_CODE (TREE_TYPE (op0
)) != METHOD_TYPE
)
2252 tree val
= get_base_address (op0
);
2254 && INDIRECT_REF_P (val
)
2255 && COMPLETE_TYPE_P (TREE_TYPE (val
))
2256 && TREE_CONSTANT (TREE_OPERAND (val
, 0)))
2258 val
= TREE_OPERAND (val
, 0);
2260 if (TREE_CODE (val
) == INTEGER_CST
)
2261 return fold_offsetof (op0
, TREE_TYPE (x
));
2271 case FIX_TRUNC_EXPR
:
2276 case TRUTH_NOT_EXPR
:
2277 case FIXED_CONVERT_EXPR
:
2280 loc
= EXPR_LOCATION (x
);
2281 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2284 if (op0
!= TREE_OPERAND (x
, 0))
2286 if (op0
== error_mark_node
)
2287 x
= error_mark_node
;
2290 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
2291 if (code
== INDIRECT_REF
2292 && (INDIRECT_REF_P (x
) || TREE_CODE (x
) == MEM_REF
))
2294 TREE_READONLY (x
) = TREE_READONLY (org_x
);
2295 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
2296 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
2303 gcc_assert (TREE_CODE (x
) != COND_EXPR
2304 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x
, 0))));
2307 case UNARY_PLUS_EXPR
:
2308 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2309 if (op0
== error_mark_node
)
2310 x
= error_mark_node
;
2312 x
= fold_convert (TREE_TYPE (x
), op0
);
2315 case POSTDECREMENT_EXPR
:
2316 case POSTINCREMENT_EXPR
:
2318 case PREDECREMENT_EXPR
:
2319 case PREINCREMENT_EXPR
:
2324 case POINTER_PLUS_EXPR
:
2326 case POINTER_DIFF_EXPR
:
2329 case TRUNC_DIV_EXPR
:
2331 case FLOOR_DIV_EXPR
:
2332 case ROUND_DIV_EXPR
:
2333 case TRUNC_MOD_EXPR
:
2335 case ROUND_MOD_EXPR
:
2337 case EXACT_DIV_EXPR
:
2347 case TRUTH_AND_EXPR
:
2348 case TRUTH_ANDIF_EXPR
:
2350 case TRUTH_ORIF_EXPR
:
2351 case TRUTH_XOR_EXPR
:
2352 case LT_EXPR
: case LE_EXPR
:
2353 case GT_EXPR
: case GE_EXPR
:
2354 case EQ_EXPR
: case NE_EXPR
:
2355 case UNORDERED_EXPR
: case ORDERED_EXPR
:
2356 case UNLT_EXPR
: case UNLE_EXPR
:
2357 case UNGT_EXPR
: case UNGE_EXPR
:
2358 case UNEQ_EXPR
: case LTGT_EXPR
:
2359 case RANGE_EXPR
: case COMPLEX_EXPR
:
2361 loc
= EXPR_LOCATION (x
);
2362 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2363 op1
= cp_fold_rvalue (TREE_OPERAND (x
, 1));
2365 if (op0
!= TREE_OPERAND (x
, 0) || op1
!= TREE_OPERAND (x
, 1))
2367 if (op0
== error_mark_node
|| op1
== error_mark_node
)
2368 x
= error_mark_node
;
2370 x
= fold_build2_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
);
2375 if (TREE_NO_WARNING (org_x
)
2376 && warn_nonnull_compare
2377 && COMPARISON_CLASS_P (org_x
))
2379 if (x
== error_mark_node
|| TREE_CODE (x
) == INTEGER_CST
)
2381 else if (COMPARISON_CLASS_P (x
))
2382 TREE_NO_WARNING (x
) = 1;
2383 /* Otherwise give up on optimizing these, let GIMPLE folders
2384 optimize those later on. */
2385 else if (op0
!= TREE_OPERAND (org_x
, 0)
2386 || op1
!= TREE_OPERAND (org_x
, 1))
2388 x
= build2_loc (loc
, code
, TREE_TYPE (org_x
), op0
, op1
);
2389 TREE_NO_WARNING (x
) = 1;
2398 loc
= EXPR_LOCATION (x
);
2399 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2400 op1
= cp_fold (TREE_OPERAND (x
, 1));
2401 op2
= cp_fold (TREE_OPERAND (x
, 2));
2403 if (TREE_CODE (TREE_TYPE (x
)) == BOOLEAN_TYPE
)
2405 warning_sentinel
s (warn_int_in_bool_context
);
2406 if (!VOID_TYPE_P (TREE_TYPE (op1
)))
2407 op1
= cp_truthvalue_conversion (op1
);
2408 if (!VOID_TYPE_P (TREE_TYPE (op2
)))
2409 op2
= cp_truthvalue_conversion (op2
);
2411 else if (VOID_TYPE_P (TREE_TYPE (x
)))
2413 if (TREE_CODE (op0
) == INTEGER_CST
)
2415 /* If the condition is constant, fold can fold away
2416 the COND_EXPR. If some statement-level uses of COND_EXPR
2417 have one of the branches NULL, avoid folding crash. */
2419 op1
= build_empty_stmt (loc
);
2421 op2
= build_empty_stmt (loc
);
2425 /* Otherwise, don't bother folding a void condition, since
2426 it can't produce a constant value. */
2427 if (op0
!= TREE_OPERAND (x
, 0)
2428 || op1
!= TREE_OPERAND (x
, 1)
2429 || op2
!= TREE_OPERAND (x
, 2))
2430 x
= build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
2435 if (op0
!= TREE_OPERAND (x
, 0)
2436 || op1
!= TREE_OPERAND (x
, 1)
2437 || op2
!= TREE_OPERAND (x
, 2))
2439 if (op0
== error_mark_node
2440 || op1
== error_mark_node
2441 || op2
== error_mark_node
)
2442 x
= error_mark_node
;
2444 x
= fold_build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
2449 /* A COND_EXPR might have incompatible types in branches if one or both
2450 arms are bitfields. If folding exposed such a branch, fix it up. */
2451 if (TREE_CODE (x
) != code
2452 && x
!= error_mark_node
2453 && !useless_type_conversion_p (TREE_TYPE (org_x
), TREE_TYPE (x
)))
2454 x
= fold_convert (TREE_TYPE (org_x
), x
);
2460 int i
, m
, sv
= optimize
, nw
= sv
, changed
= 0;
2461 tree callee
= get_callee_fndecl (x
);
2463 /* Some built-in function calls will be evaluated at compile-time in
2464 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2465 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2466 if (callee
&& DECL_BUILT_IN (callee
) && !optimize
2467 && DECL_IS_BUILTIN_CONSTANT_P (callee
)
2468 && current_function_decl
2469 && DECL_DECLARED_CONSTEXPR_P (current_function_decl
))
2474 m
= call_expr_nargs (x
);
2475 for (i
= 0; i
< m
; i
++)
2477 r
= cp_fold (CALL_EXPR_ARG (x
, i
));
2478 if (r
!= CALL_EXPR_ARG (x
, i
))
2480 if (r
== error_mark_node
)
2482 x
= error_mark_node
;
2487 CALL_EXPR_ARG (x
, i
) = r
;
2489 if (x
== error_mark_node
)
2496 if (TREE_CODE (r
) != CALL_EXPR
)
2504 /* Invoke maybe_constant_value for functions declared
2505 constexpr and not called with AGGR_INIT_EXPRs.
2507 Do constexpr expansion of expressions where the call itself is not
2508 constant, but the call followed by an INDIRECT_REF is. */
2509 if (callee
&& DECL_DECLARED_CONSTEXPR_P (callee
)
2511 r
= maybe_constant_value (x
);
2514 if (TREE_CODE (r
) != CALL_EXPR
)
2516 if (DECL_CONSTRUCTOR_P (callee
))
2518 loc
= EXPR_LOCATION (x
);
2519 tree s
= build_fold_indirect_ref_loc (loc
,
2520 CALL_EXPR_ARG (x
, 0));
2521 r
= build2_loc (loc
, INIT_EXPR
, TREE_TYPE (s
), s
, r
);
2536 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (x
);
2537 vec
<constructor_elt
, va_gc
> *nelts
= NULL
;
2538 FOR_EACH_VEC_SAFE_ELT (elts
, i
, p
)
2540 tree op
= cp_fold (p
->value
);
2543 if (op
== error_mark_node
)
2545 x
= error_mark_node
;
2550 nelts
= elts
->copy ();
2551 (*nelts
)[i
].value
= op
;
2556 x
= build_constructor (TREE_TYPE (x
), nelts
);
2557 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x
)
2558 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x
);
2560 if (VECTOR_TYPE_P (TREE_TYPE (x
)))
2566 bool changed
= false;
2567 vec
<tree
, va_gc
> *vec
= make_tree_vector ();
2568 int i
, n
= TREE_VEC_LENGTH (x
);
2569 vec_safe_reserve (vec
, n
);
2571 for (i
= 0; i
< n
; i
++)
2573 tree op
= cp_fold (TREE_VEC_ELT (x
, i
));
2574 vec
->quick_push (op
);
2575 if (op
!= TREE_VEC_ELT (x
, i
))
2582 for (i
= 0; i
< n
; i
++)
2583 TREE_VEC_ELT (r
, i
) = (*vec
)[i
];
2587 release_tree_vector (vec
);
2593 case ARRAY_RANGE_REF
:
2595 loc
= EXPR_LOCATION (x
);
2596 op0
= cp_fold (TREE_OPERAND (x
, 0));
2597 op1
= cp_fold (TREE_OPERAND (x
, 1));
2598 op2
= cp_fold (TREE_OPERAND (x
, 2));
2599 op3
= cp_fold (TREE_OPERAND (x
, 3));
2601 if (op0
!= TREE_OPERAND (x
, 0)
2602 || op1
!= TREE_OPERAND (x
, 1)
2603 || op2
!= TREE_OPERAND (x
, 2)
2604 || op3
!= TREE_OPERAND (x
, 3))
2606 if (op0
== error_mark_node
2607 || op1
== error_mark_node
2608 || op2
== error_mark_node
2609 || op3
== error_mark_node
)
2610 x
= error_mark_node
;
2613 x
= build4_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
, op3
);
2614 TREE_READONLY (x
) = TREE_READONLY (org_x
);
2615 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
2616 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
2624 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2625 folding, evaluates to an invariant. In that case no need to wrap
2626 this folded tree with a SAVE_EXPR. */
2627 r
= cp_fold (TREE_OPERAND (x
, 0));
2628 if (tree_invariant_p (r
))
2636 fold_cache
->put (org_x
, x
);
2637 /* Prevent that we try to fold an already folded result again. */
2639 fold_cache
->put (x
, x
);
2644 #include "gt-cp-cp-gimplify.h"