/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree *, bool);
static tree cp_fold (tree);
/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}
/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
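/* Illustrative sketch, not part of the original source: with these
   helpers, a "break" inside a loop body lowers to a plain jump,

       break;   ==>   goto <break_label>;

   where <break_label> is the label begin_bc_block pushed for the
   innermost breakable scope and finish_bc_block later emits.  */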
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
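/* Illustrative sketch (assumed tree shapes, for orientation only):

       try { f (); } catch (E &e) { g (); }

   is lowered by the two functions above to roughly

       TRY_CATCH_EXPR <f ();, CATCH_EXPR <E &, g ();>>  */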
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
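/* Illustrative sketch (assumed shape): for a dynamic exception
   specification such as

       void f () throw (A) { body }

   the EH_SPEC_BLOCK becomes roughly

       TRY_CATCH_EXPR <body,
	 EH_FILTER_EXPR <allowed = {A},
			 failure = call_unexpected_fn (<exc-ptr>)>>  */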
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
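/* Illustrative sketch (assumed shapes):

       if (x) f (); else g ();   ==>   COND_EXPR <x, f ();, g ();>
       if (1) f (); else g ();   ==>   f ();

   the second form applies only when the dead arm has no side effects,
   as checked above.  */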
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
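/* Illustrative sketch (assumed shape): with COND_IS_FIRST set,

       while (c) body;

   lowers to roughly

       LOOP_EXPR <{ if (c) ; else goto <break_lab>;
		    body;
		    <continue_lab>:; }>
       <break_lab>:;

   with COND_IS_FIRST clear (do-while), the exit test is emitted after
   BODY and INCR instead.  */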
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}
/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}
/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}
/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}
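/* Illustrative sketch (assumed shape):

       switch (x) { case 0: break; }

   becomes a SWITCH_EXPR whose body jumps to the shared break label,

       SWITCH_EXPR <type, x, { case 0: goto <break_lab>; <break_lab>:; }>  */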
/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}
/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
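/* Illustrative sketch (assumed example): a full statement like "x + 1;"
   has no side effects and a non-void type, so the warning branch above
   emits -Wunused-value's "statement with no effect" for it.  */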
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
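/* Illustrative sketch (assumed tree shapes): for

       S s = make_s ();

   the initializer arrives as roughly

       INIT_EXPR <s, TARGET_EXPR <tmp, AGGR_INIT_EXPR <make_s, ..., tmp>>>

   and the slot rewrite above makes the call construct "s" directly,
   eliding "tmp".  */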
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
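/* Illustrative sketch (assumed behavior): the MUST_NOT_THROW_EXPR
   wrapping, e.g., a noexcept function body behaves like

       try { body } catch (...) { terminate (); }

   expressed as a GIMPLE_TRY whose handler is GIMPLE_EH_MUST_NOT_THROW.  */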
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
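/* Illustrative sketch (assumed example): given "struct E {};", the
   assignment in "E a, b; a = b;" copies no data, so the check above
   lets cp_gimplify_expr drop the copy entirely.  */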
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
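/* Illustrative sketch (assumed examples): the lvalue "a[f ()]" has
   side effects (the call in the index), while a plain "volatile int v"
   used as an lvalue does not until it is actually read or written.  */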
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
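/* Illustrative sketch (assumed example of the P0145 preevaluation in
   the MODIFY_EXPR case above): in

       *p++ = g ();

   the call g () must be evaluated before the LHS, so the RHS is forced
   into a temporary when the LHS has side effects.  */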
/* Return true if T is a PARM_DECL or RESULT_DECL passed or returned by
   invisible reference.  */

bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}
/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle-end.  As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR
			|| TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);
  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }
  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }
  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }
  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;
    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;
    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;
      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
	 to lower this construct before scanning it, so we need to lower these
	 before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;
    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
      /* COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;
    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      *walk_subtrees = 0;
      break;
    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;
    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;
    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;
    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;
    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;
    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
		&& warning_at (loc, OPT_Wterminate,
			       "throw will always call terminate()")
		&& cxx_dialect >= cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl))
	      inform (loc, "in C++11 destructors default to noexcept");
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;
    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;
    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;
    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;
    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;
    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;
    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;
    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree *t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;

  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
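/* Illustrative sketch (assumed behavior): for

       int f (bool b) { if (b) return 1; }

   the path that falls off the end gets either a -fsanitize=return
   runtime diagnostic or a __builtin_unreachable () appended.  */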
/* Genericize the body of FNDECL, fixing up parameters and the return
   value that are passed by invisible reference.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}
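/* Illustrative sketch (assumed example): for a class with a nontrivial
   copy constructor,

       struct S { S (const S &); };
       S f (S s);

   both the parameter and the return value are TREE_ADDRESSABLE, so
   cp_genericize retypes them as references and sets DECL_BY_REFERENCE,
   as done above.  */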
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}
/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
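/* Illustrative note (assumed slot layout, inferred from the callers
   above): CP_OMP_CLAUSE_INFO (clause) appears to be a TREE_VEC with
   the default/copy constructor in slot 0, the destructor in slot 1
   and the assignment operator in slot 2.  */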
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TYPE_REF_P (TREE_TYPE (decl))
	  || is_invisiref_parm (decl));
}
/* Return true if DECL is a const-qualified variable having no mutable
   member.  */

static bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1 (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const-qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Likewise, but also include the artificial vars.  We don't want to
   disallow the artificial vars being mentioned in explicit clauses,
   as we use artificial vars e.g. for loop constructs with random
   access iterators other than pointers, but during gimplification
   we want to treat them as predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return ret;

  /* Predetermine artificial variables holding integral values, those
     are usually result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
	 && VAR_P (decl)
	 && DECL_HAS_VALUE_EXPR_P (decl)
	 && DECL_ARTIFICIAL (decl)
	 && DECL_LANG_SPECIFIC (decl)
	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
}
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
	  && !TYPE_REF_P (TREE_TYPE (x)))
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}
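/* Illustrative sketch (assumed example): with

       const int n = 42;

   an rvalue use of "n" is replaced by 42 via decl_constant_value,
   so expressions like "n + 1" can fold to a constant.  */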
/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}
/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  return cp_fold_rvalue (x);
}
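/* Illustrative sketch (assumed example): "2 + 3" reaches cp_fully_fold
   as PLUS_EXPR <2, 3> and comes back as the INTEGER_CST 5; in C++11 and
   later, maybe_constant_value may also evaluate constexpr calls first.  */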
/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool * /*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}
static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
2135 /* This function tries to fold an expression X.
2136 To avoid combinatorial explosion, folding results are kept in fold_cache.
2137 If X is invalid, we don't fold at all.
2138 For performance reasons we don't cache expressions representing a
2139 declaration or constant.
2140 Function returns X or its folded variant. */
2145 tree op0
, op1
, op2
, op3
;
2146 tree org_x
= x
, r
= NULL_TREE
;
2147 enum tree_code code
;
2149 bool rval_ops
= true;
2151 if (!x
|| x
== error_mark_node
)
2154 if (EXPR_P (x
) && (!TREE_TYPE (x
) || TREE_TYPE (x
) == error_mark_node
))
2157 /* Don't bother to cache DECLs or constants. */
2158 if (DECL_P (x
) || CONSTANT_CLASS_P (x
))
2161 if (fold_cache
== NULL
)
2162 fold_cache
= hash_map
<tree
, tree
>::create_ggc (101);
2164 if (tree
*cached
= fold_cache
->get (x
))
2167 code
= TREE_CODE (x
);
2170 case CLEANUP_POINT_EXPR
:
2171 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2173 r
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2174 if (!TREE_SIDE_EFFECTS (r
))
2179 x
= fold_sizeof_expr (x
);
    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
	     folding of the operand should already be in the caches, and
	     if called from cp_fold_r it will be modified in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0));
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold () doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;
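
/* Illustrative example (editorial addition): a conversion of an
   out-of-range constant such as

     signed char c = (signed char) 300;

   produces an INTEGER_CST whose TREE_OVERFLOW flag fold set; since such
   a conversion is implementation-defined rather than undefined, the code
   above clears the flag so later diagnostics don't mistake it for
   arithmetic overflow.  */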
    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  return cp_fold (p);
	}
      goto unary;
    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);

      /* Cope with user tricks that amount to offsetof.  */
      if (op0 != error_mark_node
	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;
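
/* Illustrative example (editorial addition): the hand-rolled offsetof
   idiom

     struct S { int a, b; };
     unsigned long off = (unsigned long) &((struct S *) 0)->b;

   dereferences a constant pointer and takes the address of the result;
   the checks above detect that shape and let fold_offsetof compute the
   constant (4 here, assuming a 4-byte int and no padding).  */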
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;
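
/* Illustrative example (editorial addition): for a unary expression
   such as

     int f (int i) { return -(-i); }

   folding the operand first and rebuilding with fold_build1_loc lets
   the double negation cancel.  When an INDIRECT_REF is rebuilt, the
   readonly/side-effects/volatile flags are copied from the original so
   e.g. a volatile dereference stays volatile.  */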
    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      if (TREE_NO_WARNING (org_x)
	  && warn_nonnull_compare
	  && COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    TREE_NO_WARNING (x) = 1;
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      TREE_NO_WARNING (x) = 1;
	    }
	  else
	    x = org_x;
	}
      break;
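
/* Illustrative example (editorial addition): with -Wnonnull-compare,

     void f (int *p) __attribute__ ((nonnull));
     void f (int *p) { if (p == 0) return; }

   reaches this point with TREE_NO_WARNING set on the EQ_EXPR; the logic
   above propagates that flag to whatever tree folding produces so the
   same comparison isn't diagnosed a second time later on.  */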
    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;
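
/* Illustrative example (editorial addition): a COND_EXPR of boolean
   type such as

     bool b = flag ? x : y;

   has both arms passed through cp_truthvalue_conversion under the
   warn_int_in_bool_context sentinel, while a statement-level void
   COND_EXPR with a constant condition gets any missing arm replaced by
   an empty statement so fold can discard the dead branch safely.  */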
    case CALL_EXPR:
      {
	int i, m, sv = optimize, nw = sv, changed = 0;
	tree callee = get_callee_fndecl (x);

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && DECL_BUILT_IN (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	x = copy_node (x);

	m = call_expr_nargs (x);
	for (i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		changed = 1;
	      }
	    CALL_EXPR_ARG (x, i) = r;
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold_call_expr (EXPR_LOCATION (x), x, false);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	if (!changed)
	  x = org_x;
	break;
      }
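
/* Illustrative example (editorial addition): for

     constexpr int sq (int i) { return i * i; }
     int a = sq (7);

   the argument is folded, fold_call_expr handles built-ins, and
   maybe_constant_value evaluates the constexpr call to 49.  The
   __builtin_constant_p special case temporarily raises `optimize' so
   that inside a constexpr function at -O0 the built-in isn't folded
   prematurely to 0.  */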
    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
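
/* Illustrative example (editorial addition): an initializer such as

     int v[2] = { 2 + 3, 4 * 5 };

   has each element value folded to 5 and 20; a new CONSTRUCTOR is built
   from the copied element vector only if some element actually changed,
   and vector constants get one more round of generic folding.  */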
    case TREE_VEC:
      {
	bool changed = false;
	vec<tree, va_gc> *vec = make_tree_vector ();
	int i, n = TREE_VEC_LENGTH (x);
	vec_safe_reserve (vec, n);

	for (i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    vec->quick_push (op);
	    if (op != TREE_VEC_ELT (x, i))
	      changed = true;
	  }

	if (changed)
	  {
	    r = copy_node (x);
	    for (i = 0; i < n; i++)
	      TREE_VEC_ELT (r, i) = (*vec)[i];
	    x = r;
	  }

	release_tree_vector (vec);
      }

      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;
    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;
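
/* Illustrative example (editorial addition): in

     int f (int i, int j) { return (0 * i) + (0 * j); }

   the SAVE_EXPR operand folds to the invariant 0, so the test above
   drops the SAVE_EXPR wrapper entirely rather than saving a value that
   no longer needs protecting.  */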
    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Make sure we don't fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"