/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"
46 /* Forward declarations. */
48 static tree
cp_genericize_r (tree
*, int *, void *);
49 static tree
cp_fold_r (tree
*, int *, void *);
50 static void cp_genericize_tree (tree
*, bool);
51 static tree
cp_fold (tree
);
53 /* Genericize a TRY_BLOCK. */
56 genericize_try_block (tree
*stmt_p
)
58 tree body
= TRY_STMTS (*stmt_p
);
59 tree cleanup
= TRY_HANDLERS (*stmt_p
);
61 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
64 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
67 genericize_catch_block (tree
*stmt_p
)
69 tree type
= HANDLER_TYPE (*stmt_p
);
70 tree body
= HANDLER_BODY (*stmt_p
);
72 /* FIXME should the caught type go in TREE_TYPE? */
73 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
76 /* A terser interface for building a representation of an exception
80 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
84 /* FIXME should the allowed types go in TREE_TYPE? */
85 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
86 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
88 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
89 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
94 /* Genericize an EH_SPEC_BLOCK by converting it to a
95 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
98 genericize_eh_spec_block (tree
*stmt_p
)
100 tree body
= EH_SPEC_STMTS (*stmt_p
);
101 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
102 tree failure
= build_call_n (call_unexpected_fn
, 1, build_exc_ptr ());
104 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
105 suppress_warning (*stmt_p
);
106 suppress_warning (TREE_OPERAND (*stmt_p
, 1));
109 /* Return the first non-compound statement in STMT. */
112 first_stmt (tree stmt
)
114 switch (TREE_CODE (stmt
))
117 if (tree_statement_list_node
*p
= STATEMENT_LIST_HEAD (stmt
))
118 return first_stmt (p
->stmt
);
122 return first_stmt (BIND_EXPR_BODY (stmt
));
129 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
132 genericize_if_stmt (tree
*stmt_p
)
134 tree stmt
, cond
, then_
, else_
;
135 location_t locus
= EXPR_LOCATION (*stmt_p
);
138 cond
= IF_COND (stmt
);
139 then_
= THEN_CLAUSE (stmt
);
140 else_
= ELSE_CLAUSE (stmt
);
144 tree ft
= first_stmt (then_
);
145 tree fe
= first_stmt (else_
);
147 if (TREE_CODE (ft
) == PREDICT_EXPR
148 && TREE_CODE (fe
) == PREDICT_EXPR
149 && (pr
= PREDICT_EXPR_PREDICTOR (ft
)) == PREDICT_EXPR_PREDICTOR (fe
)
150 && (pr
== PRED_HOT_LABEL
|| pr
== PRED_COLD_LABEL
))
152 gcc_rich_location
richloc (EXPR_LOC_OR_LOC (ft
, locus
));
153 richloc
.add_range (EXPR_LOC_OR_LOC (fe
, locus
));
154 warning_at (&richloc
, OPT_Wattributes
,
155 "both branches of %<if%> statement marked as %qs",
156 pr
== PRED_HOT_LABEL
? "likely" : "unlikely");
161 then_
= build_empty_stmt (locus
);
163 else_
= build_empty_stmt (locus
);
165 /* consteval if has been verified not to have the then_/else_ blocks
166 entered by gotos/case labels from elsewhere, and as then_ block
167 can contain unfolded immediate function calls, we have to discard
168 the then_ block regardless of whether else_ has side-effects or not. */
169 if (IF_STMT_CONSTEVAL_P (stmt
))
171 if (block_may_fallthru (then_
))
172 stmt
= build3 (COND_EXPR
, void_type_node
, boolean_false_node
,
177 else if (IF_STMT_CONSTEXPR_P (stmt
))
178 stmt
= integer_nonzerop (cond
) ? then_
: else_
;
180 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
181 protected_set_expr_location_if_unset (stmt
, locus
);
185 /* Hook into the middle of gimplifying an OMP_FOR node. */
187 static enum gimplify_status
188 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
190 tree for_stmt
= *expr_p
;
191 gimple_seq seq
= NULL
;
193 /* Protect ourselves from recursion. */
194 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
196 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
198 gimplify_and_add (for_stmt
, &seq
);
199 gimple_seq_add_seq (pre_p
, seq
);
201 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
206 /* Gimplify an EXPR_STMT node. */
209 gimplify_expr_stmt (tree
*stmt_p
)
211 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
213 if (stmt
== error_mark_node
)
216 /* Gimplification of a statement expression will nullify the
217 statement if all its side effects are moved to *PRE_P and *POST_P.
219 In this case we will not want to emit the gimplified statement.
220 However, we may still want to emit a warning, so we do that before
222 if (stmt
&& warn_unused_value
)
224 if (!TREE_SIDE_EFFECTS (stmt
))
226 if (!IS_EMPTY_STMT (stmt
)
227 && !VOID_TYPE_P (TREE_TYPE (stmt
))
228 && !warning_suppressed_p (stmt
, OPT_Wunused_value
))
229 warning (OPT_Wunused_value
, "statement with no effect");
232 warn_if_unused_value (stmt
, input_location
);
235 if (stmt
== NULL_TREE
)
236 stmt
= alloc_stmt_list ();
241 /* Gimplify initialization from an AGGR_INIT_EXPR. */
244 cp_gimplify_init_expr (tree
*expr_p
)
246 tree from
= TREE_OPERAND (*expr_p
, 1);
247 tree to
= TREE_OPERAND (*expr_p
, 0);
250 if (TREE_CODE (from
) == TARGET_EXPR
)
251 if (tree init
= TARGET_EXPR_INITIAL (from
))
253 gcc_checking_assert (TARGET_EXPR_ELIDING_P (from
));
254 if (target_expr_needs_replace (from
))
256 /* If this was changed by cp_genericize_target_expr, we need to
257 walk into it to replace uses of the slot. */
258 replace_decl (&init
, TARGET_EXPR_SLOT (from
), to
);
266 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
267 inside the TARGET_EXPR. */
270 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
272 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
273 replace the slot operand with our target.
275 Should we add a target parm to gimplify_expr instead? No, as in this
276 case we want to replace the INIT_EXPR. */
277 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
278 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
280 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
281 AGGR_INIT_EXPR_SLOT (sub
) = to
;
283 VEC_INIT_EXPR_SLOT (sub
) = to
;
286 /* The initialization is now a side-effect, so the container can
289 TREE_TYPE (from
) = void_type_node
;
292 /* Handle aggregate NSDMI. */
293 replace_placeholders (sub
, to
);
298 t
= TREE_OPERAND (t
, 1);
303 /* Gimplify a MUST_NOT_THROW_EXPR. */
305 static enum gimplify_status
306 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
309 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
310 tree body
= TREE_OPERAND (stmt
, 0);
311 gimple_seq try_
= NULL
;
312 gimple_seq catch_
= NULL
;
315 gimplify_and_add (body
, &try_
);
316 mnt
= gimple_build_eh_must_not_throw (terminate_fn
);
317 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
318 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
320 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
331 /* Return TRUE if an operand (OP) of a given TYPE being copied is
332 really just an empty class copy.
334 Check that the operand has a simple form so that TARGET_EXPRs and
335 non-empty CONSTRUCTORs get reduced properly, and we leave the
336 return slot optimization alone because it isn't a copy. */
339 simple_empty_class_p (tree type
, tree op
, tree_code code
)
341 if (TREE_CODE (op
) == COMPOUND_EXPR
)
342 return simple_empty_class_p (type
, TREE_OPERAND (op
, 1), code
);
343 if (SIMPLE_TARGET_EXPR_P (op
)
344 && TYPE_HAS_TRIVIAL_DESTRUCTOR (type
))
345 /* The TARGET_EXPR is itself a simple copy, look through it. */
346 return simple_empty_class_p (type
, TARGET_EXPR_INITIAL (op
), code
);
348 if (TREE_CODE (op
) == PARM_DECL
349 && TREE_ADDRESSABLE (TREE_TYPE (op
)))
351 tree fn
= DECL_CONTEXT (op
);
352 if (DECL_THUNK_P (fn
)
353 || lambda_static_thunk_p (fn
))
354 /* In a thunk, we pass through invisible reference parms, so this isn't
360 (TREE_CODE (op
) == EMPTY_CLASS_EXPR
361 || code
== MODIFY_EXPR
362 || is_gimple_lvalue (op
)
363 || INDIRECT_REF_P (op
)
364 || (TREE_CODE (op
) == CONSTRUCTOR
365 && CONSTRUCTOR_NELTS (op
) == 0)
366 || (TREE_CODE (op
) == CALL_EXPR
367 && !CALL_EXPR_RETURN_SLOT_OPT (op
)))
368 && !TREE_CLOBBER_P (op
)
369 && is_really_empty_class (type
, /*ignore_vptr*/true);
372 /* Returns true if evaluating E as an lvalue has side-effects;
373 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
374 have side-effects until there is a read or write through it. */
377 lvalue_has_side_effects (tree e
)
379 if (!TREE_SIDE_EFFECTS (e
))
381 while (handled_component_p (e
))
383 if (TREE_CODE (e
) == ARRAY_REF
384 && TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 1)))
386 e
= TREE_OPERAND (e
, 0);
389 /* Just naming a variable has no side-effects. */
391 else if (INDIRECT_REF_P (e
))
392 /* Similarly, indirection has no side-effects. */
393 return TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 0));
395 /* For anything else, trust TREE_SIDE_EFFECTS. */
396 return TREE_SIDE_EFFECTS (e
);
399 /* Gimplify *EXPR_P as rvalue into an expression that can't be modified
400 by expressions with side-effects in other operands. */
402 static enum gimplify_status
403 gimplify_to_rvalue (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
404 bool (*gimple_test_f
) (tree
))
406 enum gimplify_status t
407 = gimplify_expr (expr_p
, pre_p
, post_p
, gimple_test_f
, fb_rvalue
);
410 else if (is_gimple_variable (*expr_p
) && TREE_CODE (*expr_p
) != SSA_NAME
)
411 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
);
415 /* Like gimplify_arg, but if ORDERED is set (which should be set if
416 any of the arguments this argument is sequenced before has
417 TREE_SIDE_EFFECTS set, make sure expressions with is_gimple_reg_type type
418 are gimplified into SSA_NAME or a fresh temporary and for
419 non-is_gimple_reg_type we don't optimize away TARGET_EXPRs. */
421 static enum gimplify_status
422 cp_gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
425 enum gimplify_status t
;
427 && !is_gimple_reg_type (TREE_TYPE (*arg_p
))
428 && TREE_CODE (*arg_p
) == TARGET_EXPR
)
430 /* gimplify_arg would strip away the TARGET_EXPR, but
431 that can mean we don't copy the argument and some following
432 argument with side-effect could modify it. */
433 protected_set_expr_location (*arg_p
, call_location
);
434 return gimplify_expr (arg_p
, pre_p
, NULL
, is_gimple_lvalue
, fb_either
);
438 t
= gimplify_arg (arg_p
, pre_p
, call_location
);
442 && is_gimple_reg_type (TREE_TYPE (*arg_p
))
443 && is_gimple_variable (*arg_p
)
444 && TREE_CODE (*arg_p
) != SSA_NAME
445 /* No need to force references into register, references
446 can't be modified. */
447 && !TYPE_REF_P (TREE_TYPE (*arg_p
))
448 /* And this can't be modified either. */
449 && *arg_p
!= current_class_ptr
)
450 *arg_p
= get_initialized_tmp_var (*arg_p
, pre_p
);
456 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
459 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
461 int saved_stmts_are_full_exprs_p
= 0;
462 location_t loc
= cp_expr_loc_or_input_loc (*expr_p
);
463 enum tree_code code
= TREE_CODE (*expr_p
);
464 enum gimplify_status ret
;
466 if (STATEMENT_CODE_P (code
))
468 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
469 current_stmt_tree ()->stmts_are_full_exprs_p
470 = STMT_IS_FULL_EXPR_P (*expr_p
);
476 simplify_aggr_init_expr (expr_p
);
482 *expr_p
= expand_vec_init_expr (NULL_TREE
, *expr_p
,
483 tf_warning_or_error
);
486 cp_walk_tree (expr_p
, cp_fold_r
, &pset
, NULL
);
487 cp_genericize_tree (expr_p
, false);
488 copy_if_shared (expr_p
);
494 /* FIXME communicate throw type to back end, probably by moving
495 THROW_EXPR into ../tree.def. */
496 *expr_p
= TREE_OPERAND (*expr_p
, 0);
500 case MUST_NOT_THROW_EXPR
:
501 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
504 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
505 LHS of an assignment might also be involved in the RHS, as in bug
508 cp_gimplify_init_expr (expr_p
);
509 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
515 /* If the back end isn't clever enough to know that the lhs and rhs
516 types are the same, add an explicit conversion. */
517 tree op0
= TREE_OPERAND (*expr_p
, 0);
518 tree op1
= TREE_OPERAND (*expr_p
, 1);
520 if (!error_operand_p (op0
)
521 && !error_operand_p (op1
)
522 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
523 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
524 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
525 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
526 TREE_TYPE (op0
), op1
);
528 else if (simple_empty_class_p (TREE_TYPE (op0
), op1
, code
))
530 while (TREE_CODE (op1
) == TARGET_EXPR
)
531 /* We're disconnecting the initializer from its target,
532 don't create a temporary. */
533 op1
= TARGET_EXPR_INITIAL (op1
);
535 /* Remove any copies of empty classes. Also drop volatile
536 variables on the RHS to avoid infinite recursion from
537 gimplify_expr trying to load the value. */
538 if (TREE_SIDE_EFFECTS (op1
))
540 if (TREE_THIS_VOLATILE (op1
)
541 && (REFERENCE_CLASS_P (op1
) || DECL_P (op1
)))
542 op1
= build_fold_addr_expr (op1
);
544 gimplify_and_add (op1
, pre_p
);
546 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
547 is_gimple_lvalue
, fb_lvalue
);
548 *expr_p
= TREE_OPERAND (*expr_p
, 0);
549 if (code
== RETURN_EXPR
&& REFERENCE_CLASS_P (*expr_p
))
550 /* Avoid 'return *<retval>;' */
551 *expr_p
= TREE_OPERAND (*expr_p
, 0);
553 /* P0145 says that the RHS is sequenced before the LHS.
554 gimplify_modify_expr gimplifies the RHS before the LHS, but that
555 isn't quite strong enough in two cases:
557 1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
558 mean it's evaluated after the LHS.
560 2) the value calculation of the RHS is also sequenced before the
561 LHS, so for scalar assignment we need to preevaluate if the
562 RHS could be affected by LHS side-effects even if it has no
563 side-effects of its own. We don't need this for classes because
564 class assignment takes its RHS by reference. */
565 else if (flag_strong_eval_order
> 1
566 && TREE_CODE (*expr_p
) == MODIFY_EXPR
567 && lvalue_has_side_effects (op0
)
568 && (TREE_CODE (op1
) == CALL_EXPR
569 || (SCALAR_TYPE_P (TREE_TYPE (op1
))
570 && !TREE_CONSTANT (op1
))))
571 TREE_OPERAND (*expr_p
, 1) = get_initialized_tmp_var (op1
, pre_p
);
576 case EMPTY_CLASS_EXPR
:
577 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
578 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
583 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
588 genericize_try_block (expr_p
);
593 genericize_catch_block (expr_p
);
598 genericize_eh_spec_block (expr_p
);
618 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
622 gimplify_expr_stmt (expr_p
);
626 case UNARY_PLUS_EXPR
:
628 tree arg
= TREE_OPERAND (*expr_p
, 0);
629 tree type
= TREE_TYPE (*expr_p
);
630 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
638 if (flag_strong_eval_order
== 2
639 && CALL_EXPR_FN (*expr_p
)
640 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p
)
641 && cp_get_callee_fndecl_nofold (*expr_p
) == NULL_TREE
)
643 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
644 enum gimplify_status t
645 = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
646 is_gimple_call_addr
);
649 /* GIMPLE considers most pointer conversion useless, but for
650 calls we actually care about the exact function pointer type. */
651 else if (TREE_TYPE (CALL_EXPR_FN (*expr_p
)) != fnptrtype
)
652 CALL_EXPR_FN (*expr_p
)
653 = build1 (NOP_EXPR
, fnptrtype
, CALL_EXPR_FN (*expr_p
));
655 if (!CALL_EXPR_FN (*expr_p
))
656 /* Internal function call. */;
657 else if (CALL_EXPR_REVERSE_ARGS (*expr_p
))
659 /* This is a call to a (compound) assignment operator that used
660 the operator syntax; gimplify the RHS first. */
661 gcc_assert (call_expr_nargs (*expr_p
) == 2);
662 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p
));
663 enum gimplify_status t
664 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 1), pre_p
, loc
,
665 TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p
, 0)));
669 else if (CALL_EXPR_ORDERED_ARGS (*expr_p
))
671 /* Leave the last argument for gimplify_call_expr, to avoid problems
672 with __builtin_va_arg_pack(). */
673 int nargs
= call_expr_nargs (*expr_p
) - 1;
674 int last_side_effects_arg
= -1;
675 for (int i
= nargs
; i
> 0; --i
)
676 if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p
, i
)))
678 last_side_effects_arg
= i
;
681 for (int i
= 0; i
< nargs
; ++i
)
683 enum gimplify_status t
684 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
, loc
,
685 i
< last_side_effects_arg
);
690 else if (flag_strong_eval_order
691 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p
))
693 /* If flag_strong_eval_order, evaluate the object argument first. */
694 tree fntype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
695 if (INDIRECT_TYPE_P (fntype
))
696 fntype
= TREE_TYPE (fntype
);
697 if (TREE_CODE (fntype
) == METHOD_TYPE
)
699 int nargs
= call_expr_nargs (*expr_p
);
700 bool side_effects
= false;
701 for (int i
= 1; i
< nargs
; ++i
)
702 if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p
, i
)))
707 enum gimplify_status t
708 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 0), pre_p
, loc
,
716 tree decl
= cp_get_callee_fndecl_nofold (*expr_p
);
717 if (decl
&& fndecl_built_in_p (decl
, BUILT_IN_FRONTEND
))
718 switch (DECL_FE_FUNCTION_CODE (decl
))
720 case CP_BUILT_IN_IS_CONSTANT_EVALUATED
:
721 *expr_p
= boolean_false_node
;
723 case CP_BUILT_IN_SOURCE_LOCATION
:
725 = fold_builtin_source_location (EXPR_LOCATION (*expr_p
));
727 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER
:
729 = fold_builtin_is_corresponding_member
730 (EXPR_LOCATION (*expr_p
), call_expr_nargs (*expr_p
),
731 &CALL_EXPR_ARG (*expr_p
, 0));
733 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS
:
735 = fold_builtin_is_pointer_inverconvertible_with_class
736 (EXPR_LOCATION (*expr_p
), call_expr_nargs (*expr_p
),
737 &CALL_EXPR_ARG (*expr_p
, 0));
746 /* A TARGET_EXPR that expresses direct-initialization should have been
747 elided by cp_gimplify_init_expr. */
748 gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p
));
749 /* Likewise, but allow extra temps of trivial type so that
750 gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
751 on the rhs of an assignment, as in constexpr-aggr1.C. */
752 gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p
)
753 || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)));
758 *expr_p
= cplus_expand_constant (*expr_p
);
759 if (TREE_CODE (*expr_p
) == PTRMEM_CST
)
766 if (TREE_OPERAND (*expr_p
, 0)
767 && (TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == INIT_EXPR
768 || TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == MODIFY_EXPR
))
770 expr_p
= &TREE_OPERAND (*expr_p
, 0);
771 /* Avoid going through the INIT_EXPR case, which can
772 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
773 goto modify_expr_case
;
778 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
782 /* Restore saved state. */
783 if (STATEMENT_CODE_P (code
))
784 current_stmt_tree ()->stmts_are_full_exprs_p
785 = saved_stmts_are_full_exprs_p
;
791 is_invisiref_parm (const_tree t
)
793 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
794 && DECL_BY_REFERENCE (t
));
797 /* A stable comparison routine for use with splay trees and DECLs. */
800 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
805 return DECL_UID (a
) - DECL_UID (b
);
808 /* OpenMP context during genericization. */
810 struct cp_genericize_omp_taskreg
814 struct cp_genericize_omp_taskreg
*outer
;
815 splay_tree variables
;
818 /* Return true if genericization should try to determine if
819 DECL is firstprivate or shared within task regions. */
822 omp_var_to_track (tree decl
)
824 tree type
= TREE_TYPE (decl
);
825 if (is_invisiref_parm (decl
))
826 type
= TREE_TYPE (type
);
827 else if (TYPE_REF_P (type
))
828 type
= TREE_TYPE (type
);
829 while (TREE_CODE (type
) == ARRAY_TYPE
)
830 type
= TREE_TYPE (type
);
831 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
833 if (VAR_P (decl
) && CP_DECL_THREAD_LOCAL_P (decl
))
835 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
840 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
843 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
845 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
846 (splay_tree_key
) decl
);
849 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
851 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
852 if (!omp_ctx
->default_shared
)
854 struct cp_genericize_omp_taskreg
*octx
;
856 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
858 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
859 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
861 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
864 if (octx
->is_parallel
)
868 && (TREE_CODE (decl
) == PARM_DECL
869 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
870 && DECL_CONTEXT (decl
) == current_function_decl
)))
871 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
872 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
874 /* DECL is implicitly determined firstprivate in
875 the current task construct. Ensure copy ctor and
876 dtor are instantiated, because during gimplification
877 it will be already too late. */
878 tree type
= TREE_TYPE (decl
);
879 if (is_invisiref_parm (decl
))
880 type
= TREE_TYPE (type
);
881 else if (TYPE_REF_P (type
))
882 type
= TREE_TYPE (type
);
883 while (TREE_CODE (type
) == ARRAY_TYPE
)
884 type
= TREE_TYPE (type
);
885 get_copy_ctor (type
, tf_none
);
886 get_dtor (type
, tf_none
);
889 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
893 /* If we might need to clean up a partially constructed object, break down the
894 CONSTRUCTOR with split_nonconstant_init. Also expand VEC_INIT_EXPR at this
895 point. If initializing TO with FROM is non-trivial, overwrite *REPLACE with
899 cp_genericize_init (tree
*replace
, tree from
, tree to
)
901 tree init
= NULL_TREE
;
902 if (TREE_CODE (from
) == VEC_INIT_EXPR
)
903 init
= expand_vec_init_expr (to
, from
, tf_warning_or_error
);
904 else if (flag_exceptions
905 && TREE_CODE (from
) == CONSTRUCTOR
906 && TREE_SIDE_EFFECTS (from
)
907 && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from
)))
909 to
= cp_stabilize_reference (to
);
910 replace_placeholders (from
, to
);
911 init
= split_nonconstant_init (to
, from
);
916 if (*replace
== from
)
917 /* Make cp_gimplify_init_expr call replace_decl on this
918 TARGET_EXPR_INITIAL. */
919 init
= fold_convert (void_type_node
, init
);
924 /* For an INIT_EXPR, replace the INIT_EXPR itself. */
927 cp_genericize_init_expr (tree
*stmt_p
)
929 iloc_sentinel ils
= EXPR_LOCATION (*stmt_p
);
930 tree to
= TREE_OPERAND (*stmt_p
, 0);
931 tree from
= TREE_OPERAND (*stmt_p
, 1);
932 if (SIMPLE_TARGET_EXPR_P (from
)
933 /* Return gets confused if we clobber its INIT_EXPR this soon. */
934 && TREE_CODE (to
) != RESULT_DECL
)
935 from
= TARGET_EXPR_INITIAL (from
);
936 cp_genericize_init (stmt_p
, from
, to
);
939 /* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL. We will need to use
940 replace_decl later when we know what we're initializing. */
943 cp_genericize_target_expr (tree
*stmt_p
)
945 iloc_sentinel ils
= EXPR_LOCATION (*stmt_p
);
946 tree slot
= TARGET_EXPR_SLOT (*stmt_p
);
947 cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p
),
948 TARGET_EXPR_INITIAL (*stmt_p
), slot
);
949 gcc_assert (!DECL_INITIAL (slot
));
952 /* Genericization context. */
954 struct cp_genericize_data
956 hash_set
<tree
> *p_set
;
957 auto_vec
<tree
> bind_expr_stack
;
958 struct cp_genericize_omp_taskreg
*omp_ctx
;
961 bool handle_invisiref_parm_p
;
964 /* Perform any pre-gimplification folding of C++ front end trees to
966 Note: The folding of non-omp cases is something to move into
967 the middle-end. As for now we have most foldings only on GENERIC
968 in fold-const, we need to perform this before transformation to
974 bool genericize
; // called from cp_fold_function?
976 cp_fold_data (bool g
): genericize (g
) {}
980 cp_fold_r (tree
*stmt_p
, int *walk_subtrees
, void *data_
)
982 cp_fold_data
*data
= (cp_fold_data
*)data_
;
984 enum tree_code code
= TREE_CODE (stmt
);
989 if (TREE_CODE (PTRMEM_CST_MEMBER (stmt
)) == FUNCTION_DECL
990 && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt
)))
992 if (!data
->pset
.add (stmt
))
993 error_at (PTRMEM_CST_LOCATION (stmt
),
994 "taking address of an immediate function %qD",
995 PTRMEM_CST_MEMBER (stmt
));
996 stmt
= *stmt_p
= build_zero_cst (TREE_TYPE (stmt
));
1002 if (TREE_CODE (TREE_OPERAND (stmt
, 0)) == FUNCTION_DECL
1003 && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt
, 0)))
1005 error_at (EXPR_LOCATION (stmt
),
1006 "taking address of an immediate function %qD",
1007 TREE_OPERAND (stmt
, 0));
1008 stmt
= *stmt_p
= build_zero_cst (TREE_TYPE (stmt
));
1017 *stmt_p
= stmt
= cp_fold (*stmt_p
);
1019 if (data
->pset
.add (stmt
))
1021 /* Don't walk subtrees of stmts we've already walked once, otherwise
1022 we can have exponential complexity with e.g. lots of nested
1023 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
1024 always the same tree, which the first time cp_fold_r has been
1025 called on it had the subtrees walked. */
1030 code
= TREE_CODE (stmt
);
1037 case OMP_DISTRIBUTE
:
1041 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_fold_r
, data
, NULL
);
1042 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_fold_r
, data
, NULL
);
1043 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_fold_r
, data
, NULL
);
1044 x
= OMP_FOR_COND (stmt
);
1045 if (x
&& TREE_CODE_CLASS (TREE_CODE (x
)) == tcc_comparison
)
1047 cp_walk_tree (&TREE_OPERAND (x
, 0), cp_fold_r
, data
, NULL
);
1048 cp_walk_tree (&TREE_OPERAND (x
, 1), cp_fold_r
, data
, NULL
);
1050 else if (x
&& TREE_CODE (x
) == TREE_VEC
)
1052 n
= TREE_VEC_LENGTH (x
);
1053 for (i
= 0; i
< n
; i
++)
1055 tree o
= TREE_VEC_ELT (x
, i
);
1056 if (o
&& TREE_CODE_CLASS (TREE_CODE (o
)) == tcc_comparison
)
1057 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1060 x
= OMP_FOR_INCR (stmt
);
1061 if (x
&& TREE_CODE (x
) == TREE_VEC
)
1063 n
= TREE_VEC_LENGTH (x
);
1064 for (i
= 0; i
< n
; i
++)
1066 tree o
= TREE_VEC_ELT (x
, i
);
1067 if (o
&& TREE_CODE (o
) == MODIFY_EXPR
)
1068 o
= TREE_OPERAND (o
, 1);
1069 if (o
&& (TREE_CODE (o
) == PLUS_EXPR
|| TREE_CODE (o
) == MINUS_EXPR
1070 || TREE_CODE (o
) == POINTER_PLUS_EXPR
))
1072 cp_walk_tree (&TREE_OPERAND (o
, 0), cp_fold_r
, data
, NULL
);
1073 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1077 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_fold_r
, data
, NULL
);
1082 if (IF_STMT_CONSTEVAL_P (stmt
))
1084 /* Don't walk THEN_CLAUSE (stmt) for consteval if. IF_COND is always
1085 boolean_false_node. */
1086 cp_walk_tree (&ELSE_CLAUSE (stmt
), cp_fold_r
, data
, NULL
);
1087 cp_walk_tree (&IF_SCOPE (stmt
), cp_fold_r
, data
, NULL
);
1093 /* cp_genericize_{init,target}_expr are only for genericize time; they're
1094 here rather than in cp_genericize to avoid problems with the invisible
1095 reference transition. */
1097 if (data
->genericize
)
1098 cp_genericize_init_expr (stmt_p
);
1102 if (data
->genericize
)
1103 cp_genericize_target_expr (stmt_p
);
1105 /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
1106 that case, use it in place of this one. */
1107 if (tree
&init
= TARGET_EXPR_INITIAL (stmt
))
1109 cp_walk_tree (&init
, cp_fold_r
, data
, NULL
);
1111 if (TREE_CODE (init
) == TARGET_EXPR
)
1113 TARGET_EXPR_ELIDING_P (init
) = TARGET_EXPR_ELIDING_P (stmt
);
1126 /* Fold ALL the trees! FIXME we should be able to remove this, but
1127 apparently that still causes optimization regressions. */
1130 cp_fold_function (tree fndecl
)
1132 cp_fold_data
data (/*genericize*/true);
1133 cp_walk_tree (&DECL_SAVED_TREE (fndecl
), cp_fold_r
, &data
, NULL
);
1136 /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1138 static tree
genericize_spaceship (tree expr
)
1140 iloc_sentinel
s (cp_expr_location (expr
));
1141 tree type
= TREE_TYPE (expr
);
1142 tree op0
= TREE_OPERAND (expr
, 0);
1143 tree op1
= TREE_OPERAND (expr
, 1);
1144 return genericize_spaceship (input_location
, type
, op0
, op1
);
1147 /* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1148 to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
1149 the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
1150 NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR. */
1153 predeclare_vla (tree expr
)
1155 tree type
= TREE_TYPE (expr
);
1156 if (type
== error_mark_node
)
1158 if (is_typedef_decl (expr
))
1159 type
= DECL_ORIGINAL_TYPE (expr
);
1161 /* We need to strip pointers for gimplify_type_sizes. */
1163 while (POINTER_TYPE_P (vla
))
1165 if (TYPE_NAME (vla
))
1167 vla
= TREE_TYPE (vla
);
1169 if (vla
== type
|| TYPE_NAME (vla
)
1170 || !variably_modified_type_p (vla
, NULL_TREE
))
1173 tree decl
= build_decl (input_location
, TYPE_DECL
, NULL_TREE
, vla
);
1174 DECL_ARTIFICIAL (decl
) = 1;
1175 TYPE_NAME (vla
) = decl
;
1176 tree dexp
= build_stmt (input_location
, DECL_EXPR
, decl
);
1184 expr
= build2 (COMPOUND_EXPR
, type
, dexp
, expr
);
1189 /* Perform any pre-gimplification lowering of C++ front end trees to
1193 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
1195 tree stmt
= *stmt_p
;
1196 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
1197 hash_set
<tree
> *p_set
= wtd
->p_set
;
1199 /* If in an OpenMP context, note var uses. */
1200 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1202 || TREE_CODE (stmt
) == PARM_DECL
1203 || TREE_CODE (stmt
) == RESULT_DECL
)
1204 && omp_var_to_track (stmt
))
1205 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
1207 /* Don't dereference parms in a thunk, pass the references through. */
1208 if ((TREE_CODE (stmt
) == CALL_EXPR
&& call_from_lambda_thunk_p (stmt
))
1209 || (TREE_CODE (stmt
) == AGGR_INIT_EXPR
&& AGGR_INIT_FROM_THUNK_P (stmt
)))
1215 /* Dereference invisible reference parms. */
1216 if (wtd
->handle_invisiref_parm_p
&& is_invisiref_parm (stmt
))
1218 *stmt_p
= convert_from_reference (stmt
);
1219 p_set
->add (*stmt_p
);
1224 /* Map block scope extern declarations to visible declarations with the
1225 same name and type in outer scopes if any. */
1226 if (VAR_OR_FUNCTION_DECL_P (stmt
) && DECL_LOCAL_DECL_P (stmt
))
1227 if (tree alias
= DECL_LOCAL_DECL_ALIAS (stmt
))
1229 if (alias
!= error_mark_node
)
1232 TREE_USED (alias
) |= TREE_USED (stmt
);
1238 if (TREE_CODE (stmt
) == INTEGER_CST
1239 && TYPE_REF_P (TREE_TYPE (stmt
))
1240 && (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1241 && !wtd
->no_sanitize_p
)
1243 ubsan_maybe_instrument_reference (stmt_p
);
1244 if (*stmt_p
!= stmt
)
1251 /* Other than invisiref parms, don't walk the same tree twice. */
1252 if (p_set
->contains (stmt
))
1258 switch (TREE_CODE (stmt
))
1261 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1263 /* If in an OpenMP context, note var uses. */
1264 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1265 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
1266 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
1267 *stmt_p
= fold_convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
1273 if (TREE_OPERAND (stmt
, 0) && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1274 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1279 switch (OMP_CLAUSE_CODE (stmt
))
1281 case OMP_CLAUSE_LASTPRIVATE
:
1282 /* Don't dereference an invisiref in OpenMP clauses. */
1283 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1286 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
1287 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
1288 cp_genericize_r
, data
, NULL
);
1291 case OMP_CLAUSE_PRIVATE
:
1292 /* Don't dereference an invisiref in OpenMP clauses. */
1293 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1295 else if (wtd
->omp_ctx
!= NULL
)
1297 /* Private clause doesn't cause any references to the
1298 var in outer contexts, avoid calling
1299 omp_cxx_notice_variable for it. */
1300 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
1301 wtd
->omp_ctx
= NULL
;
1302 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1308 case OMP_CLAUSE_SHARED
:
1309 case OMP_CLAUSE_FIRSTPRIVATE
:
1310 case OMP_CLAUSE_COPYIN
:
1311 case OMP_CLAUSE_COPYPRIVATE
:
1312 case OMP_CLAUSE_INCLUSIVE
:
1313 case OMP_CLAUSE_EXCLUSIVE
:
1314 /* Don't dereference an invisiref in OpenMP clauses. */
1315 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1318 case OMP_CLAUSE_REDUCTION
:
1319 case OMP_CLAUSE_IN_REDUCTION
:
1320 case OMP_CLAUSE_TASK_REDUCTION
:
1321 /* Don't dereference an invisiref in reduction clause's
1322 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1323 still needs to be genericized. */
1324 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1327 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1328 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1329 cp_genericize_r
, data
, NULL
);
1330 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1331 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1332 cp_genericize_r
, data
, NULL
);
1340 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1341 to lower this construct before scanning it, so we need to lower these
1342 before doing anything else. */
1344 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1345 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1348 CLEANUP_BODY (stmt
),
1349 CLEANUP_EXPR (stmt
));
1353 genericize_if_stmt (stmt_p
);
1354 /* *stmt_p has changed, tail recurse to handle it again. */
1355 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1357 /* COND_EXPR might have incompatible types in branches if one or both
1358 arms are bitfields. Fix it up now. */
1362 = (TREE_OPERAND (stmt
, 1)
1363 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1366 = (TREE_OPERAND (stmt
, 2)
1367 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1370 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1371 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1373 TREE_OPERAND (stmt
, 1)
1374 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1375 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1379 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1380 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1382 TREE_OPERAND (stmt
, 2)
1383 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1384 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1391 if (UNLIKELY (wtd
->omp_ctx
!= NULL
))
1394 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1396 && !DECL_EXTERNAL (decl
)
1397 && omp_var_to_track (decl
))
1400 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1401 (splay_tree_key
) decl
);
1403 splay_tree_insert (wtd
->omp_ctx
->variables
,
1404 (splay_tree_key
) decl
,
1406 ? OMP_CLAUSE_DEFAULT_SHARED
1407 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1410 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1412 /* The point here is to not sanitize static initializers. */
1413 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1414 wtd
->no_sanitize_p
= true;
1415 for (tree decl
= BIND_EXPR_VARS (stmt
);
1417 decl
= DECL_CHAIN (decl
))
1419 && TREE_STATIC (decl
)
1420 && DECL_INITIAL (decl
))
1421 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1422 wtd
->no_sanitize_p
= no_sanitize_p
;
1424 wtd
->bind_expr_stack
.safe_push (stmt
);
1425 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1426 cp_genericize_r
, data
, NULL
);
1427 wtd
->bind_expr_stack
.pop ();
1430 case ASSERTION_STMT
:
1431 case PRECONDITION_STMT
:
1432 case POSTCONDITION_STMT
:
1434 if (tree check
= build_contract_check (stmt
))
1437 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1440 /* If we didn't build a check, replace it with void_node so we don't
1441 leak contracts into GENERIC. */
1442 *stmt_p
= void_node
;
1449 tree block
= NULL_TREE
;
1451 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1452 BLOCK, and append an IMPORTED_DECL to its
1453 BLOCK_VARS chained list. */
1454 if (wtd
->bind_expr_stack
.exists ())
1457 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1458 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1463 tree decl
= TREE_OPERAND (stmt
, 0);
1466 if (undeduced_auto_decl (decl
))
1467 /* Omit from the GENERIC, the back-end can't handle it. */;
1470 tree using_directive
= make_node (IMPORTED_DECL
);
1471 TREE_TYPE (using_directive
) = void_type_node
;
1472 DECL_CONTEXT (using_directive
) = current_function_decl
;
1474 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
) = decl
;
1475 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1476 BLOCK_VARS (block
) = using_directive
;
1479 /* The USING_STMT won't appear in GENERIC. */
1480 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1486 if (TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1488 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1489 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1494 tree d
= DECL_EXPR_DECL (stmt
);
1496 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1504 struct cp_genericize_omp_taskreg omp_ctx
;
1509 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1510 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1511 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1512 omp_ctx
.outer
= wtd
->omp_ctx
;
1513 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1514 wtd
->omp_ctx
= &omp_ctx
;
1515 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1516 switch (OMP_CLAUSE_CODE (c
))
1518 case OMP_CLAUSE_SHARED
:
1519 case OMP_CLAUSE_PRIVATE
:
1520 case OMP_CLAUSE_FIRSTPRIVATE
:
1521 case OMP_CLAUSE_LASTPRIVATE
:
1522 decl
= OMP_CLAUSE_DECL (c
);
1523 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1525 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1528 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1529 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1530 ? OMP_CLAUSE_DEFAULT_SHARED
1531 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1532 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
&& omp_ctx
.outer
)
1533 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1535 case OMP_CLAUSE_DEFAULT
:
1536 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1537 omp_ctx
.default_shared
= true;
1541 if (TREE_CODE (stmt
) == OMP_TASKLOOP
)
1542 c_genericize_control_stmt (stmt_p
, walk_subtrees
, data
,
1543 cp_genericize_r
, cp_walk_subtrees
);
1545 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1546 wtd
->omp_ctx
= omp_ctx
.outer
;
1547 splay_tree_delete (omp_ctx
.variables
);
1552 cfun
->has_omp_target
= true;
1558 tree try_block
= wtd
->try_block
;
1559 wtd
->try_block
= stmt
;
1560 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1561 wtd
->try_block
= try_block
;
1562 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1566 case MUST_NOT_THROW_EXPR
:
1567 /* MUST_NOT_THROW_COND might be something else with TM. */
1568 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
1571 tree try_block
= wtd
->try_block
;
1572 wtd
->try_block
= stmt
;
1573 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1574 wtd
->try_block
= try_block
;
1580 location_t loc
= location_of (stmt
);
1581 if (warning_suppressed_p (stmt
/* What warning? */))
1583 else if (wtd
->try_block
)
1585 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
)
1587 auto_diagnostic_group d
;
1588 if (warning_at (loc
, OPT_Wterminate
,
1589 "%<throw%> will always call %<terminate%>")
1590 && cxx_dialect
>= cxx11
1591 && DECL_DESTRUCTOR_P (current_function_decl
))
1592 inform (loc
, "in C++11 destructors default to %<noexcept%>");
1597 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
1598 && DECL_DESTRUCTOR_P (current_function_decl
)
1599 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
1601 && (get_defaulted_eh_spec (current_function_decl
)
1602 == empty_except_spec
))
1603 warning_at (loc
, OPT_Wc__11_compat
,
1604 "in C++11 this %<throw%> will call %<terminate%> "
1605 "because destructors default to %<noexcept%>");
1611 gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt
)));
1612 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1615 case SPACESHIP_EXPR
:
1616 *stmt_p
= genericize_spaceship (*stmt_p
);
1620 /* By the time we get here we're handing off to the back end, so we don't
1621 need or want to preserve PTRMEM_CST anymore. */
1622 *stmt_p
= cplus_expand_constant (stmt
);
1627 /* For MEM_REF, make sure not to sanitize the second operand even
1628 if it has reference type. It is just an offset with a type
1629 holding other information. There is no other processing we
1630 need to do for INTEGER_CSTs, so just ignore the second argument
1632 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1637 *stmt_p
= predeclare_vla (*stmt_p
);
1638 if (!wtd
->no_sanitize_p
1639 && sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
)
1640 && TYPE_REF_P (TREE_TYPE (stmt
)))
1641 ubsan_maybe_instrument_reference (stmt_p
);
1645 /* Evaluate function concept checks instead of treating them as
1646 normal functions. */
1647 if (concept_check_p (stmt
))
1649 *stmt_p
= evaluate_concept_check (stmt
);
1650 * walk_subtrees
= 0;
1654 if (!wtd
->no_sanitize_p
1655 && sanitize_flags_p ((SANITIZE_NULL
1656 | SANITIZE_ALIGNMENT
| SANITIZE_VPTR
)))
1658 tree fn
= CALL_EXPR_FN (stmt
);
1660 && !error_operand_p (fn
)
1661 && INDIRECT_TYPE_P (TREE_TYPE (fn
))
1662 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) == METHOD_TYPE
)
1665 = TREE_CODE (fn
) == ADDR_EXPR
1666 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
1667 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn
, 0));
1668 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1669 ubsan_maybe_instrument_member_call (stmt
, is_ctor
);
1670 if (sanitize_flags_p (SANITIZE_VPTR
) && !is_ctor
)
1671 cp_ubsan_maybe_instrument_member_call (stmt
);
1673 else if (fn
== NULL_TREE
1674 && CALL_EXPR_IFN (stmt
) == IFN_UBSAN_NULL
1675 && TREE_CODE (CALL_EXPR_ARG (stmt
, 0)) == INTEGER_CST
1676 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt
, 0))))
1680 case AGGR_INIT_EXPR
:
1681 /* For calls to a multi-versioned function, overload resolution
1682 returns the function with the highest target priority, that is,
1683 the version that will checked for dispatching first. If this
1684 version is inlinable, a direct call to this version can be made
1685 otherwise the call should go through the dispatcher. */
1687 tree fn
= cp_get_callee_fndecl_nofold (stmt
);
1688 if (fn
&& DECL_FUNCTION_VERSIONED (fn
)
1689 && (current_function_decl
== NULL
1690 || !targetm
.target_option
.can_inline_p (current_function_decl
,
1692 if (tree dis
= get_function_version_dispatcher (fn
))
1694 mark_versions_used (dis
);
1695 dis
= build_address (dis
);
1696 if (TREE_CODE (stmt
) == CALL_EXPR
)
1697 CALL_EXPR_FN (stmt
) = dis
;
1699 AGGR_INIT_EXPR_FN (stmt
) = dis
;
1705 if (TARGET_EXPR_INITIAL (stmt
)
1706 && TREE_CODE (TARGET_EXPR_INITIAL (stmt
)) == CONSTRUCTOR
1707 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt
)))
1708 TARGET_EXPR_NO_ELIDE (stmt
) = 1;
1711 case TEMPLATE_ID_EXPR
:
1712 gcc_assert (concept_check_p (stmt
));
1713 /* Emit the value of the concept check. */
1714 *stmt_p
= evaluate_concept_check (stmt
);
1718 case OMP_DISTRIBUTE
:
1719 /* Need to explicitly instantiate copy ctors on class iterators of
1720 composite distribute parallel for. */
1721 if (OMP_FOR_INIT (*stmt_p
) == NULL_TREE
)
1723 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
1724 tree inner
= walk_tree (&OMP_FOR_BODY (*stmt_p
),
1725 find_combined_omp_for
, data
, NULL
);
1726 if (inner
!= NULL_TREE
1727 && TREE_CODE (inner
) == OMP_FOR
)
1729 for (int i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner
)); i
++)
1730 if (OMP_FOR_ORIG_DECLS (inner
)
1731 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner
),
1733 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner
),
1736 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner
), i
);
1737 /* Class iterators aren't allowed on OMP_SIMD, so the only
1738 case we need to solve is distribute parallel for. */
1739 gcc_assert (TREE_CODE (inner
) == OMP_FOR
1741 tree orig_decl
= TREE_PURPOSE (orig
);
1742 tree c
, cl
= NULL_TREE
;
1743 for (c
= OMP_FOR_CLAUSES (inner
);
1744 c
; c
= OMP_CLAUSE_CHAIN (c
))
1745 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1746 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
1747 && OMP_CLAUSE_DECL (c
) == orig_decl
)
1752 if (cl
== NULL_TREE
)
1754 for (c
= OMP_PARALLEL_CLAUSES (*data
[1]);
1755 c
; c
= OMP_CLAUSE_CHAIN (c
))
1756 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1757 && OMP_CLAUSE_DECL (c
) == orig_decl
)
1765 orig_decl
= require_complete_type (orig_decl
);
1766 tree inner_type
= TREE_TYPE (orig_decl
);
1767 if (orig_decl
== error_mark_node
)
1769 if (TYPE_REF_P (TREE_TYPE (orig_decl
)))
1770 inner_type
= TREE_TYPE (inner_type
);
1772 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
1773 inner_type
= TREE_TYPE (inner_type
);
1774 get_copy_ctor (inner_type
, tf_warning_or_error
);
1791 case STATEMENT_LIST
:
1792 /* These cases are handled by shared code. */
1793 c_genericize_control_stmt (stmt_p
, walk_subtrees
, data
,
1794 cp_genericize_r
, cp_walk_subtrees
);
1798 *stmt_p
= build1_loc (EXPR_LOCATION (stmt
), VIEW_CONVERT_EXPR
,
1799 TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
1803 if (IS_TYPE_OR_DECL_P (stmt
))
1808 p_set
->add (*stmt_p
);
1813 /* Lower C++ front end trees to GENERIC in T_P. */
1816 cp_genericize_tree (tree
* t_p
, bool handle_invisiref_parm_p
)
1818 struct cp_genericize_data wtd
;
1820 wtd
.p_set
= new hash_set
<tree
>;
1821 wtd
.bind_expr_stack
.create (0);
1823 wtd
.try_block
= NULL_TREE
;
1824 wtd
.no_sanitize_p
= false;
1825 wtd
.handle_invisiref_parm_p
= handle_invisiref_parm_p
;
1826 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1828 if (sanitize_flags_p (SANITIZE_VPTR
))
1829 cp_ubsan_instrument_member_accesses (t_p
);
1832 /* If a function that should end with a return in non-void
1833 function doesn't obviously end with return, add ubsan
1834 instrumentation code to verify it at runtime. If -fsanitize=return
1835 is not enabled, instrument __builtin_unreachable. */
1838 cp_maybe_instrument_return (tree fndecl
)
1840 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
1841 || DECL_CONSTRUCTOR_P (fndecl
)
1842 || DECL_DESTRUCTOR_P (fndecl
)
1843 || !targetm
.warn_func_return (fndecl
))
1846 if (!sanitize_flags_p (SANITIZE_RETURN
, fndecl
)
1847 /* Don't add __builtin_unreachable () if not optimizing, it will not
1848 improve any optimizations in that case, just break UB code.
1849 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
1850 UBSan covers this with ubsan_instrument_return above where sufficient
1851 information is provided, while the __builtin_unreachable () below
1852 if return sanitization is disabled will just result in hard to
1853 understand runtime error without location. */
1854 && ((!optimize
&& !flag_unreachable_traps
)
1855 || sanitize_flags_p (SANITIZE_UNREACHABLE
, fndecl
)))
1858 tree t
= DECL_SAVED_TREE (fndecl
);
1861 switch (TREE_CODE (t
))
1864 t
= BIND_EXPR_BODY (t
);
1866 case TRY_FINALLY_EXPR
:
1867 case CLEANUP_POINT_EXPR
:
1868 t
= TREE_OPERAND (t
, 0);
1870 case STATEMENT_LIST
:
1872 tree_stmt_iterator i
= tsi_last (t
);
1873 while (!tsi_end_p (i
))
1875 tree p
= tsi_stmt (i
);
1876 if (TREE_CODE (p
) != DEBUG_BEGIN_STMT
)
1896 tree
*p
= &DECL_SAVED_TREE (fndecl
);
1897 if (TREE_CODE (*p
) == BIND_EXPR
)
1898 p
= &BIND_EXPR_BODY (*p
);
1900 location_t loc
= DECL_SOURCE_LOCATION (fndecl
);
1901 if (sanitize_flags_p (SANITIZE_RETURN
, fndecl
))
1902 t
= ubsan_instrument_return (loc
);
1904 t
= build_builtin_unreachable (BUILTINS_LOCATION
);
1906 append_to_statement_list (t
, p
);
1910 cp_genericize (tree fndecl
)
1914 /* Fix up the types of parms passed by invisible reference. */
1915 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= DECL_CHAIN (t
))
1916 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
1918 /* If a function's arguments are copied to create a thunk,
1919 then DECL_BY_REFERENCE will be set -- but the type of the
1920 argument will be a pointer type, so we will never get
1922 gcc_assert (!DECL_BY_REFERENCE (t
));
1923 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
1924 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
1925 DECL_BY_REFERENCE (t
) = 1;
1926 TREE_ADDRESSABLE (t
) = 0;
1930 /* Do the same for the return value. */
1931 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
1933 t
= DECL_RESULT (fndecl
);
1934 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
1935 DECL_BY_REFERENCE (t
) = 1;
1936 TREE_ADDRESSABLE (t
) = 0;
1940 /* Adjust DECL_VALUE_EXPR of the original var. */
1941 tree outer
= outer_curly_brace_block (current_function_decl
);
1945 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1947 && DECL_NAME (t
) == DECL_NAME (var
)
1948 && DECL_HAS_VALUE_EXPR_P (var
)
1949 && DECL_VALUE_EXPR (var
) == t
)
1951 tree val
= convert_from_reference (t
);
1952 SET_DECL_VALUE_EXPR (var
, val
);
1958 /* If we're a clone, the body is already GIMPLE. */
1959 if (DECL_CLONED_FUNCTION_P (fndecl
))
1962 /* Allow cp_genericize calls to be nested. */
1963 bc_state_t save_state
;
1964 save_bc_state (&save_state
);
1966 /* We do want to see every occurrence of the parms, so we can't just use
1967 walk_tree's hash functionality. */
1968 cp_genericize_tree (&DECL_SAVED_TREE (fndecl
), true);
1970 cp_maybe_instrument_return (fndecl
);
1972 /* Do everything else. */
1973 c_genericize (fndecl
);
1974 restore_bc_state (&save_state
);
1977 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1978 NULL if there is in fact nothing to do. ARG2 may be null if FN
1979 actually only takes one argument. */
1982 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
1984 tree defparm
, parm
, t
;
1992 nargs
= list_length (DECL_ARGUMENTS (fn
));
1993 argarray
= XALLOCAVEC (tree
, nargs
);
1995 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
1997 defparm
= TREE_CHAIN (defparm
);
1999 bool is_method
= TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
;
2000 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
2002 tree inner_type
= TREE_TYPE (arg1
);
2003 tree start1
, end1
, p1
;
2004 tree start2
= NULL
, p2
= NULL
;
2005 tree ret
= NULL
, lab
;
2011 inner_type
= TREE_TYPE (inner_type
);
2012 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
2013 size_zero_node
, NULL
, NULL
);
2015 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
2016 size_zero_node
, NULL
, NULL
);
2018 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
2019 start1
= build_fold_addr_expr_loc (input_location
, start1
);
2021 start2
= build_fold_addr_expr_loc (input_location
, start2
);
2023 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
2024 end1
= fold_build_pointer_plus (start1
, end1
);
2026 p1
= create_tmp_var (TREE_TYPE (start1
));
2027 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
2028 append_to_statement_list (t
, &ret
);
2032 p2
= create_tmp_var (TREE_TYPE (start2
));
2033 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
2034 append_to_statement_list (t
, &ret
);
2037 lab
= create_artificial_label (input_location
);
2038 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
2039 append_to_statement_list (t
, &ret
);
2044 /* Handle default arguments. */
2045 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
2046 parm
= TREE_CHAIN (parm
), i
++)
2047 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
2048 TREE_PURPOSE (parm
), fn
,
2049 i
- is_method
, tf_warning_or_error
);
2050 t
= build_call_a (fn
, i
, argarray
);
2051 t
= fold_convert (void_type_node
, t
);
2052 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
2053 append_to_statement_list (t
, &ret
);
2055 t
= fold_build_pointer_plus (p1
, TYPE_SIZE_UNIT (inner_type
));
2056 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
2057 append_to_statement_list (t
, &ret
);
2061 t
= fold_build_pointer_plus (p2
, TYPE_SIZE_UNIT (inner_type
));
2062 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
2063 append_to_statement_list (t
, &ret
);
2066 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
2067 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
2068 append_to_statement_list (t
, &ret
);
2074 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
2076 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
2077 /* Handle default arguments. */
2078 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
2079 parm
= TREE_CHAIN (parm
), i
++)
2080 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
2081 TREE_PURPOSE (parm
), fn
,
2082 i
- is_method
, tf_warning_or_error
);
2083 t
= build_call_a (fn
, i
, argarray
);
2084 t
= fold_convert (void_type_node
, t
);
2085 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
2089 /* Return code to initialize DECL with its default constructor, or
2090 NULL if there's nothing to do. */
2093 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
2095 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2099 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
2104 /* Return code to initialize DST with a copy constructor from SRC. */
2107 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
2109 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2113 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
2115 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
2120 /* Similarly, except use an assignment operator instead. */
2123 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
2125 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2129 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
2131 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
2136 /* Return code to destroy DECL. */
2139 cxx_omp_clause_dtor (tree clause
, tree decl
)
2141 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2145 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
2150 /* True if OpenMP should privatize what this DECL points to rather
2151 than the DECL itself. */
2154 cxx_omp_privatize_by_reference (const_tree decl
)
2156 return (TYPE_REF_P (TREE_TYPE (decl
))
2157 || is_invisiref_parm (decl
));
2160 /* Return true if DECL is const qualified var having no mutable member. */
2162 cxx_omp_const_qual_no_mutable (tree decl
)
2164 tree type
= TREE_TYPE (decl
);
2165 if (TYPE_REF_P (type
))
2167 if (!is_invisiref_parm (decl
))
2169 type
= TREE_TYPE (type
);
2171 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
2173 /* NVR doesn't preserve const qualification of the
2175 tree outer
= outer_curly_brace_block (current_function_decl
);
2179 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
2181 && DECL_NAME (decl
) == DECL_NAME (var
)
2182 && (TYPE_MAIN_VARIANT (type
)
2183 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
2185 if (TYPE_READONLY (TREE_TYPE (var
)))
2186 type
= TREE_TYPE (var
);
2192 if (type
== error_mark_node
)
2195 /* Variables with const-qualified type having no mutable member
2196 are predetermined shared. */
2197 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
2203 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2204 of DECL is predetermined. */
2206 enum omp_clause_default_kind
2207 cxx_omp_predetermined_sharing_1 (tree decl
)
2209 /* Static data members are predetermined shared. */
2210 if (TREE_STATIC (decl
))
2212 tree ctx
= CP_DECL_CONTEXT (decl
);
2213 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
2214 return OMP_CLAUSE_DEFAULT_SHARED
;
2216 if (c_omp_predefined_variable (decl
))
2217 return OMP_CLAUSE_DEFAULT_SHARED
;
2220 /* this may not be specified in data-sharing clauses, still we need
2221 to predetermined it firstprivate. */
2222 if (decl
== current_class_ptr
)
2223 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2225 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
2228 /* Likewise, but also include the artificial vars. We don't want to
2229 disallow the artificial vars being mentioned in explicit clauses,
2230 as we use artificial vars e.g. for loop constructs with random
2231 access iterators other than pointers, but during gimplification
2232 we want to treat them as predetermined. */
2234 enum omp_clause_default_kind
2235 cxx_omp_predetermined_sharing (tree decl
)
2237 enum omp_clause_default_kind ret
= cxx_omp_predetermined_sharing_1 (decl
);
2238 if (ret
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
2241 /* Predetermine artificial variables holding integral values, those
2242 are usually result of gimplify_one_sizepos or SAVE_EXPR
2245 && DECL_ARTIFICIAL (decl
)
2246 && INTEGRAL_TYPE_P (TREE_TYPE (decl
))
2247 && !(DECL_LANG_SPECIFIC (decl
)
2248 && DECL_OMP_PRIVATIZED_MEMBER (decl
)))
2249 return OMP_CLAUSE_DEFAULT_SHARED
;
2251 /* Similarly for typeinfo symbols. */
2252 if (VAR_P (decl
) && DECL_ARTIFICIAL (decl
) && DECL_TINFO_P (decl
))
2253 return OMP_CLAUSE_DEFAULT_SHARED
;
2255 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
2258 enum omp_clause_defaultmap_kind
2259 cxx_omp_predetermined_mapping (tree decl
)
2261 /* Predetermine artificial variables holding integral values, those
2262 are usually result of gimplify_one_sizepos or SAVE_EXPR
2265 && DECL_ARTIFICIAL (decl
)
2266 && INTEGRAL_TYPE_P (TREE_TYPE (decl
))
2267 && !(DECL_LANG_SPECIFIC (decl
)
2268 && DECL_OMP_PRIVATIZED_MEMBER (decl
)))
2269 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
;
2271 if (c_omp_predefined_variable (decl
))
2272 return OMP_CLAUSE_DEFAULTMAP_TO
;
2274 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
2277 /* Finalize an implicitly determined clause. */
2280 cxx_omp_finish_clause (tree c
, gimple_seq
*, bool /* openacc */)
2282 tree decl
, inner_type
;
2283 bool make_shared
= false;
2285 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
2286 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
2287 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LASTPRIVATE
2288 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)))
2291 decl
= OMP_CLAUSE_DECL (c
);
2292 decl
= require_complete_type (decl
);
2293 inner_type
= TREE_TYPE (decl
);
2294 if (decl
== error_mark_node
)
2296 else if (TYPE_REF_P (TREE_TYPE (decl
)))
2297 inner_type
= TREE_TYPE (inner_type
);
2299 /* We're interested in the base element, not arrays. */
2300 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
2301 inner_type
= TREE_TYPE (inner_type
);
2303 /* Check for special function availability by building a call to one.
2304 Save the results, because later we won't be in the right context
2305 for making these queries. */
2306 bool first
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
;
2307 bool last
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
;
2309 && CLASS_TYPE_P (inner_type
)
2310 && cxx_omp_create_clause_info (c
, inner_type
, !first
, first
, last
,
2316 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;
2317 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
) = 0;
2318 OMP_CLAUSE_SHARED_READONLY (c
) = 0;
2322 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2323 disregarded in OpenMP construct, because it is going to be
2324 remapped during OpenMP lowering. SHARED is true if DECL
2325 is going to be shared, false if it is going to be privatized. */
2328 cxx_omp_disregard_value_expr (tree decl
, bool shared
)
2333 && DECL_HAS_VALUE_EXPR_P (decl
)
2334 && DECL_ARTIFICIAL (decl
)
2335 && DECL_LANG_SPECIFIC (decl
)
2336 && DECL_OMP_PRIVATIZED_MEMBER (decl
))
2338 if (VAR_P (decl
) && DECL_CONTEXT (decl
) && is_capture_proxy (decl
))
2343 /* Fold expression X which is used as an rvalue if RVAL is true. */
2346 cp_fold_maybe_rvalue (tree x
, bool rval
)
2352 x
= mark_rvalue_use (x
);
2353 if (rval
&& DECL_P (x
)
2354 && !TYPE_REF_P (TREE_TYPE (x
)))
2356 tree v
= decl_constant_value (x
);
2357 if (v
!= x
&& v
!= error_mark_node
)
2368 /* Fold expression X which is used as an rvalue. */
2371 cp_fold_rvalue (tree x
)
2373 return cp_fold_maybe_rvalue (x
, true);
2376 /* Perform folding on expression X. */
2379 cp_fully_fold (tree x
)
2381 if (processing_template_decl
)
2383 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2384 have to call both. */
2385 if (cxx_dialect
>= cxx11
)
2387 x
= maybe_constant_value (x
);
2388 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2389 a TARGET_EXPR; undo that here. */
2390 if (TREE_CODE (x
) == TARGET_EXPR
)
2391 x
= TARGET_EXPR_INITIAL (x
);
2392 else if (TREE_CODE (x
) == VIEW_CONVERT_EXPR
2393 && TREE_CODE (TREE_OPERAND (x
, 0)) == CONSTRUCTOR
2394 && TREE_TYPE (TREE_OPERAND (x
, 0)) == TREE_TYPE (x
))
2395 x
= TREE_OPERAND (x
, 0);
2397 return cp_fold_rvalue (x
);
2400 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2404 cp_fully_fold_init (tree x
)
2406 if (processing_template_decl
)
2408 x
= cp_fully_fold (x
);
2409 cp_fold_data
data (/*genericize*/false);
2410 cp_walk_tree (&x
, cp_fold_r
, &data
, NULL
);
2414 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2415 and certain changes are made to the folding done. Or should be (FIXME). We
2416 never touch maybe_const, as it is only used for the C front-end
2417 C_MAYBE_CONST_EXPR. */
2420 c_fully_fold (tree x
, bool /*in_init*/, bool */
*maybe_const*/
, bool lval
)
2422 return cp_fold_maybe_rvalue (x
, !lval
);
2425 static GTY((deletable
)) hash_map
<tree
, tree
> *fold_cache
;
2427 /* Dispose of the whole FOLD_CACHE. */
2430 clear_fold_cache (void)
2432 if (fold_cache
!= NULL
)
2433 fold_cache
->empty ();
2436 /* This function tries to fold an expression X.
2437 To avoid combinatorial explosion, folding results are kept in fold_cache.
2438 If X is invalid, we don't fold at all.
2439 For performance reasons we don't cache expressions representing a
2440 declaration or constant.
2441 Function returns X or its folded variant. */
2446 tree op0
, op1
, op2
, op3
;
2447 tree org_x
= x
, r
= NULL_TREE
;
2448 enum tree_code code
;
2450 bool rval_ops
= true;
2452 if (!x
|| x
== error_mark_node
)
2455 if (EXPR_P (x
) && (!TREE_TYPE (x
) || TREE_TYPE (x
) == error_mark_node
))
2458 /* Don't bother to cache DECLs or constants. */
2459 if (DECL_P (x
) || CONSTANT_CLASS_P (x
))
2462 if (fold_cache
== NULL
)
2463 fold_cache
= hash_map
<tree
, tree
>::create_ggc (101);
2465 if (tree
*cached
= fold_cache
->get (x
))
2468 uid_sensitive_constexpr_evaluation_checker c
;
2470 code
= TREE_CODE (x
);
2473 case CLEANUP_POINT_EXPR
:
2474 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2476 r
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2477 if (!TREE_SIDE_EFFECTS (r
))
2482 x
= fold_sizeof_expr (x
);
2485 case VIEW_CONVERT_EXPR
:
2488 case NON_LVALUE_EXPR
:
2491 if (VOID_TYPE_P (TREE_TYPE (x
)))
2493 /* This is just to make sure we don't end up with casts to
2494 void from error_mark_node. If we just return x, then
2495 cp_fold_r might fold the operand into error_mark_node and
2496 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2497 during gimplification doesn't like such casts.
2498 Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2499 folding of the operand should be in the caches and if in cp_fold_r
2500 it will modify it in place. */
2501 op0
= cp_fold (TREE_OPERAND (x
, 0));
2502 if (op0
== error_mark_node
)
2503 x
= error_mark_node
;
2507 loc
= EXPR_LOCATION (x
);
2508 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2510 if (code
== CONVERT_EXPR
2511 && SCALAR_TYPE_P (TREE_TYPE (x
))
2512 && op0
!= void_node
)
2513 /* During parsing we used convert_to_*_nofold; re-convert now using the
2514 folding variants, since fold() doesn't do those transformations. */
2515 x
= fold (convert (TREE_TYPE (x
), op0
));
2516 else if (op0
!= TREE_OPERAND (x
, 0))
2518 if (op0
== error_mark_node
)
2519 x
= error_mark_node
;
2521 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
2526 /* Conversion of an out-of-range value has implementation-defined
2527 behavior; the language considers it different from arithmetic
2528 overflow, which is undefined. */
2529 if (TREE_CODE (op0
) == INTEGER_CST
2530 && TREE_OVERFLOW_P (x
) && !TREE_OVERFLOW_P (op0
))
2531 TREE_OVERFLOW (x
) = false;
2535 case EXCESS_PRECISION_EXPR
:
2536 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2537 x
= fold_convert_loc (EXPR_LOCATION (x
), TREE_TYPE (x
), op0
);
2541 /* We don't need the decltype(auto) obfuscation anymore. */
2542 if (REF_PARENTHESIZED_P (x
))
2544 tree p
= maybe_undo_parenthesized_ref (x
);
2551 loc
= EXPR_LOCATION (x
);
2552 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), false);
2554 /* Cope with user tricks that amount to offsetof. */
2555 if (op0
!= error_mark_node
2556 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0
)))
2558 tree val
= get_base_address (op0
);
2560 && INDIRECT_REF_P (val
)
2561 && COMPLETE_TYPE_P (TREE_TYPE (val
))
2562 && TREE_CONSTANT (TREE_OPERAND (val
, 0)))
2564 val
= TREE_OPERAND (val
, 0);
2566 val
= maybe_constant_value (val
);
2567 if (TREE_CODE (val
) == INTEGER_CST
)
2568 return fold_offsetof (op0
, TREE_TYPE (x
));
2578 case FIX_TRUNC_EXPR
:
2584 case TRUTH_NOT_EXPR
:
2585 case FIXED_CONVERT_EXPR
:
2588 loc
= EXPR_LOCATION (x
);
2589 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2592 if (op0
!= TREE_OPERAND (x
, 0))
2594 if (op0
== error_mark_node
)
2595 x
= error_mark_node
;
2598 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
2599 if (code
== INDIRECT_REF
2600 && (INDIRECT_REF_P (x
) || TREE_CODE (x
) == MEM_REF
))
2602 TREE_READONLY (x
) = TREE_READONLY (org_x
);
2603 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
2604 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
2611 gcc_assert (TREE_CODE (x
) != COND_EXPR
2612 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x
, 0))));
2615 case UNARY_PLUS_EXPR
:
2616 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2617 if (op0
== error_mark_node
)
2618 x
= error_mark_node
;
2620 x
= fold_convert (TREE_TYPE (x
), op0
);
2623 case POSTDECREMENT_EXPR
:
2624 case POSTINCREMENT_EXPR
:
2626 case PREDECREMENT_EXPR
:
2627 case PREINCREMENT_EXPR
:
2632 case POINTER_PLUS_EXPR
:
2634 case POINTER_DIFF_EXPR
:
2637 case TRUNC_DIV_EXPR
:
2639 case FLOOR_DIV_EXPR
:
2640 case ROUND_DIV_EXPR
:
2641 case TRUNC_MOD_EXPR
:
2643 case ROUND_MOD_EXPR
:
2645 case EXACT_DIV_EXPR
:
2655 case TRUTH_AND_EXPR
:
2656 case TRUTH_ANDIF_EXPR
:
2658 case TRUTH_ORIF_EXPR
:
2659 case TRUTH_XOR_EXPR
:
2660 case LT_EXPR
: case LE_EXPR
:
2661 case GT_EXPR
: case GE_EXPR
:
2662 case EQ_EXPR
: case NE_EXPR
:
2663 case UNORDERED_EXPR
: case ORDERED_EXPR
:
2664 case UNLT_EXPR
: case UNLE_EXPR
:
2665 case UNGT_EXPR
: case UNGE_EXPR
:
2666 case UNEQ_EXPR
: case LTGT_EXPR
:
2667 case RANGE_EXPR
: case COMPLEX_EXPR
:
2669 loc
= EXPR_LOCATION (x
);
2670 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2671 op1
= cp_fold_rvalue (TREE_OPERAND (x
, 1));
2673 /* decltype(nullptr) has only one value, so optimize away all comparisons
2674 with that type right away, keeping them in the IL causes troubles for
2675 various optimizations. */
2676 if (COMPARISON_CLASS_P (org_x
)
2677 && TREE_CODE (TREE_TYPE (op0
)) == NULLPTR_TYPE
2678 && TREE_CODE (TREE_TYPE (op1
)) == NULLPTR_TYPE
)
2683 x
= constant_boolean_node (true, TREE_TYPE (x
));
2686 x
= constant_boolean_node (false, TREE_TYPE (x
));
2691 return omit_two_operands_loc (loc
, TREE_TYPE (x
), x
,
2695 if (op0
!= TREE_OPERAND (x
, 0) || op1
!= TREE_OPERAND (x
, 1))
2697 if (op0
== error_mark_node
|| op1
== error_mark_node
)
2698 x
= error_mark_node
;
2700 x
= fold_build2_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
);
2705 /* This is only needed for -Wnonnull-compare and only if
2706 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2707 generation, we do it always. */
2708 if (COMPARISON_CLASS_P (org_x
))
2710 if (x
== error_mark_node
|| TREE_CODE (x
) == INTEGER_CST
)
2712 else if (COMPARISON_CLASS_P (x
))
2714 if (warn_nonnull_compare
2715 && warning_suppressed_p (org_x
, OPT_Wnonnull_compare
))
2716 suppress_warning (x
, OPT_Wnonnull_compare
);
2718 /* Otherwise give up on optimizing these, let GIMPLE folders
2719 optimize those later on. */
2720 else if (op0
!= TREE_OPERAND (org_x
, 0)
2721 || op1
!= TREE_OPERAND (org_x
, 1))
2723 x
= build2_loc (loc
, code
, TREE_TYPE (org_x
), op0
, op1
);
2724 if (warn_nonnull_compare
2725 && warning_suppressed_p (org_x
, OPT_Wnonnull_compare
))
2726 suppress_warning (x
, OPT_Wnonnull_compare
);
2736 loc
= EXPR_LOCATION (x
);
2737 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2738 op1
= cp_fold (TREE_OPERAND (x
, 1));
2739 op2
= cp_fold (TREE_OPERAND (x
, 2));
2741 if (TREE_CODE (TREE_TYPE (x
)) == BOOLEAN_TYPE
)
2743 warning_sentinel
s (warn_int_in_bool_context
);
2744 if (!VOID_TYPE_P (TREE_TYPE (op1
)))
2745 op1
= cp_truthvalue_conversion (op1
, tf_warning_or_error
);
2746 if (!VOID_TYPE_P (TREE_TYPE (op2
)))
2747 op2
= cp_truthvalue_conversion (op2
, tf_warning_or_error
);
2749 else if (VOID_TYPE_P (TREE_TYPE (x
)))
2751 if (TREE_CODE (op0
) == INTEGER_CST
)
2753 /* If the condition is constant, fold can fold away
2754 the COND_EXPR. If some statement-level uses of COND_EXPR
2755 have one of the branches NULL, avoid folding crash. */
2757 op1
= build_empty_stmt (loc
);
2759 op2
= build_empty_stmt (loc
);
2763 /* Otherwise, don't bother folding a void condition, since
2764 it can't produce a constant value. */
2765 if (op0
!= TREE_OPERAND (x
, 0)
2766 || op1
!= TREE_OPERAND (x
, 1)
2767 || op2
!= TREE_OPERAND (x
, 2))
2768 x
= build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
2773 if (op0
!= TREE_OPERAND (x
, 0)
2774 || op1
!= TREE_OPERAND (x
, 1)
2775 || op2
!= TREE_OPERAND (x
, 2))
2777 if (op0
== error_mark_node
2778 || op1
== error_mark_node
2779 || op2
== error_mark_node
)
2780 x
= error_mark_node
;
2782 x
= fold_build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
2787 /* A COND_EXPR might have incompatible types in branches if one or both
2788 arms are bitfields. If folding exposed such a branch, fix it up. */
2789 if (TREE_CODE (x
) != code
2790 && x
!= error_mark_node
2791 && !useless_type_conversion_p (TREE_TYPE (org_x
), TREE_TYPE (x
)))
2792 x
= fold_convert (TREE_TYPE (org_x
), x
);
2798 tree callee
= get_callee_fndecl (x
);
2800 /* "Inline" calls to std::move/forward and other cast-like functions
2801 by simply folding them into a corresponding cast to their return
2802 type. This is cheaper than relying on the middle end to do so, and
2803 also means we avoid generating useless debug info for them at all.
2805 At this point the argument has already been converted into a
2806 reference, so it suffices to use a NOP_EXPR to express the
2808 if ((OPTION_SET_P (flag_fold_simple_inlines
)
2809 ? flag_fold_simple_inlines
2811 && call_expr_nargs (x
) == 1
2812 && decl_in_std_namespace_p (callee
)
2813 && DECL_NAME (callee
) != NULL_TREE
2814 && (id_equal (DECL_NAME (callee
), "move")
2815 || id_equal (DECL_NAME (callee
), "forward")
2816 || id_equal (DECL_NAME (callee
), "addressof")
2817 /* This addressof equivalent is used heavily in libstdc++. */
2818 || id_equal (DECL_NAME (callee
), "__addressof")
2819 || id_equal (DECL_NAME (callee
), "as_const")))
2821 r
= CALL_EXPR_ARG (x
, 0);
2822 /* Check that the return and argument types are sane before
2824 if (INDIRECT_TYPE_P (TREE_TYPE (x
))
2825 && INDIRECT_TYPE_P (TREE_TYPE (r
)))
2827 if (!same_type_p (TREE_TYPE (x
), TREE_TYPE (r
)))
2828 r
= build_nop (TREE_TYPE (x
), r
);
2834 int sv
= optimize
, nw
= sv
;
2836 /* Some built-in function calls will be evaluated at compile-time in
2837 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2838 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2839 if (callee
&& fndecl_built_in_p (callee
) && !optimize
2840 && DECL_IS_BUILTIN_CONSTANT_P (callee
)
2841 && current_function_decl
2842 && DECL_DECLARED_CONSTEXPR_P (current_function_decl
))
2845 if (callee
&& fndecl_built_in_p (callee
, BUILT_IN_FRONTEND
))
2847 switch (DECL_FE_FUNCTION_CODE (callee
))
2849 /* Defer folding __builtin_is_constant_evaluated. */
2850 case CP_BUILT_IN_IS_CONSTANT_EVALUATED
:
2852 case CP_BUILT_IN_SOURCE_LOCATION
:
2853 x
= fold_builtin_source_location (EXPR_LOCATION (x
));
2855 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER
:
2856 x
= fold_builtin_is_corresponding_member
2857 (EXPR_LOCATION (x
), call_expr_nargs (x
),
2858 &CALL_EXPR_ARG (x
, 0));
2860 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS
:
2861 x
= fold_builtin_is_pointer_inverconvertible_with_class
2862 (EXPR_LOCATION (x
), call_expr_nargs (x
),
2863 &CALL_EXPR_ARG (x
, 0));
2872 && fndecl_built_in_p (callee
, CP_BUILT_IN_SOURCE_LOCATION
,
2875 x
= fold_builtin_source_location (EXPR_LOCATION (x
));
2879 bool changed
= false;
2880 int m
= call_expr_nargs (x
);
2881 for (int i
= 0; i
< m
; i
++)
2883 r
= cp_fold (CALL_EXPR_ARG (x
, i
));
2884 if (r
!= CALL_EXPR_ARG (x
, i
))
2886 if (r
== error_mark_node
)
2888 x
= error_mark_node
;
2893 CALL_EXPR_ARG (x
, i
) = r
;
2897 if (x
== error_mark_node
)
2904 if (TREE_CODE (r
) != CALL_EXPR
)
2912 /* Invoke maybe_constant_value for functions declared
2913 constexpr and not called with AGGR_INIT_EXPRs.
2915 Do constexpr expansion of expressions where the call itself is not
2916 constant, but the call followed by an INDIRECT_REF is. */
2917 if (callee
&& DECL_DECLARED_CONSTEXPR_P (callee
)
2919 r
= maybe_constant_value (x
);
2922 if (TREE_CODE (r
) != CALL_EXPR
)
2924 if (DECL_CONSTRUCTOR_P (callee
))
2926 loc
= EXPR_LOCATION (x
);
2927 tree s
= build_fold_indirect_ref_loc (loc
,
2928 CALL_EXPR_ARG (x
, 0));
2929 r
= cp_build_init_expr (s
, r
);
2942 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (x
);
2943 vec
<constructor_elt
, va_gc
> *nelts
= NULL
;
2944 FOR_EACH_VEC_SAFE_ELT (elts
, i
, p
)
2946 tree op
= cp_fold (p
->value
);
2949 if (op
== error_mark_node
)
2951 x
= error_mark_node
;
2956 nelts
= elts
->copy ();
2957 (*nelts
)[i
].value
= op
;
2962 x
= build_constructor (TREE_TYPE (x
), nelts
);
2963 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x
)
2964 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x
);
2966 if (VECTOR_TYPE_P (TREE_TYPE (x
)))
2972 bool changed
= false;
2973 int n
= TREE_VEC_LENGTH (x
);
2975 for (int i
= 0; i
< n
; i
++)
2977 tree op
= cp_fold (TREE_VEC_ELT (x
, i
));
2978 if (op
!= TREE_VEC_ELT (x
, i
))
2982 TREE_VEC_ELT (x
, i
) = op
;
2991 case ARRAY_RANGE_REF
:
2993 loc
= EXPR_LOCATION (x
);
2994 op0
= cp_fold (TREE_OPERAND (x
, 0));
2995 op1
= cp_fold (TREE_OPERAND (x
, 1));
2996 op2
= cp_fold (TREE_OPERAND (x
, 2));
2997 op3
= cp_fold (TREE_OPERAND (x
, 3));
2999 if (op0
!= TREE_OPERAND (x
, 0)
3000 || op1
!= TREE_OPERAND (x
, 1)
3001 || op2
!= TREE_OPERAND (x
, 2)
3002 || op3
!= TREE_OPERAND (x
, 3))
3004 if (op0
== error_mark_node
3005 || op1
== error_mark_node
3006 || op2
== error_mark_node
3007 || op3
== error_mark_node
)
3008 x
= error_mark_node
;
3011 x
= build4_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
, op3
);
3012 TREE_READONLY (x
) = TREE_READONLY (org_x
);
3013 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
3014 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
3022 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3023 folding, evaluates to an invariant. In that case no need to wrap
3024 this folded tree with a SAVE_EXPR. */
3025 r
= cp_fold (TREE_OPERAND (x
, 0));
3026 if (tree_invariant_p (r
))
3031 x
= evaluate_requires_expr (x
);
3038 if (EXPR_P (x
) && TREE_CODE (x
) == code
)
3040 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
3041 copy_warning (x
, org_x
);
3044 if (!c
.evaluation_restricted_p ())
3046 fold_cache
->put (org_x
, x
);
3047 /* Prevent that we try to fold an already folded result again. */
3049 fold_cache
->put (x
, x
);
3055 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
3058 lookup_hotness_attribute (tree list
)
3060 for (; list
; list
= TREE_CHAIN (list
))
3062 tree name
= get_attribute_name (list
);
3063 if ((is_attribute_p ("hot", name
)
3064 || is_attribute_p ("cold", name
)
3065 || is_attribute_p ("likely", name
)
3066 || is_attribute_p ("unlikely", name
))
3067 && is_attribute_namespace_p ("", list
))
3073 /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
3076 remove_hotness_attribute (tree list
)
3078 for (tree
*p
= &list
; *p
; )
3081 tree name
= get_attribute_name (l
);
3082 if ((is_attribute_p ("hot", name
)
3083 || is_attribute_p ("cold", name
)
3084 || is_attribute_p ("likely", name
)
3085 || is_attribute_p ("unlikely", name
))
3086 && is_attribute_namespace_p ("", l
))
3088 *p
= TREE_CHAIN (l
);
3091 p
= &TREE_CHAIN (l
);
3096 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3100 process_stmt_hotness_attribute (tree std_attrs
, location_t attrs_loc
)
3102 if (std_attrs
== error_mark_node
)
3104 if (tree attr
= lookup_hotness_attribute (std_attrs
))
3106 tree name
= get_attribute_name (attr
);
3107 bool hot
= (is_attribute_p ("hot", name
)
3108 || is_attribute_p ("likely", name
));
3109 tree pred
= build_predict_expr (hot
? PRED_HOT_LABEL
: PRED_COLD_LABEL
,
3110 hot
? TAKEN
: NOT_TAKEN
);
3111 SET_EXPR_LOCATION (pred
, attrs_loc
);
3113 if (tree other
= lookup_hotness_attribute (TREE_CHAIN (attr
)))
3114 warning (OPT_Wattributes
, "ignoring attribute %qE after earlier %qE",
3115 get_attribute_name (other
), name
);
3116 std_attrs
= remove_hotness_attribute (std_attrs
);
3121 /* Build IFN_ASSUME internal call for assume condition ARG. */
3124 build_assume_call (location_t loc
, tree arg
)
3126 if (!processing_template_decl
)
3127 arg
= fold_build_cleanup_point_expr (TREE_TYPE (arg
), arg
);
3128 return build_call_expr_internal_loc (loc
, IFN_ASSUME
, void_type_node
,
3132 /* If [[assume (cond)]] appears on this statement, handle it. */
3135 process_stmt_assume_attribute (tree std_attrs
, tree statement
,
3136 location_t attrs_loc
)
3138 if (std_attrs
== error_mark_node
)
3140 tree attr
= lookup_attribute ("gnu", "assume", std_attrs
);
3143 /* The next token after the assume attribute is not ';'. */
3146 warning_at (attrs_loc
, OPT_Wattributes
,
3147 "%<assume%> attribute not followed by %<;%>");
3150 for (; attr
; attr
= lookup_attribute ("gnu", "assume", TREE_CHAIN (attr
)))
3152 tree args
= TREE_VALUE (attr
);
3153 int nargs
= list_length (args
);
3156 auto_diagnostic_group d
;
3157 error_at (attrs_loc
, "wrong number of arguments specified for "
3158 "%qE attribute", get_attribute_name (attr
));
3159 inform (attrs_loc
, "expected %i, found %i", 1, nargs
);
3163 tree arg
= TREE_VALUE (args
);
3164 if (!type_dependent_expression_p (arg
))
3165 arg
= contextual_conv_bool (arg
, tf_warning_or_error
);
3166 if (error_operand_p (arg
))
3168 finish_expr_stmt (build_assume_call (attrs_loc
, arg
));
3171 return remove_attribute ("gnu", "assume", std_attrs
);
3174 /* Helper of fold_builtin_source_location, return the
3175 std::source_location::__impl type after performing verification
3176 on it. LOC is used for reporting any errors. */
3179 get_source_location_impl_type (location_t loc
)
3181 tree name
= get_identifier ("source_location");
3182 tree decl
= lookup_qualified_name (std_node
, name
);
3183 if (TREE_CODE (decl
) != TYPE_DECL
)
3185 auto_diagnostic_group d
;
3186 if (decl
== error_mark_node
|| TREE_CODE (decl
) == TREE_LIST
)
3187 qualified_name_lookup_error (std_node
, name
, decl
, loc
);
3189 error_at (loc
, "%qD is not a type", decl
);
3190 return error_mark_node
;
3192 name
= get_identifier ("__impl");
3193 tree type
= TREE_TYPE (decl
);
3194 decl
= lookup_qualified_name (type
, name
);
3195 if (TREE_CODE (decl
) != TYPE_DECL
)
3197 auto_diagnostic_group d
;
3198 if (decl
== error_mark_node
|| TREE_CODE (decl
) == TREE_LIST
)
3199 qualified_name_lookup_error (type
, name
, decl
, loc
);
3201 error_at (loc
, "%qD is not a type", decl
);
3202 return error_mark_node
;
3204 type
= TREE_TYPE (decl
);
3205 if (TREE_CODE (type
) != RECORD_TYPE
)
3207 error_at (loc
, "%qD is not a class type", decl
);
3208 return error_mark_node
;
3212 for (tree field
= TYPE_FIELDS (type
);
3213 (field
= next_aggregate_field (field
)) != NULL_TREE
;
3214 field
= DECL_CHAIN (field
))
3216 if (DECL_NAME (field
) != NULL_TREE
)
3218 const char *n
= IDENTIFIER_POINTER (DECL_NAME (field
));
3219 if (strcmp (n
, "_M_file_name") == 0
3220 || strcmp (n
, "_M_function_name") == 0)
3222 if (TREE_TYPE (field
) != const_string_type_node
)
3224 error_at (loc
, "%qD does not have %<const char *%> type",
3226 return error_mark_node
;
3231 else if (strcmp (n
, "_M_line") == 0 || strcmp (n
, "_M_column") == 0)
3233 if (TREE_CODE (TREE_TYPE (field
)) != INTEGER_TYPE
)
3235 error_at (loc
, "%qD does not have integral type", field
);
3236 return error_mark_node
;
3247 error_at (loc
, "%<std::source_location::__impl%> does not contain only "
3248 "non-static data members %<_M_file_name%>, "
3249 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3250 return error_mark_node
;
3252 return build_qualified_type (type
, TYPE_QUAL_CONST
);
3255 /* Type for source_location_table hash_set. */
3256 struct GTY((for_user
)) source_location_table_entry
{
3262 /* Traits class for function start hash maps below. */
3264 struct source_location_table_entry_hash
3265 : ggc_remove
<source_location_table_entry
>
3267 typedef source_location_table_entry value_type
;
3268 typedef source_location_table_entry compare_type
;
3271 hash (const source_location_table_entry
&ref
)
3273 inchash::hash
hstate (0);
3274 hstate
.add_int (ref
.loc
);
3275 hstate
.add_int (ref
.uid
);
3276 return hstate
.end ();
3280 equal (const source_location_table_entry
&ref1
,
3281 const source_location_table_entry
&ref2
)
3283 return ref1
.loc
== ref2
.loc
&& ref1
.uid
== ref2
.uid
;
3287 mark_deleted (source_location_table_entry
&ref
)
3289 ref
.loc
= UNKNOWN_LOCATION
;
3291 ref
.var
= NULL_TREE
;
3294 static const bool empty_zero_p
= true;
3297 mark_empty (source_location_table_entry
&ref
)
3299 ref
.loc
= UNKNOWN_LOCATION
;
3301 ref
.var
= NULL_TREE
;
3305 is_deleted (const source_location_table_entry
&ref
)
3307 return (ref
.loc
== UNKNOWN_LOCATION
3309 && ref
.var
== NULL_TREE
);
3313 is_empty (const source_location_table_entry
&ref
)
3315 return (ref
.loc
== UNKNOWN_LOCATION
3317 && ref
.var
== NULL_TREE
);
3321 pch_nx (source_location_table_entry
&p
)
3323 extern void gt_pch_nx (source_location_table_entry
&);
3328 pch_nx (source_location_table_entry
&p
, gt_pointer_operator op
, void *cookie
)
3330 extern void gt_pch_nx (source_location_table_entry
*, gt_pointer_operator
,
3332 gt_pch_nx (&p
, op
, cookie
);
3336 static GTY(()) hash_table
<source_location_table_entry_hash
>
3337 *source_location_table
;
3338 static GTY(()) unsigned int source_location_id
;
3340 /* Fold __builtin_source_location () call. LOC is the location
3344 fold_builtin_source_location (location_t loc
)
3346 if (source_location_impl
== NULL_TREE
)
3348 auto_diagnostic_group d
;
3349 source_location_impl
= get_source_location_impl_type (loc
);
3350 if (source_location_impl
== error_mark_node
)
3351 inform (loc
, "evaluating %qs", "__builtin_source_location");
3353 if (source_location_impl
== error_mark_node
)
3354 return build_zero_cst (const_ptr_type_node
);
3355 if (source_location_table
== NULL
)
3356 source_location_table
3357 = hash_table
<source_location_table_entry_hash
>::create_ggc (64);
3358 const line_map_ordinary
*map
;
3359 source_location_table_entry entry
;
3361 = linemap_resolve_location (line_table
, loc
, LRK_MACRO_EXPANSION_POINT
,
3363 entry
.uid
= current_function_decl
? DECL_UID (current_function_decl
) : -1;
3364 entry
.var
= error_mark_node
;
3365 source_location_table_entry
*entryp
3366 = source_location_table
->find_slot (entry
, INSERT
);
3373 ASM_GENERATE_INTERNAL_LABEL (tmp_name
, "Lsrc_loc", source_location_id
++);
3374 var
= build_decl (loc
, VAR_DECL
, get_identifier (tmp_name
),
3375 source_location_impl
);
3376 TREE_STATIC (var
) = 1;
3377 TREE_PUBLIC (var
) = 0;
3378 DECL_ARTIFICIAL (var
) = 1;
3379 DECL_IGNORED_P (var
) = 1;
3380 DECL_EXTERNAL (var
) = 0;
3381 DECL_DECLARED_CONSTEXPR_P (var
) = 1;
3382 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var
) = 1;
3383 layout_decl (var
, 0);
3385 vec
<constructor_elt
, va_gc
> *v
= NULL
;
3387 for (tree field
= TYPE_FIELDS (source_location_impl
);
3388 (field
= next_aggregate_field (field
)) != NULL_TREE
;
3389 field
= DECL_CHAIN (field
))
3391 const char *n
= IDENTIFIER_POINTER (DECL_NAME (field
));
3392 tree val
= NULL_TREE
;
3393 if (strcmp (n
, "_M_file_name") == 0)
3395 if (const char *fname
= LOCATION_FILE (loc
))
3397 fname
= remap_macro_filename (fname
);
3398 val
= build_string_literal (fname
);
3401 val
= build_string_literal ("");
3403 else if (strcmp (n
, "_M_function_name") == 0)
3405 const char *name
= "";
3407 if (current_function_decl
)
3408 name
= cxx_printable_name (current_function_decl
, 2);
3410 val
= build_string_literal (name
);
3412 else if (strcmp (n
, "_M_line") == 0)
3413 val
= build_int_cst (TREE_TYPE (field
), LOCATION_LINE (loc
));
3414 else if (strcmp (n
, "_M_column") == 0)
3415 val
= build_int_cst (TREE_TYPE (field
), LOCATION_COLUMN (loc
));
3418 CONSTRUCTOR_APPEND_ELT (v
, field
, val
);
3421 tree ctor
= build_constructor (source_location_impl
, v
);
3422 TREE_CONSTANT (ctor
) = 1;
3423 TREE_STATIC (ctor
) = 1;
3424 DECL_INITIAL (var
) = ctor
;
3425 varpool_node::finalize_decl (var
);
3430 return build_fold_addr_expr_with_type_loc (loc
, var
, const_ptr_type_node
);
3433 #include "gt-cp-cp-gimplify.h"