/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"
/* Keep track of forward references to immediate-escalating functions in
   case they become consteval.  This vector contains ADDR_EXPRs and
   PTRMEM_CSTs; it also stores FUNCTION_DECLs that had an escalating
   function call in them, to check that they can be evaluated to a constant,
   and immediate-escalating functions that may become consteval.  */
static GTY(()) hash_set<tree> *deferred_escalating_exprs;

static void
remember_escalating_expr (tree t)
{
  if (!deferred_escalating_exprs)
    deferred_escalating_exprs = hash_set<tree>::create_ggc (37);
  deferred_escalating_exprs->add (t);
}
/* Flags for cp_fold and cp_fold_r.  */

enum fold_flags {
  ff_none = 0,
  /* Whether we're being called from cp_fold_function.  */
  ff_genericize = 1 << 0,
  /* Whether we're folding a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */
  ff_mce_false = 1 << 1,
};

using fold_flags_t = int;

struct cp_fold_data
{
  hash_set<tree> pset;
  fold_flags_t flags;
  cp_fold_data (fold_flags_t flags): flags (flags) {}
};
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree, fold_flags_t);
static tree cp_fold_immediate_r (tree *, int *, void *);
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
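/* An illustrative sketch (editorial note, not from the original source)
   of the tree built above:

     TRY_CATCH_EXPR
       op0: BODY
       op1: EH_FILTER_EXPR
              EH_FILTER_TYPES: ALLOWED
              EH_FILTER_FAILURE: FAILURE

   i.e. an exception escaping BODY that does not match ALLOWED runs
   FAILURE.  */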
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
/* Return the first non-compound statement in STMT.  */

static tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
        return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
          && TREE_CODE (fe) == PREDICT_EXPR
          && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
          && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
        {
          gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
          richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
          warning_at (&richloc, OPT_Wattributes,
                      "both branches of %<if%> statement marked as %qs",
                      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
        }
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
        stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
                       void_node, else_);
      else
        stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
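/* An illustrative note (not from the original source): for

     if consteval { f (); } else { g (); }

   IF_STMT_CONSTEVAL_P is set and IF_COND is boolean_false_node; the code
   above keeps only else_, or, when the discarded then_ block may fall
   through, COND_EXPR (false, void_node, else_), presumably so the
   lowered statement's fallthrough behavior matches the original if.  */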
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !warning_suppressed_p (stmt, OPT_Wunused_value))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
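/* For example (illustrative only): the full-expression statement
   "x == 1;" has no side effects and is not an empty statement, so the
   code above emits "statement with no effect" before the statement is
   discarded.  */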
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
        /* Make sure that we expected to elide this temporary.  But also allow
           gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
        gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
                             || !TREE_ADDRESSABLE (TREE_TYPE (from)));
        if (target_expr_needs_replace (from))
          {
            /* If this was changed by cp_genericize_target_expr, we need to
               walk into it to replace uses of the slot.  */
            replace_decl (&init, TARGET_EXPR_SLOT (from), to);
            *expr_p = init;
            return;
          }
        else
          from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (call_terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
          || lambda_static_thunk_p (fn))
        /* In a thunk, we pass through invisible reference parms, so this isn't
           a copy.  */
        return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
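/* As an illustration (not part of the original source):

     struct A {};
     void f (A &a1, A &a2) { a1 = a2; }

   copying A copies no bits, so the assignment can be reduced to mere
   evaluation of its operands; this predicate detects such cases.  */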
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
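/* E.g. (illustrative only): evaluating "a[i++]" as an lvalue has
   side-effects because of the index expression, while merely naming a
   volatile variable does not, even though TREE_SIDE_EFFECTS is set on
   it.  */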
/* Return true if FN is an immediate-escalating function.  */

static bool
immediate_escalating_function_p (tree fn)
{
  if (!fn || !flag_immediate_escalation)
    return false;

  gcc_checking_assert (TREE_CODE (fn) == FUNCTION_DECL);

  if (DECL_IMMEDIATE_FUNCTION_P (fn))
    return false;

  /* An immediate-escalating function is
      -- the call operator of a lambda that is not declared with the consteval
         specifier  */
  if (LAMBDA_FUNCTION_P (fn))
    return true;
  /* -- a defaulted special member function that is not declared with the
        consteval specifier  */
  special_function_kind sfk = special_memfn_p (fn);
  if (sfk != sfk_none && DECL_DEFAULTED_FN (fn))
    return true;
  /* -- a function that results from the instantiation of a templated entity
        defined with the constexpr specifier.  */
  return is_instantiation_of_constexpr (fn);
}
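/* As an illustration (mirroring the example in cp_fold_immediate_r
   below):

     consteval int id (int i) { return i; }
     template <typename T>
     constexpr int f (T t) { return id (t); }

   f<int> is immediate-escalating: it is the instantiation of a
   constexpr template, and its call to id can promote it to
   consteval.  */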
/* Return true if FN is an immediate-escalating function that has not been
   checked for escalating expressions.  */

static bool
unchecked_immediate_escalating_function_p (tree fn)
{
  return (immediate_escalating_function_p (fn)
          && !DECL_ESCALATION_CHECKED_P (fn));
}
/* Promote FN to an immediate function, including its clones.  */

static void
promote_function_to_consteval (tree fn)
{
  SET_DECL_IMMEDIATE_FUNCTION_P (fn);
  DECL_ESCALATION_CHECKED_P (fn) = true;
  tree clone;
  FOR_EACH_CLONE (clone, fn)
    {
      SET_DECL_IMMEDIATE_FUNCTION_P (clone);
      DECL_ESCALATION_CHECKED_P (clone) = true;
    }
}
/* A wrapper around cp_fold_immediate_r.  Return a non-null tree if
   we found a non-constant immediate function, or taking the address
   of an immediate function.  */

tree
cp_fold_immediate (tree *tp, mce_value manifestly_const_eval,
                   tree decl /*= current_function_decl*/)
{
  if (cxx_dialect <= cxx17)
    return NULL_TREE;

  temp_override<tree> cfd (current_function_decl, decl);

  fold_flags_t flags = ff_none;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;

  cp_fold_data data (flags);
  int save_errorcount = errorcount;
  tree r = cp_walk_tree_without_duplicates (tp, cp_fold_immediate_r, &data);
  if (errorcount > save_errorcount)
    return integer_one_node;
  return r;
}
/* Maybe say that FN (a function decl with DECL_IMMEDIATE_FUNCTION_P set)
   was initially not an immediate function, but was promoted to one because
   its body contained an immediate-escalating expression or conversion.  */

void
maybe_explain_promoted_consteval (location_t loc, tree fn)
{
  if (DECL_ESCALATION_CHECKED_P (fn))
    {
      /* See if we can figure out what made the function consteval.  */
      tree x = cp_fold_immediate (&DECL_SAVED_TREE (fn), mce_unknown,
                                  NULL_TREE);
      if (x)
        inform (cp_expr_loc_or_loc (x, loc),
                "%qD was promoted to an immediate function because its "
                "body contains an immediate-escalating expression %qE", fn, x);
      else
        inform (loc, "%qD was promoted to an immediate function", fn);
    }
}
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
                 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
         that can mean we don't copy the argument and some following
         argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
        return GS_ERROR;
      else if (ordered
               && is_gimple_reg_type (TREE_TYPE (*arg_p))
               && is_gimple_variable (*arg_p)
               && TREE_CODE (*arg_p) != SSA_NAME
               /* No need to force references into register, references
                  can't be modified.  */
               && !TYPE_REF_P (TREE_TYPE (*arg_p))
               /* And this can't be modified either.  */
               && *arg_p != current_class_ptr)
        *arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        *expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
                                        tf_warning_or_error);

        cp_fold_data data (ff_genericize | ff_mce_false);
        cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
        cp_genericize_tree (expr_p, false);
        copy_if_shared (expr_p);
        ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
          {
            while (TREE_CODE (op1) == TARGET_EXPR)
              /* We're disconnecting the initializer from its target,
                 don't create a temporary.  */
              op1 = TARGET_EXPR_INITIAL (op1);

            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
            if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
              /* Avoid 'return *<retval>;'  */
              *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
              mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
              LHS, so for scalar assignment we need to preevaluate if the
              RHS could be affected by LHS side-effects even if it has no
              side-effects of its own.  We don't need this for classes because
              class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;
    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;
    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;
    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
          && CALL_EXPR_FN (*expr_p)
          && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
          && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
        {
          tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          enum gimplify_status t
            = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                                  is_gimple_call_addr);
          if (t == GS_ERROR)
            ret = GS_ERROR;
          /* GIMPLE considers most pointer conversion useless, but for
             calls we actually care about the exact function pointer type.  */
          else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
            CALL_EXPR_FN (*expr_p)
              = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
        }
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
                               TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          int last_side_effects_arg = -1;
          for (int i = nargs; i > 0; --i)
            if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
              {
                last_side_effects_arg = i;
                break;
              }
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
                                   i < last_side_effects_arg);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              int nargs = call_expr_nargs (*expr_p);
              bool side_effects = false;
              for (int i = 1; i < nargs; ++i)
                if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
                  {
                    side_effects = true;
                    break;
                  }
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
                                   side_effects);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (!decl)
            break;
          if (fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
            switch (DECL_FE_FUNCTION_CODE (decl))
              {
              case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
                *expr_p = boolean_false_node;
                break;
              case CP_BUILT_IN_SOURCE_LOCATION:
                *expr_p
                  = fold_builtin_source_location (*expr_p);
                break;
              case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
                *expr_p
                  = fold_builtin_is_corresponding_member
                        (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                         &CALL_EXPR_ARG (*expr_p, 0));
                break;
              case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
                *expr_p
                  = fold_builtin_is_pointer_inverconvertible_with_class
                        (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                         &CALL_EXPR_ARG (*expr_p, 0));
                break;
              default:
                break;
              }
          else if (fndecl_built_in_p (decl, BUILT_IN_CLZG, BUILT_IN_CTZG))
            ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p,
                                                          post_p);
          else
            /* All consteval functions should have been processed by now.  */
            gcc_checking_assert (!immediate_invocation_p (decl));
        }
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
         elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      /* Likewise, but allow extra temps of trivial type so that
         gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
         on the rhs of an assignment, as in constexpr-aggr1.C.  */
      gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
                           || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
        ret = GS_ERROR;
      else
        ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
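/* An editorial note (not from the original source): keying the splay
   tree on DECL_UID rather than on pointer values keeps its ordering
   stable from run to run, so anything derived from traversal order is
   reproducible.  */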
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
/* True if any of the element initializers in CTOR are TARGET_EXPRs that are
   not expected to elide, e.g. because unsafe_copy_elision_p is true.  */

static bool
any_non_eliding_target_exprs (tree ctor)
{
  for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
    {
      if (TREE_CODE (e.value) == TARGET_EXPR
          && !TARGET_EXPR_ELIDING_P (e.value))
        return true;
    }
  return false;
}
/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    init = expand_vec_init_expr (to, from, tf_warning_or_error);
  else if (TREE_CODE (from) == CONSTRUCTOR
           && TREE_SIDE_EFFECTS (from)
           && ((flag_exceptions
                && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
               || any_non_eliding_target_exprs (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
        /* Make cp_gimplify_init_expr call replace_decl on this
           TARGET_EXPR_INITIAL.  */
        init = fold_convert (void_type_node, init);
      *replace = init;
    }
}
/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}
/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
                      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}
/* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
   TARGET_EXPR_INITIAL, and this also updates *_SLOT.  We need this extra
   replacement when cp_folding TARGET_EXPR to preserve the invariant that
   AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT.  */

static void
maybe_replace_decl (tree *tp, tree decl, tree replacement)
{
  if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
    return;
  tree t = *tp;
  while (TREE_CODE (t) == COMPOUND_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == AGGR_INIT_EXPR)
    replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
  else if (TREE_CODE (t) == VEC_INIT_EXPR)
    replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
  else
    replace_decl (tp, decl, replacement);
}
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Emit an error about taking the address of an immediate function.
   EXPR is the whole expression; DECL is the immediate function.  */

static void
taking_address_of_imm_fn_error (tree expr, tree decl)
{
  auto_diagnostic_group d;
  const location_t loc = (TREE_CODE (expr) == PTRMEM_CST
                          ? PTRMEM_CST_LOCATION (expr)
                          : EXPR_LOCATION (expr));
  error_at (loc, "taking address of an immediate function %qD", decl);
  maybe_explain_promoted_consteval (loc, decl);
}
/* A subroutine of cp_fold_r to handle immediate functions.  */

static tree
cp_fold_immediate_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  auto data = static_cast<cp_fold_data *>(data_);
  tree stmt = *stmt_p;
  /* The purpose of this is not to emit errors for mce_unknown.  */
  const tsubst_flags_t complain = (data->flags & ff_mce_false
                                   ? tf_error : tf_none);
  const tree_code code = TREE_CODE (stmt);

  /* No need to look into types or unevaluated operands.
     NB: This affects cp_fold_r as well.  */
  if (TYPE_P (stmt)
      || unevaluated_p (code)
      /* We do not use in_immediate_context here because it checks
         more than is desirable, e.g., sk_template_parms.  */
      || cp_unevaluated_operand
      || (current_function_decl
          && DECL_IMMEDIATE_FUNCTION_P (current_function_decl)))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  tree decl = NULL_TREE;
  bool call_p = false;

  /* We are looking for &fn or fn().  */
  switch (code)
    {
    case CALL_EXPR:
    case AGGR_INIT_EXPR:
      if (tree fn = cp_get_callee (stmt))
        if (TREE_CODE (fn) != ADDR_EXPR || ADDR_EXPR_DENOTES_CALL_P (fn))
          decl = cp_get_fndecl_from_callee (fn, /*fold*/false);
      call_p = true;
      break;
    case PTRMEM_CST:
      decl = PTRMEM_CST_MEMBER (stmt);
      break;
    case ADDR_EXPR:
      if (!ADDR_EXPR_DENOTES_CALL_P (stmt))
        decl = TREE_OPERAND (stmt, 0);
      break;
    default:
      break;
    }

  if (!decl || TREE_CODE (decl) != FUNCTION_DECL)
    return NULL_TREE;

  /* Fully escalate once all templates have been instantiated.  What we're
     calling is not a consteval function but it may become one.  This
     requires recursing; DECL may be promoted to consteval because it
     contains an escalating expression E, but E itself may have to be
     promoted first, etc.  */
  if (at_eof > 1 && unchecked_immediate_escalating_function_p (decl))
    {
      /* Set before the actual walk to avoid endless recursion.  */
      DECL_ESCALATION_CHECKED_P (decl) = true;
      /* We're only looking for the first escalating expression.  Let us not
         walk more trees than necessary, hence mce_unknown.  */
      cp_fold_immediate (&DECL_SAVED_TREE (decl), mce_unknown, decl);
    }

  /* [expr.const]p16 "An expression or conversion is immediate-escalating if
     it is not initially in an immediate function context and it is either
     -- an immediate invocation that is not a constant expression and is not
     a subexpression of an immediate invocation."

     If we are in an immediate-escalating function, the immediate-escalating
     expression or conversion makes it an immediate function.  So STMT does
     not need to produce a constant expression.  */
  if (DECL_IMMEDIATE_FUNCTION_P (decl))
    {
      tree e = cxx_constant_value (stmt, tf_none);
      if (e == error_mark_node)
        {
          /* This takes care of, e.g.,
              template <typename T>
              constexpr int f(T t)
              {
                return id(t);
              }
             where id (consteval) causes f<int> to be promoted.  */
          if (immediate_escalating_function_p (current_function_decl))
            promote_function_to_consteval (current_function_decl);
          else if (complain & tf_error)
            {
              if (call_p)
                {
                  auto_diagnostic_group d;
                  location_t loc = cp_expr_loc_or_input_loc (stmt);
                  error_at (loc, "call to consteval function %qE is "
                            "not a constant expression", stmt);
                  /* Explain why it's not a constant expression.  */
                  *stmt_p = cxx_constant_value (stmt, complain);
                  maybe_explain_promoted_consteval (loc, decl);
                }
              else if (!data->pset.add (stmt))
                {
                  taking_address_of_imm_fn_error (stmt, decl);
                  *stmt_p = build_zero_cst (TREE_TYPE (stmt));
                }
              /* If we're giving hard errors, continue the walk rather than
                 bailing out after the first error.  */
              return NULL_TREE;
            }
          return stmt;
        }
      /* We've evaluated the consteval function call.  */
      if (call_p)
        *stmt_p = e;
    }
  /* We've encountered a function call that may turn out to be consteval
     later.  Store its caller so that we can ensure that the call is
     a constant expression.  */
  else if (unchecked_immediate_escalating_function_p (decl))
    {
      /* Make sure we're not inserting new elements while walking
         the deferred_escalating_exprs hash table; if we are, it's
         likely that a function wasn't properly marked checked for
         its escalating expressions.  */
      gcc_checking_assert (at_eof <= 1);
      if (current_function_decl)
        remember_escalating_expr (current_function_decl);
      /* auto p = &f<int>; in the global scope won't be ensconced in
         a function we could store for later at this point.  (If there's
         no current_function_decl at this point and we're dealing with a
         call, we should see the call when cp_fold_function processes
         __static_i_and_d.)  */
      else if (!call_p)
        remember_escalating_expr (stmt);
    }

  return NULL_TREE;
}
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-omp cases is something to move into
     the middle-end.  As for now we have most foldings only on GENERIC
     in fold-const, we need to perform this before transformation to
     GIMPLE-form.

   ??? This is algorithmically weird because walk_tree works in pre-order, so
   we see outer expressions before inner expressions.  This isn't as much of an
   issue because cp_fold recurses into subexpressions in many cases, but then
   walk_tree walks back into those subexpressions again.  We avoid the
   resulting complexity problem by caching the result of cp_fold, but it's
   inelegant.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data *) data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  if (cxx_dialect >= cxx20)
    {
      /* Unfortunately we must handle code like
           false ? bar () : 42
         where we have to check bar too.  The cp_fold call below could
         fold the ?: into a constant before we've checked it.  */
      if (code == COND_EXPR)
        {
          auto then_fn = cp_fold_r, else_fn = cp_fold_r;
          /* See if we can figure out if either of the branches is dead.  If it
             is, we don't need to do everything that cp_fold_r does.  */
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_fold_r, data, nullptr);
          if (integer_zerop (TREE_OPERAND (stmt, 0)))
            then_fn = cp_fold_immediate_r;
          else if (integer_nonzerop (TREE_OPERAND (stmt, 0)))
            else_fn = cp_fold_immediate_r;

          if (TREE_OPERAND (stmt, 1))
            cp_walk_tree (&TREE_OPERAND (stmt, 1), then_fn, data,
                          nullptr);
          if (TREE_OPERAND (stmt, 2))
            cp_walk_tree (&TREE_OPERAND (stmt, 2), else_fn, data,
                          nullptr);
          *walk_subtrees = 0;
          /* Don't return yet, still need the cp_fold below.  */
        }
      else
        cp_fold_immediate_r (stmt_p, walk_subtrees, data);
    }

  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);

  /* For certain trees, like +foo(), the cp_fold above will remove the +,
     and the subsequent tree walk would go straight down to the CALL_EXPR's
     operands, meaning that cp_fold_immediate_r would never see the
     CALL_EXPR.  Ew :(.  */
  if (TREE_CODE (stmt) == CALL_EXPR && code != CALL_EXPR)
    cp_fold_immediate_r (stmt_p, walk_subtrees, data);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
         always the same tree, which the first time cp_fold_r has been
         called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL_TREE;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
        {
          /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
             boolean_false_node.  */
          cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
          cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
          *walk_subtrees = 0;
          return NULL_TREE;
        }
      break;

      /* cp_genericize_{init,target}_expr are only for genericize time; they're
         here rather than in cp_genericize to avoid problems with the invisible
         reference transition.  */
    case INIT_EXPR:
      if (data->flags & ff_genericize)
        cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->flags & ff_genericize)
        cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
         that case, strip it in favor of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
        {
          cp_walk_tree (&init, cp_fold_r, data, NULL);
          cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
          *walk_subtrees = 0;
          if (TREE_CODE (init) == TARGET_EXPR)
            {
              tree sub = TARGET_EXPR_INITIAL (init);
              maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
                                  TARGET_EXPR_SLOT (stmt));
              init = sub;
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  /* By now all manifestly-constant-evaluated expressions will have
     been constant-evaluated already if possible, so we can safely
     pass ff_mce_false.  */
  cp_fold_data data (ff_genericize | ff_mce_false);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);

  /* This is merely an optimization: if FNDECL has no immediate-escalating
     expressions, we'll not save current_function_decl, and we can safely
     say that FNDECL will not be promoted to consteval.  */
  if (deferred_escalating_exprs
      && !deferred_escalating_exprs->contains (current_function_decl))
    DECL_ESCALATION_CHECKED_P (fndecl) = true;
}
/* We've stashed immediate-escalating functions.  Now see if they indeed
   ought to be promoted to consteval.  */

void
process_and_check_pending_immediate_escalating_fns ()
{
  /* This will be null for -fno-immediate-escalation.  */
  if (!deferred_escalating_exprs)
    return;

  for (auto e : *deferred_escalating_exprs)
    if (TREE_CODE (e) == FUNCTION_DECL && !DECL_ESCALATION_CHECKED_P (e))
      cp_fold_immediate (&DECL_SAVED_TREE (e), mce_false, e);

  /* We've escalated every function that could have been promoted to
     consteval.  Check that we are not taking the address of a consteval
     function.  */
  for (auto e : *deferred_escalating_exprs)
    {
      if (TREE_CODE (e) == FUNCTION_DECL)
        continue;
      tree decl = (TREE_CODE (e) == PTRMEM_CST
                   ? PTRMEM_CST_MEMBER (e)
                   : TREE_OPERAND (e, 0));
      if (DECL_IMMEDIATE_FUNCTION_P (decl))
        taking_address_of_imm_fn_error (e, decl);
    }

  deferred_escalating_exprs = nullptr;
}
/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}
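/* An illustrative note (not from the original source; the actual
   lowering is in the genericize_spaceship overload called above):
   "a <=> b" on scalars with a std::strong_ordering result is expanded
   into a conditional chain selecting the less/equal/greater constant
   values.  */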
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

static tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
        return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
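/* For example (illustrative only, cf. c++/88256): a cast like
   "(int (*)[n]) p" involves the anonymous VLA type int[n]; prepending a
   DECL_EXPR for that type lets gimplify_type_sizes process its size
   expression instead of confusing the middle end.  */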
1582 /* Perform any pre-gimplification lowering of C++ front end trees to
1586 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
1588 tree stmt
= *stmt_p
;
1589 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
1590 hash_set
<tree
> *p_set
= wtd
->p_set
;
1592 /* If in an OpenMP context, note var uses. */
1593 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1595 || TREE_CODE (stmt
) == PARM_DECL
1596 || TREE_CODE (stmt
) == RESULT_DECL
)
1597 && omp_var_to_track (stmt
))
1598 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
1600 /* Don't dereference parms in a thunk, pass the references through. */
1601 if ((TREE_CODE (stmt
) == CALL_EXPR
&& call_from_lambda_thunk_p (stmt
))
1602 || (TREE_CODE (stmt
) == AGGR_INIT_EXPR
&& AGGR_INIT_FROM_THUNK_P (stmt
)))
1608 /* Dereference invisible reference parms. */
1609 if (wtd
->handle_invisiref_parm_p
&& is_invisiref_parm (stmt
))
1611 *stmt_p
= convert_from_reference (stmt
);
1612 p_set
->add (*stmt_p
);
1617 /* Map block scope extern declarations to visible declarations with the
1618 same name and type in outer scopes if any. */
1619 if (VAR_OR_FUNCTION_DECL_P (stmt
) && DECL_LOCAL_DECL_P (stmt
))
1620 if (tree alias
= DECL_LOCAL_DECL_ALIAS (stmt
))
1622 if (alias
!= error_mark_node
)
1625 TREE_USED (alias
) |= TREE_USED (stmt
);
1631 if (TREE_CODE (stmt
) == INTEGER_CST
1632 && TYPE_REF_P (TREE_TYPE (stmt
))
1633 && (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1634 && !wtd
->no_sanitize_p
)
1636 ubsan_maybe_instrument_reference (stmt_p
);
1637 if (*stmt_p
!= stmt
)
1644 /* Other than invisiref parms, don't walk the same tree twice. */
1645 if (p_set
->contains (stmt
))
1651 switch (TREE_CODE (stmt
))
1654 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1656 /* If in an OpenMP context, note var uses. */
1657 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1658 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
1659 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
1660 *stmt_p
= fold_convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
1666 if (TREE_OPERAND (stmt
, 0))
1668 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1669 /* Don't dereference an invisiref RESULT_DECL inside a
1672 if (RETURN_EXPR_LOCAL_ADDR_P (stmt
))
1674 /* Don't return the address of a local variable. */
1675 tree
*p
= &TREE_OPERAND (stmt
, 0);
1676 while (TREE_CODE (*p
) == COMPOUND_EXPR
)
1677 p
= &TREE_OPERAND (*p
, 0);
1678 if (TREE_CODE (*p
) == INIT_EXPR
)
1680 tree op
= TREE_OPERAND (*p
, 1);
1681 tree new_op
= build2 (COMPOUND_EXPR
, TREE_TYPE (op
), op
,
1682 build_zero_cst (TREE_TYPE (op
)));
1683 TREE_OPERAND (*p
, 1) = new_op
;
1690 switch (OMP_CLAUSE_CODE (stmt
))
1692 case OMP_CLAUSE_LASTPRIVATE
:
1693 /* Don't dereference an invisiref in OpenMP clauses. */
1694 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1697 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
1698 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
1699 cp_genericize_r
, data
, NULL
);
1702 case OMP_CLAUSE_PRIVATE
:
1703 /* Don't dereference an invisiref in OpenMP clauses. */
1704 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1706 else if (wtd
->omp_ctx
!= NULL
)
1708 /* Private clause doesn't cause any references to the
1709 var in outer contexts, avoid calling
1710 omp_cxx_notice_variable for it. */
1711 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
1712 wtd
->omp_ctx
= NULL
;
1713 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1719 case OMP_CLAUSE_SHARED
:
1720 case OMP_CLAUSE_FIRSTPRIVATE
:
1721 case OMP_CLAUSE_COPYIN
:
1722 case OMP_CLAUSE_COPYPRIVATE
:
1723 case OMP_CLAUSE_INCLUSIVE
:
1724 case OMP_CLAUSE_EXCLUSIVE
:
1725 /* Don't dereference an invisiref in OpenMP clauses. */
1726 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1729 case OMP_CLAUSE_REDUCTION
:
1730 case OMP_CLAUSE_IN_REDUCTION
:
1731 case OMP_CLAUSE_TASK_REDUCTION
:
1732 /* Don't dereference an invisiref in reduction clause's
1733 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1734 still needs to be genericized. */
1735 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1738 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1739 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1740 cp_genericize_r
, data
, NULL
);
1741 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1742 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1743 cp_genericize_r
, data
, NULL
);
1751 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1752 to lower this construct before scanning it, so we need to lower these
1753 before doing anything else. */
1755 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1756 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1759 CLEANUP_BODY (stmt
),
1760 CLEANUP_EXPR (stmt
));
1764 genericize_if_stmt (stmt_p
);
1765 /* *stmt_p has changed, tail recurse to handle it again. */
1766 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1768 /* COND_EXPR might have incompatible types in branches if one or both
1769 arms are bitfields. Fix it up now. */
1773 = (TREE_OPERAND (stmt
, 1)
1774 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1777 = (TREE_OPERAND (stmt
, 2)
1778 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1781 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1782 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1784 TREE_OPERAND (stmt
, 1)
1785 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1786 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1790 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1791 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1793 TREE_OPERAND (stmt
, 2)
1794 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1795 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1802 if (UNLIKELY (wtd
->omp_ctx
!= NULL
))
1805 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1807 && !DECL_EXTERNAL (decl
)
1808 && omp_var_to_track (decl
))
1811 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1812 (splay_tree_key
) decl
);
1814 splay_tree_insert (wtd
->omp_ctx
->variables
,
1815 (splay_tree_key
) decl
,
1817 ? OMP_CLAUSE_DEFAULT_SHARED
1818 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1821 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1823 /* The point here is to not sanitize static initializers. */
1824 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1825 wtd
->no_sanitize_p
= true;
1826 for (tree decl
= BIND_EXPR_VARS (stmt
);
1828 decl
= DECL_CHAIN (decl
))
1830 && TREE_STATIC (decl
)
1831 && DECL_INITIAL (decl
))
1832 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1833 wtd
->no_sanitize_p
= no_sanitize_p
;
1835 wtd
->bind_expr_stack
.safe_push (stmt
);
1836 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1837 cp_genericize_r
, data
, NULL
);
1838 wtd
->bind_expr_stack
.pop ();
1841 case ASSERTION_STMT
:
1842 case PRECONDITION_STMT
:
1843 case POSTCONDITION_STMT
:
1845 if (tree check
= build_contract_check (stmt
))
1848 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1851 /* If we didn't build a check, replace it with void_node so we don't
1852 leak contracts into GENERIC. */
1853 *stmt_p
= void_node
;
1860 tree block
= NULL_TREE
;
1862 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1863 BLOCK, and append an IMPORTED_DECL to its
1864 BLOCK_VARS chained list. */
1865 if (wtd
->bind_expr_stack
.exists ())
1868 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1869 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1874 tree decl
= TREE_OPERAND (stmt
, 0);
1877 if (undeduced_auto_decl (decl
))
1878 /* Omit from the GENERIC, the back-end can't handle it. */;
1881 tree using_directive
= make_node (IMPORTED_DECL
);
1882 TREE_TYPE (using_directive
) = void_type_node
;
1883 DECL_CONTEXT (using_directive
) = current_function_decl
;
1884 DECL_SOURCE_LOCATION (using_directive
)
1885 = cp_expr_loc_or_input_loc (stmt
);
1887 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
) = decl
;
1888 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1889 BLOCK_VARS (block
) = using_directive
;
1892 /* The USING_STMT won't appear in GENERIC. */
1893 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1899 if (TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1901 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1902 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1907 tree d
= DECL_EXPR_DECL (stmt
);
1909 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1917 struct cp_genericize_omp_taskreg omp_ctx
;
1922 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1923 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1924 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1925 omp_ctx
.outer
= wtd
->omp_ctx
;
1926 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1927 wtd
->omp_ctx
= &omp_ctx
;
1928 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1929 switch (OMP_CLAUSE_CODE (c
))
1931 case OMP_CLAUSE_SHARED
:
1932 case OMP_CLAUSE_PRIVATE
:
1933 case OMP_CLAUSE_FIRSTPRIVATE
:
1934 case OMP_CLAUSE_LASTPRIVATE
:
1935 decl
= OMP_CLAUSE_DECL (c
);
1936 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1938 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1941 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1942 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1943 ? OMP_CLAUSE_DEFAULT_SHARED
1944 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1945 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
&& omp_ctx
.outer
)
1946 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1948 case OMP_CLAUSE_DEFAULT
:
1949 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1950 omp_ctx
.default_shared
= true;
1954 if (TREE_CODE (stmt
) == OMP_TASKLOOP
)
1955 c_genericize_control_stmt (stmt_p
, walk_subtrees
, data
,
1956 cp_genericize_r
, cp_walk_subtrees
);
1958 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1959 wtd
->omp_ctx
= omp_ctx
.outer
;
1960 splay_tree_delete (omp_ctx
.variables
);
1965 cfun
->has_omp_target
= true;
1971 tree try_block
= wtd
->try_block
;
1972 wtd
->try_block
= stmt
;
1973 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1974 wtd
->try_block
= try_block
;
1975 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1979 case MUST_NOT_THROW_EXPR
:
1980 /* MUST_NOT_THROW_COND might be something else with TM. */
1981 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
1984 tree try_block
= wtd
->try_block
;
1985 wtd
->try_block
= stmt
;
1986 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1987 wtd
->try_block
= try_block
;
1993 location_t loc
= location_of (stmt
);
1994 if (warning_suppressed_p (stmt
/* What warning? */))
1996 else if (wtd
->try_block
)
1998 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
)
2000 auto_diagnostic_group d
;
2001 if (warning_at (loc
, OPT_Wterminate
,
2002 "%<throw%> will always call %<terminate%>")
2003 && cxx_dialect
>= cxx11
2004 && DECL_DESTRUCTOR_P (current_function_decl
))
2005 inform (loc
, "in C++11 destructors default to %<noexcept%>");
2010 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
2011 && DECL_DESTRUCTOR_P (current_function_decl
)
2012 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
2014 && (get_defaulted_eh_spec (current_function_decl
)
2015 == empty_except_spec
))
2016 warning_at (loc
, OPT_Wc__11_compat
,
2017 "in C++11 this %<throw%> will call %<terminate%> "
2018 "because destructors default to %<noexcept%>");
2024 gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt
)));
2025 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
2028 case SPACESHIP_EXPR
:
2029 *stmt_p
= genericize_spaceship (*stmt_p
);
2033 /* By the time we get here we're handing off to the back end, so we don't
2034 need or want to preserve PTRMEM_CST anymore. */
2035 *stmt_p
= cplus_expand_constant (stmt
);
2040 /* For MEM_REF, make sure not to sanitize the second operand even
2041 if it has reference type. It is just an offset with a type
2042 holding other information. There is no other processing we
2043 need to do for INTEGER_CSTs, so just ignore the second argument
2045 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
2050 *stmt_p
= predeclare_vla (*stmt_p
);
2052 /* Warn of new allocations that are not big enough for the target
2055 && TREE_CODE (TREE_OPERAND (stmt
, 0)) == CALL_EXPR
2056 && POINTER_TYPE_P (TREE_TYPE (stmt
)))
2058 if (tree fndecl
= get_callee_fndecl (TREE_OPERAND (stmt
, 0)))
2059 if (DECL_IS_MALLOC (fndecl
))
2061 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (fndecl
));
2062 tree alloc_size
= lookup_attribute ("alloc_size", attrs
);
2064 warn_for_alloc_size (EXPR_LOCATION (stmt
),
2065 TREE_TYPE (TREE_TYPE (stmt
)),
2066 TREE_OPERAND (stmt
, 0), alloc_size
);
2070 if (!wtd
->no_sanitize_p
2071 && sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
)
2072 && TYPE_REF_P (TREE_TYPE (stmt
)))
2073 ubsan_maybe_instrument_reference (stmt_p
);

    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
	 normal functions.  */
      if (concept_check_p (stmt))
	{
	  *stmt_p = evaluate_concept_check (stmt);
	  *walk_subtrees = 0;
	  break;
	}

      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;
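
    /* An illustrative multi-versioned call that the rewrite above handles
       (user code, not part of this file; the attribute spelling is
       incidental):

	 __attribute__ ((target_clones ("avx2", "default")))
	 int f (int x) { return x + 1; }

	 int g (int x) { return f (x); }  // may be redirected to the
					  // dispatcher if f can't be inlined
    */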

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      break;

    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
	 composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
	{
	  tree *data[4] = { NULL, NULL, NULL, NULL };
	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
				  find_combined_omp_for, data, NULL);
	  if (inner != NULL_TREE
	      && TREE_CODE (inner) == OMP_FOR)
	    {
	      for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
		if (OMP_FOR_ORIG_DECLS (inner)
		    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						i)) == TREE_LIST
		    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						   i)))
		  {
		    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
		    /* Class iterators aren't allowed on OMP_SIMD, so the only
		       case we need to solve is distribute parallel for.  */
		    gcc_assert (TREE_CODE (inner) == OMP_FOR
				&& data[1]);
		    tree orig_decl = TREE_PURPOSE (orig);
		    tree c, cl = NULL_TREE;
		    for (c = OMP_FOR_CLAUSES (inner);
			 c; c = OMP_CLAUSE_CHAIN (c))
		      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
			  && OMP_CLAUSE_DECL (c) == orig_decl)
			{
			  cl = c;
			  break;
			}
		    if (cl == NULL_TREE)
		      {
			for (c = OMP_PARALLEL_CLAUSES (*data[1]);
			     c; c = OMP_CLAUSE_CHAIN (c))
			  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			      && OMP_CLAUSE_DECL (c) == orig_decl)
			    {
			      cl = c;
			      break;
			    }
		      }
		    if (cl)
		      {
			orig_decl = require_complete_type (orig_decl);
			tree inner_type = TREE_TYPE (orig_decl);
			if (orig_decl == error_mark_node)
			  continue;
			if (TYPE_REF_P (TREE_TYPE (orig_decl)))
			  inner_type = TREE_TYPE (inner_type);
			while (TREE_CODE (inner_type) == ARRAY_TYPE)
			  inner_type = TREE_TYPE (inner_type);
			get_copy_ctor (inner_type, tf_warning_or_error);
		      }
		  }
	    }
	}
      /* FALLTHRU */

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				 cp_genericize_r, cp_walk_subtrees);
      break;

    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
			    TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;
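
    /* For example (illustrative, not from this file), by this point

	 std::bit_cast<unsigned int> (1.0f)

       is represented as a BIT_CAST_EXPR, and the rewrite above turns it into
       a VIEW_CONVERT_EXPR, which the middle end already understands.  */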

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree *t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && ((!optimize && !flag_unreachable_traps)
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    t = build_builtin_unreachable (BUILTINS_LOCATION);

  append_to_statement_list (t, p);
}
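
/* An illustrative function that the instrumentation above targets (user
   code, not part of this file): control can flow off the end of f without
   returning a value, so with -fsanitize=return a runtime diagnostic is
   appended, and otherwise a __builtin_unreachable () marker is used.

     int f (bool b)
     {
       if (b)
	 return 1;
     }	// falls off the end when !b
*/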

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2 != NULL)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
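
/* A sketch of where the helper above is used (illustrative user code, not
   from this file): a firstprivate clause on an array of class type must run
   the copy constructor element by element, which is exactly the
   pointer-walking loop built above.

     struct S { S (); S (const S &); ~S (); };

     void f (S (&arr)[8])
     {
     #pragma omp parallel firstprivate (arr)
       { }
     }
*/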

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TYPE_REF_P (TREE_TYPE (decl))
	  || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
   of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1 (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;

      if (c_omp_predefined_variable (decl))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* this may not be specified in data-sharing clauses, still we need
     to predetermine it firstprivate.  */
  if (decl == current_class_ptr)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Likewise, but also include the artificial vars.  We don't want to
   disallow the artificial vars being mentioned in explicit clauses,
   as we use artificial vars e.g. for loop constructs with random
   access iterators other than pointers, but during gimplification
   we want to treat them as predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return ret;

  /* Predetermine artificial variables holding integral values, those
     are usually result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Similarly for typeinfo symbols.  */
  if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

enum omp_clause_defaultmap_kind
cxx_omp_predetermined_mapping (tree decl)
{
  /* Predetermine artificial variables holding integral values, those
     are usually result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;

  if (c_omp_predefined_variable (decl))
    return OMP_CLAUSE_DEFAULTMAP_TO;

  return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
				     true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  if (shared)
    return false;
  if (VAR_P (decl)
      && DECL_HAS_VALUE_EXPR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && DECL_OMP_PRIVATIZED_MEMBER (decl))
    return true;
  if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
    return true;
  return false;
}

/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
{
  while (true)
    {
      x = cp_fold (x, flags);
      if (rval)
	x = mark_rvalue_use (x);
      if (rval && DECL_P (x)
	  && !TYPE_REF_P (TREE_TYPE (x)))
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  return cp_fold_maybe_rvalue (x, rval, ff_none);
}

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x, fold_flags_t flags)
{
  return cp_fold_maybe_rvalue (x, true, flags);
}

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_rvalue (x, ff_none);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x, mce_value manifestly_const_eval)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x, /*decl=*/NULL_TREE, manifestly_const_eval);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  fold_flags_t flags = ff_none;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;
  return cp_fold_rvalue (x, flags);
}

tree
cp_fully_fold (tree x)
{
  return cp_fully_fold (x, mce_unknown);
}
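
/* For example (illustrative, not from this file), cp_fully_fold reduces the
   initializer in

     int a = 2 + 3 * 4;

   to the single INTEGER_CST 14, going through maybe_constant_value first in
   C++11 and later so the constexpr machinery gets a chance to run.  */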

/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
   in some cases.  */

tree
cp_fully_fold_init (tree x)
{
  if (processing_template_decl)
    return x;
  x = cp_fully_fold (x, mce_false);
  cp_fold_data data (ff_mce_false);
  cp_walk_tree (&x, cp_fold_r, &data, NULL);
  return x;
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}

static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];

/* Subroutine of cp_fold.  Returns which fold cache to use according
   to the given flags.  We need multiple caches since the result of
   folding may depend on which flags are used.  */

static hash_map<tree, tree> *&
get_fold_cache (fold_flags_t flags)
{
  if (flags & ff_mce_false)
    return fold_caches[1];
  else
    return fold_caches[0];
}

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  for (auto& fold_cache : fold_caches)
    if (fold_cache != NULL)
      fold_cache->empty ();
}
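
/* The separate caches matter because folding can give different answers for
   the same tree in the two modes.  For example (illustrative, not from this
   file), with -std=c++20:

     if (std::is_constant_evaluated ())  // may fold to false only when we
       ...				  // know we're not in a manifestly
					  // constant-evaluated context

   Sharing one cache across flag values could leak the ff_mce_false answer
   into contexts where the question is still open.  */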

/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x, fold_flags_t flags)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  auto& fold_cache = get_fold_cache (flags);
  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    {
      /* unshare_expr doesn't recurse into SAVE_EXPRs.  If SAVE_EXPR's
	 argument has been folded into a tree invariant, make sure it is
	 unshared.  See PR112727.  */
      if (TREE_CODE (x) == SAVE_EXPR && *cached != x)
	return unshare_expr (*cached);
      return *cached;
    }

  uid_sensitive_constexpr_evaluation_checker c;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    CASE_CONVERT:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
	     folding of the operand should be in the caches and if in cp_fold_r
	     it will modify it in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold() doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;

    case EXCESS_PRECISION_EXPR:
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
      x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
      break;

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  if (p != x)
	    return cp_fold (p, flags);
	}
      goto unary;

    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);

      /* Cope with user tricks that amount to offsetof.  */
      if (op0 != error_mark_node
	  && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      val = maybe_constant_value (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);

      /* decltype(nullptr) has only one value, so optimize away all comparisons
	 with that type right away, keeping them in the IL causes troubles for
	 various optimizations.  */
      if (COMPARISON_CLASS_P (org_x)
	  && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
	  && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      x = constant_boolean_node (true, TREE_TYPE (x));
	      break;
	    case NE_EXPR:
	      x = constant_boolean_node (false, TREE_TYPE (x));
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  return omit_two_operands_loc (loc, TREE_TYPE (x), x,
					op0, op1);
	}

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      /* This is only needed for -Wnonnull-compare and only if
	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
	 generation, we do it always.  */
      if (COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    {
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  else
	    x = org_x;
	}

      break;

    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
      op2 = cp_fold (TREE_OPERAND (x, 2), flags);

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;

    case CALL_EXPR:
      {
	tree callee = get_callee_fndecl (x);

	/* "Inline" calls to std::move/forward and other cast-like functions
	   by simply folding them into a corresponding cast to their return
	   type.  This is cheaper than relying on the middle end to do so, and
	   also means we avoid generating useless debug info for them at all.

	   At this point the argument has already been converted into a
	   reference, so it suffices to use a NOP_EXPR to express the
	   cast.  */
	if ((OPTION_SET_P (flag_fold_simple_inlines)
	     ? flag_fold_simple_inlines
	     : !flag_no_inline)
	    && call_expr_nargs (x) == 1
	    && decl_in_std_namespace_p (callee)
	    && DECL_NAME (callee) != NULL_TREE
	    && (id_equal (DECL_NAME (callee), "move")
		|| id_equal (DECL_NAME (callee), "forward")
		|| id_equal (DECL_NAME (callee), "addressof")
		/* This addressof equivalent is used heavily in libstdc++.  */
		|| id_equal (DECL_NAME (callee), "__addressof")
		|| id_equal (DECL_NAME (callee), "as_const")))
	  {
	    r = CALL_EXPR_ARG (x, 0);
	    /* Check that the return and argument types are sane before
	       folding.  */
	    if (INDIRECT_TYPE_P (TREE_TYPE (x))
		&& INDIRECT_TYPE_P (TREE_TYPE (r)))
	      {
		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
		  r = build_nop (TREE_TYPE (x), r);
		x = cp_fold (r, flags);
		break;
	      }
	  }

	int sv = optimize, nw = sv;

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && fndecl_built_in_p (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
	  {
	    iloc_sentinel ils (EXPR_LOCATION (x));
	    switch (DECL_FE_FUNCTION_CODE (callee))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		/* Defer folding __builtin_is_constant_evaluated unless
		   we know this isn't a manifestly constant-evaluated
		   context.  */
		if (flags & ff_mce_false)
		  x = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		x = fold_builtin_source_location (x);
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		x = fold_builtin_is_corresponding_member
		      (EXPR_LOCATION (x), call_expr_nargs (x),
		       &CALL_EXPR_ARG (x, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		x = fold_builtin_is_pointer_inverconvertible_with_class
		      (EXPR_LOCATION (x), call_expr_nargs (x),
		       &CALL_EXPR_ARG (x, 0));
		break;
	      default:
		break;
	      }
	    break;
	  }

	if (callee
	    && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
				  BUILT_IN_FRONTEND))
	  {
	    x = fold_builtin_source_location (x);
	    break;
	  }

	bool changed = false;
	int m = call_expr_nargs (x);
	for (int i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		if (!changed)
		  x = copy_node (x);
		CALL_EXPR_ARG (x, i) = r;
		changed = true;
	      }
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold_call_expr (EXPR_LOCATION (x), x, false);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r, flags);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  {
	    mce_value manifestly_const_eval = mce_unknown;
	    if (flags & ff_mce_false)
	      /* Allow folding __builtin_is_constant_evaluated to false during
		 constexpr evaluation of this call.  */
	      manifestly_const_eval = mce_false;
	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
				      manifestly_const_eval);
	  }
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree a = CALL_EXPR_ARG (x, 0);
		bool return_this = targetm.cxx.cdtor_returns_this ();
		if (return_this)
		  a = cp_save_expr (a);
		tree s = build_fold_indirect_ref_loc (loc, a);
		r = cp_build_init_expr (s, r);
		if (return_this)
		  r = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (x), r,
				  fold_convert_loc (loc, TREE_TYPE (x), a));
	      }
	    x = r;
	    break;
	  }

	break;
      }

    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value, flags);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	    CONSTRUCTOR_MUTABLE_POISON (x)
	      = CONSTRUCTOR_MUTABLE_POISON (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	int n = TREE_VEC_LENGTH (x);

	for (int i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
	    if (op != TREE_VEC_ELT (x, i))
	      {
		if (!changed)
		  x = copy_node (x);
		TREE_VEC_ELT (x, i) = op;
		changed = true;
	      }
	  }
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
      op3 = cp_fold (TREE_OPERAND (x, 3), flags);

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}
      else
	x = fold (x);

      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0), flags);
      if (tree_invariant_p (r))
	x = r;
      break;

    case REQUIRES_EXPR:
      x = evaluate_requires_expr (x);
      break;

    default:
      return org_x;
    }

  if (EXPR_P (x) && TREE_CODE (x) == code)
    {
      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
      copy_warning (x, org_x);
    }

  if (!c.evaluation_restricted_p ())
    {
      fold_cache->put (org_x, x);
      /* Prevent that we try to fold an already folded result again.  */
      if (org_x != x)
	fold_cache->put (x, x);
    }

  return x;
}
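
/* A small example of the CALL_EXPR shortcut above (illustrative, not from
   this file): a call such as

     std::move (x)

   is folded directly into a NOP_EXPR cast of the argument to the return
   type, so no out-of-line call to std::move and no debug info for it
   survive into GENERIC.  */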

/* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */

static tree
lookup_hotness_attribute (tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree name = get_attribute_name (list);
      if ((is_attribute_p ("hot", name)
	   || is_attribute_p ("cold", name)
	   || is_attribute_p ("likely", name)
	   || is_attribute_p ("unlikely", name))
	  && is_attribute_namespace_p ("", list))
	break;
    }
  return list;
}

/* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST.  */

static tree
remove_hotness_attribute (tree list)
{
  for (tree *p = &list; *p; )
    {
      tree l = *p;
      tree name = get_attribute_name (l);
      if ((is_attribute_p ("hot", name)
	   || is_attribute_p ("cold", name)
	   || is_attribute_p ("likely", name)
	   || is_attribute_p ("unlikely", name))
	  && is_attribute_namespace_p ("", l))
	*p = TREE_CHAIN (l);
      else
	p = &TREE_CHAIN (l);
    }
  return list;
}

/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
   PREDICT_EXPR.  */

tree
process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  if (tree attr = lookup_hotness_attribute (std_attrs))
    {
      tree name = get_attribute_name (attr);
      bool hot = (is_attribute_p ("hot", name)
		  || is_attribute_p ("likely", name));
      tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
				      hot ? TAKEN : NOT_TAKEN);
      SET_EXPR_LOCATION (pred, attrs_loc);
      add_stmt (pred);
      if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
	warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
		 get_attribute_name (other), name);
      std_attrs = remove_hotness_attribute (std_attrs);
    }
  return std_attrs;
}
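
/* For example (illustrative, not from this file):

     if (p) [[likely]]
       use (p);

   turns into a PREDICT_EXPR with PRED_HOT_LABEL on the marked path; the
   attribute itself does not survive to the middle end.  */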

/* Build IFN_ASSUME internal call for assume condition ARG.  */

tree
build_assume_call (location_t loc, tree arg)
{
  if (!processing_template_decl)
    arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
  return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
				       1, arg);
}

/* If [[assume (cond)]] appears on this statement, handle it.  */

tree
process_stmt_assume_attribute (tree std_attrs, tree statement,
			       location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  tree attr = lookup_attribute ("gnu", "assume", std_attrs);
  if (attr == NULL_TREE)
    return std_attrs;
  /* The next token after the assume attribute is not ';'.  */
  if (statement)
    {
      warning_at (attrs_loc, OPT_Wattributes,
		  "%<assume%> attribute not followed by %<;%>");
      attr = NULL_TREE;
    }
  for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
    {
      tree args = TREE_VALUE (attr);
      if (args && PACK_EXPANSION_P (args))
	{
	  auto_diagnostic_group d;
	  error_at (attrs_loc, "pack expansion of %qE attribute",
		    get_attribute_name (attr));
	  if (cxx_dialect >= cxx17)
	    inform (attrs_loc, "use fold expression in the attribute "
			       "argument instead");
	  continue;
	}
      int nargs = list_length (args);
      if (nargs != 1)
	{
	  auto_diagnostic_group d;
	  error_at (attrs_loc, "wrong number of arguments specified for "
			       "%qE attribute", get_attribute_name (attr));
	  inform (attrs_loc, "expected %i, found %i", 1, nargs);
	}
      else
	{
	  tree arg = TREE_VALUE (args);
	  if (!type_dependent_expression_p (arg))
	    arg = contextual_conv_bool (arg, tf_warning_or_error);
	  if (error_operand_p (arg))
	    continue;
	  finish_expr_stmt (build_assume_call (attrs_loc, arg));
	}
    }
  return remove_attribute ("gnu", "assume", std_attrs);
}
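
/* For example (illustrative, not from this file):

     [[assume (n % 2 == 0)]];

   becomes the internal call IFN_ASSUME (n % 2 == 0), which optimizers may
   exploit but which is never evaluated at runtime.  */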

/* Return the type std::source_location::__impl after performing
   verification on it.  */

tree
get_source_location_impl_type ()
{
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error ("%qD is not a class type", decl);
      return error_mark_node;
    }

  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_aggregate_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error ("%qD does not have %<const char *%> type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error ("%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error ("%<std::source_location::__impl%> does not contain only "
	     "non-static data members %<_M_file_name%>, "
	     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  return build_qualified_type (type, TYPE_QUAL_CONST);
}

/* Type for source_location_table hash_set.  */
struct GTY((for_user)) source_location_table_entry {
  location_t loc;
  unsigned uid;
  tree var;
};

/* Traits class for function start hash maps below.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  static bool
  equal (const source_location_table_entry &ref1,
	 const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == -1U
	    && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == 0
	    && ref.var == NULL_TREE);
  }

  static void
  pch_nx (source_location_table_entry &p)
  {
    extern void gt_pch_nx (source_location_table_entry &);
    gt_pch_nx (p);
  }

  static void
  pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
  {
    extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
			   void *);
    gt_pch_nx (&p, op, cookie);
  }
};

static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
static GTY(()) unsigned int source_location_id;

/* Fold the __builtin_source_location () call T.  */

tree
fold_builtin_source_location (const_tree t)
{
  gcc_assert (TREE_CODE (t) == CALL_EXPR);
  /* TREE_TYPE (t) is const std::source_location::__impl*  */
  tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  gcc_assert (CLASS_TYPE_P (source_location_impl)
	      && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));

  location_t loc = EXPR_LOCATION (t);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_aggregate_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (fname);
		}
	      else
		val = build_string_literal ("");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      const char *name = "";

	      if (current_function_decl)
		name = cxx_printable_name (current_function_decl, 2);

	      val = build_string_literal (name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      *entryp = entry;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
}
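
/* For example (illustrative, not from this file), each distinct call to

     std::source_location::current ()

   resolves at compile time to the address of one static __impl object built
   above, its _M_file_name, _M_function_name, _M_line and _M_column fields
   filled from the location of the call.  */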

#include "gt-cp-cp-gimplify.h"