/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
26 #include "basic-block.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
33 #include "c-family/c-ubsan.h"
34 #include "stringpool.h"
37 #include "gcc-rich-location.h"
41 #include "file-prefix-map.h"
43 #include "omp-general.h"
46 /* Keep track of forward references to immediate-escalating functions in
47 case they become consteval. This vector contains ADDR_EXPRs and
48 PTRMEM_CSTs; it also stores FUNCTION_DECLs that had an escalating
49 function call in them, to check that they can be evaluated to a constant,
50 and immediate-escalating functions that may become consteval. */
51 static GTY(()) hash_set
<tree
> *deferred_escalating_exprs
;
54 remember_escalating_expr (tree t
)
56 if (!deferred_escalating_exprs
)
57 deferred_escalating_exprs
= hash_set
<tree
>::create_ggc (37);
58 deferred_escalating_exprs
->add (t
);
61 /* Flags for cp_fold and cp_fold_r. */
65 /* Whether we're being called from cp_fold_function. */
66 ff_genericize
= 1 << 0,
67 /* Whether we're folding a point where we know we're
68 definitely not in a manifestly constant-evaluated
70 ff_mce_false
= 1 << 1,
73 using fold_flags_t
= int;
79 cp_fold_data (fold_flags_t flags
): flags (flags
) {}
82 /* Forward declarations. */
84 static tree
cp_genericize_r (tree
*, int *, void *);
85 static tree
cp_fold_r (tree
*, int *, void *);
86 static void cp_genericize_tree (tree
*, bool);
87 static tree
cp_fold (tree
, fold_flags_t
);
88 static tree
cp_fold_immediate_r (tree
*, int *, void *);
90 /* Genericize a TRY_BLOCK. */
93 genericize_try_block (tree
*stmt_p
)
95 tree body
= TRY_STMTS (*stmt_p
);
96 tree cleanup
= TRY_HANDLERS (*stmt_p
);
98 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
101 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
104 genericize_catch_block (tree
*stmt_p
)
106 tree type
= HANDLER_TYPE (*stmt_p
);
107 tree body
= HANDLER_BODY (*stmt_p
);
109 /* FIXME should the caught type go in TREE_TYPE? */
110 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
113 /* A terser interface for building a representation of an exception
117 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
121 /* FIXME should the allowed types go in TREE_TYPE? */
122 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
123 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
125 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
126 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
131 /* Genericize an EH_SPEC_BLOCK by converting it to a
132 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
135 genericize_eh_spec_block (tree
*stmt_p
)
137 tree body
= EH_SPEC_STMTS (*stmt_p
);
138 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
139 tree failure
= build_call_n (call_unexpected_fn
, 1, build_exc_ptr ());
141 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
142 suppress_warning (*stmt_p
);
143 suppress_warning (TREE_OPERAND (*stmt_p
, 1));
146 /* Return the first non-compound statement in STMT. */
149 first_stmt (tree stmt
)
151 switch (TREE_CODE (stmt
))
154 if (tree_statement_list_node
*p
= STATEMENT_LIST_HEAD (stmt
))
155 return first_stmt (p
->stmt
);
159 return first_stmt (BIND_EXPR_BODY (stmt
));
166 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
169 genericize_if_stmt (tree
*stmt_p
)
171 tree stmt
, cond
, then_
, else_
;
172 location_t locus
= EXPR_LOCATION (*stmt_p
);
175 cond
= IF_COND (stmt
);
176 then_
= THEN_CLAUSE (stmt
);
177 else_
= ELSE_CLAUSE (stmt
);
181 tree ft
= first_stmt (then_
);
182 tree fe
= first_stmt (else_
);
184 if (TREE_CODE (ft
) == PREDICT_EXPR
185 && TREE_CODE (fe
) == PREDICT_EXPR
186 && (pr
= PREDICT_EXPR_PREDICTOR (ft
)) == PREDICT_EXPR_PREDICTOR (fe
)
187 && (pr
== PRED_HOT_LABEL
|| pr
== PRED_COLD_LABEL
))
189 gcc_rich_location
richloc (EXPR_LOC_OR_LOC (ft
, locus
));
190 richloc
.add_range (EXPR_LOC_OR_LOC (fe
, locus
));
191 warning_at (&richloc
, OPT_Wattributes
,
192 "both branches of %<if%> statement marked as %qs",
193 pr
== PRED_HOT_LABEL
? "likely" : "unlikely");
198 then_
= build_empty_stmt (locus
);
200 else_
= build_empty_stmt (locus
);
202 /* consteval if has been verified not to have the then_/else_ blocks
203 entered by gotos/case labels from elsewhere, and as then_ block
204 can contain unfolded immediate function calls, we have to discard
205 the then_ block regardless of whether else_ has side-effects or not. */
206 if (IF_STMT_CONSTEVAL_P (stmt
))
208 if (block_may_fallthru (then_
))
209 stmt
= build3 (COND_EXPR
, void_type_node
, boolean_false_node
,
214 else if (IF_STMT_CONSTEXPR_P (stmt
))
215 stmt
= integer_nonzerop (cond
) ? then_
: else_
;
216 /* ??? This optimization doesn't seem to belong here, but removing it
217 causes -Wreturn-type regressions (e.g. 107310). */
218 else if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
220 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
223 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
224 protected_set_expr_location_if_unset (stmt
, locus
);
228 /* Hook into the middle of gimplifying an OMP_FOR node. */
230 static enum gimplify_status
231 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
233 tree for_stmt
= *expr_p
;
234 gimple_seq seq
= NULL
;
236 /* Protect ourselves from recursion. */
237 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
239 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
241 gimplify_and_add (for_stmt
, &seq
);
242 gimple_seq_add_seq (pre_p
, seq
);
244 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
249 /* Gimplify an EXPR_STMT node. */
252 gimplify_expr_stmt (tree
*stmt_p
)
254 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
256 if (stmt
== error_mark_node
)
259 /* Gimplification of a statement expression will nullify the
260 statement if all its side effects are moved to *PRE_P and *POST_P.
262 In this case we will not want to emit the gimplified statement.
263 However, we may still want to emit a warning, so we do that before
265 if (stmt
&& warn_unused_value
)
267 if (!TREE_SIDE_EFFECTS (stmt
))
269 if (!IS_EMPTY_STMT (stmt
)
270 && !VOID_TYPE_P (TREE_TYPE (stmt
))
271 && !warning_suppressed_p (stmt
, OPT_Wunused_value
))
272 warning (OPT_Wunused_value
, "statement with no effect");
275 warn_if_unused_value (stmt
, input_location
);
278 if (stmt
== NULL_TREE
)
279 stmt
= alloc_stmt_list ();
284 /* Gimplify initialization from an AGGR_INIT_EXPR. */
287 cp_gimplify_init_expr (tree
*expr_p
)
289 tree from
= TREE_OPERAND (*expr_p
, 1);
290 tree to
= TREE_OPERAND (*expr_p
, 0);
293 if (TREE_CODE (from
) == TARGET_EXPR
)
294 if (tree init
= TARGET_EXPR_INITIAL (from
))
296 /* Make sure that we expected to elide this temporary. But also allow
297 gimplify_modify_expr_rhs to elide temporaries of trivial type. */
298 gcc_checking_assert (TARGET_EXPR_ELIDING_P (from
)
299 || !TREE_ADDRESSABLE (TREE_TYPE (from
)));
300 if (target_expr_needs_replace (from
))
302 /* If this was changed by cp_genericize_target_expr, we need to
303 walk into it to replace uses of the slot. */
304 replace_decl (&init
, TARGET_EXPR_SLOT (from
), to
);
312 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
313 inside the TARGET_EXPR. */
316 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
318 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
319 replace the slot operand with our target.
321 Should we add a target parm to gimplify_expr instead? No, as in this
322 case we want to replace the INIT_EXPR. */
323 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
324 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
326 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
327 AGGR_INIT_EXPR_SLOT (sub
) = to
;
329 VEC_INIT_EXPR_SLOT (sub
) = to
;
332 /* The initialization is now a side-effect, so the container can
335 TREE_TYPE (from
) = void_type_node
;
338 /* Handle aggregate NSDMI. */
339 replace_placeholders (sub
, to
);
344 t
= TREE_OPERAND (t
, 1);
349 /* Gimplify a MUST_NOT_THROW_EXPR. */
351 static enum gimplify_status
352 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
355 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
356 tree body
= TREE_OPERAND (stmt
, 0);
357 gimple_seq try_
= NULL
;
358 gimple_seq catch_
= NULL
;
361 gimplify_and_add (body
, &try_
);
362 mnt
= gimple_build_eh_must_not_throw (call_terminate_fn
);
363 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
364 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
366 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
377 /* Return TRUE if an operand (OP) of a given TYPE being copied is
378 really just an empty class copy.
380 Check that the operand has a simple form so that TARGET_EXPRs and
381 non-empty CONSTRUCTORs get reduced properly, and we leave the
382 return slot optimization alone because it isn't a copy. */
385 simple_empty_class_p (tree type
, tree op
, tree_code code
)
387 if (TREE_CODE (op
) == COMPOUND_EXPR
)
388 return simple_empty_class_p (type
, TREE_OPERAND (op
, 1), code
);
389 if (SIMPLE_TARGET_EXPR_P (op
)
390 && TYPE_HAS_TRIVIAL_DESTRUCTOR (type
))
391 /* The TARGET_EXPR is itself a simple copy, look through it. */
392 return simple_empty_class_p (type
, TARGET_EXPR_INITIAL (op
), code
);
394 if (TREE_CODE (op
) == PARM_DECL
395 && TREE_ADDRESSABLE (TREE_TYPE (op
)))
397 tree fn
= DECL_CONTEXT (op
);
398 if (DECL_THUNK_P (fn
)
399 || lambda_static_thunk_p (fn
))
400 /* In a thunk, we pass through invisible reference parms, so this isn't
406 (TREE_CODE (op
) == EMPTY_CLASS_EXPR
407 || code
== MODIFY_EXPR
408 || is_gimple_lvalue (op
)
409 || INDIRECT_REF_P (op
)
410 || (TREE_CODE (op
) == CONSTRUCTOR
411 && CONSTRUCTOR_NELTS (op
) == 0)
412 || (TREE_CODE (op
) == CALL_EXPR
413 && !CALL_EXPR_RETURN_SLOT_OPT (op
)))
414 && !TREE_CLOBBER_P (op
)
415 && is_really_empty_class (type
, /*ignore_vptr*/true);
418 /* Returns true if evaluating E as an lvalue has side-effects;
419 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
420 have side-effects until there is a read or write through it. */
423 lvalue_has_side_effects (tree e
)
425 if (!TREE_SIDE_EFFECTS (e
))
427 while (handled_component_p (e
))
429 if (TREE_CODE (e
) == ARRAY_REF
430 && TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 1)))
432 e
= TREE_OPERAND (e
, 0);
435 /* Just naming a variable has no side-effects. */
437 else if (INDIRECT_REF_P (e
))
438 /* Similarly, indirection has no side-effects. */
439 return TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 0));
441 /* For anything else, trust TREE_SIDE_EFFECTS. */
442 return TREE_SIDE_EFFECTS (e
);
445 /* Return true if FN is an immediate-escalating function. */
448 immediate_escalating_function_p (tree fn
)
450 if (!fn
|| !flag_immediate_escalation
)
453 gcc_checking_assert (TREE_CODE (fn
) == FUNCTION_DECL
);
455 if (DECL_IMMEDIATE_FUNCTION_P (fn
))
458 /* An immediate-escalating function is
459 -- the call operator of a lambda that is not declared with the consteval
461 if (LAMBDA_FUNCTION_P (fn
))
463 /* -- a defaulted special member function that is not declared with the
464 consteval specifier */
465 special_function_kind sfk
= special_memfn_p (fn
);
466 if (sfk
!= sfk_none
&& DECL_DEFAULTED_FN (fn
))
468 /* -- a function that results from the instantiation of a templated entity
469 defined with the constexpr specifier. */
470 return is_instantiation_of_constexpr (fn
);
473 /* Return true if FN is an immediate-escalating function that has not been
474 checked for escalating expressions.. */
477 unchecked_immediate_escalating_function_p (tree fn
)
479 return (immediate_escalating_function_p (fn
)
480 && !DECL_ESCALATION_CHECKED_P (fn
));
483 /* Promote FN to an immediate function, including its clones. */
486 promote_function_to_consteval (tree fn
)
488 SET_DECL_IMMEDIATE_FUNCTION_P (fn
);
489 DECL_ESCALATION_CHECKED_P (fn
) = true;
491 FOR_EACH_CLONE (clone
, fn
)
493 SET_DECL_IMMEDIATE_FUNCTION_P (clone
);
494 DECL_ESCALATION_CHECKED_P (clone
) = true;
498 /* A wrapper around cp_fold_immediate_r. Return a non-null tree if
499 we found a non-constant immediate function, or taking the address
500 of an immediate function. */
503 cp_fold_immediate (tree
*tp
, mce_value manifestly_const_eval
,
504 tree decl
/*= current_function_decl*/)
506 if (cxx_dialect
<= cxx17
)
509 temp_override
<tree
> cfd (current_function_decl
, decl
);
511 fold_flags_t flags
= ff_none
;
512 if (manifestly_const_eval
== mce_false
)
513 flags
|= ff_mce_false
;
515 cp_fold_data
data (flags
);
516 int save_errorcount
= errorcount
;
517 tree r
= cp_walk_tree_without_duplicates (tp
, cp_fold_immediate_r
, &data
);
518 if (errorcount
> save_errorcount
)
519 return integer_one_node
;
523 /* Maybe say that FN (a function decl with DECL_IMMEDIATE_FUNCTION_P set)
524 was initially not an immediate function, but was promoted to one because
525 its body contained an immediate-escalating expression or conversion. */
528 maybe_explain_promoted_consteval (location_t loc
, tree fn
)
530 if (DECL_ESCALATION_CHECKED_P (fn
))
532 /* See if we can figure out what made the function consteval. */
533 tree x
= cp_fold_immediate (&DECL_SAVED_TREE (fn
), mce_unknown
, NULL_TREE
);
535 inform (cp_expr_loc_or_loc (x
, loc
),
536 "%qD was promoted to an immediate function because its "
537 "body contains an immediate-escalating expression %qE", fn
, x
);
539 inform (loc
, "%qD was promoted to an immediate function", fn
);
543 /* Gimplify *EXPR_P as rvalue into an expression that can't be modified
544 by expressions with side-effects in other operands. */
546 static enum gimplify_status
547 gimplify_to_rvalue (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
548 bool (*gimple_test_f
) (tree
))
550 enum gimplify_status t
551 = gimplify_expr (expr_p
, pre_p
, post_p
, gimple_test_f
, fb_rvalue
);
554 else if (is_gimple_variable (*expr_p
) && TREE_CODE (*expr_p
) != SSA_NAME
)
555 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
);
559 /* Like gimplify_arg, but if ORDERED is set (which should be set if
560 any of the arguments this argument is sequenced before has
561 TREE_SIDE_EFFECTS set, make sure expressions with is_gimple_reg_type type
562 are gimplified into SSA_NAME or a fresh temporary and for
563 non-is_gimple_reg_type we don't optimize away TARGET_EXPRs. */
565 static enum gimplify_status
566 cp_gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
569 enum gimplify_status t
;
571 && !is_gimple_reg_type (TREE_TYPE (*arg_p
))
572 && TREE_CODE (*arg_p
) == TARGET_EXPR
)
574 /* gimplify_arg would strip away the TARGET_EXPR, but
575 that can mean we don't copy the argument and some following
576 argument with side-effect could modify it. */
577 protected_set_expr_location (*arg_p
, call_location
);
578 return gimplify_expr (arg_p
, pre_p
, NULL
, is_gimple_lvalue
, fb_either
);
582 t
= gimplify_arg (arg_p
, pre_p
, call_location
);
586 && is_gimple_reg_type (TREE_TYPE (*arg_p
))
587 && is_gimple_variable (*arg_p
)
588 && TREE_CODE (*arg_p
) != SSA_NAME
589 /* No need to force references into register, references
590 can't be modified. */
591 && !TYPE_REF_P (TREE_TYPE (*arg_p
))
592 /* And this can't be modified either. */
593 && *arg_p
!= current_class_ptr
)
594 *arg_p
= get_initialized_tmp_var (*arg_p
, pre_p
);
600 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
603 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
605 int saved_stmts_are_full_exprs_p
= 0;
606 location_t loc
= cp_expr_loc_or_input_loc (*expr_p
);
607 enum tree_code code
= TREE_CODE (*expr_p
);
608 enum gimplify_status ret
;
610 if (STATEMENT_CODE_P (code
))
612 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
613 current_stmt_tree ()->stmts_are_full_exprs_p
614 = STMT_IS_FULL_EXPR_P (*expr_p
);
620 simplify_aggr_init_expr (expr_p
);
626 *expr_p
= expand_vec_init_expr (NULL_TREE
, *expr_p
,
627 tf_warning_or_error
);
629 cp_fold_data
data (ff_genericize
| ff_mce_false
);
630 cp_walk_tree (expr_p
, cp_fold_r
, &data
, NULL
);
631 cp_genericize_tree (expr_p
, false);
632 copy_if_shared (expr_p
);
638 /* FIXME communicate throw type to back end, probably by moving
639 THROW_EXPR into ../tree.def. */
640 *expr_p
= TREE_OPERAND (*expr_p
, 0);
644 case MUST_NOT_THROW_EXPR
:
645 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
648 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
649 LHS of an assignment might also be involved in the RHS, as in bug
652 cp_gimplify_init_expr (expr_p
);
653 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
659 /* If the back end isn't clever enough to know that the lhs and rhs
660 types are the same, add an explicit conversion. */
661 tree op0
= TREE_OPERAND (*expr_p
, 0);
662 tree op1
= TREE_OPERAND (*expr_p
, 1);
664 if (!error_operand_p (op0
)
665 && !error_operand_p (op1
)
666 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
667 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
668 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
669 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
670 TREE_TYPE (op0
), op1
);
672 else if (simple_empty_class_p (TREE_TYPE (op0
), op1
, code
))
674 while (TREE_CODE (op1
) == TARGET_EXPR
)
675 /* We're disconnecting the initializer from its target,
676 don't create a temporary. */
677 op1
= TARGET_EXPR_INITIAL (op1
);
679 /* Remove any copies of empty classes. Also drop volatile
680 variables on the RHS to avoid infinite recursion from
681 gimplify_expr trying to load the value. */
682 if (TREE_SIDE_EFFECTS (op1
))
684 if (TREE_THIS_VOLATILE (op1
)
685 && (REFERENCE_CLASS_P (op1
) || DECL_P (op1
)))
686 op1
= build_fold_addr_expr (op1
);
688 gimplify_and_add (op1
, pre_p
);
690 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
691 is_gimple_lvalue
, fb_lvalue
);
692 *expr_p
= TREE_OPERAND (*expr_p
, 0);
693 if (code
== RETURN_EXPR
&& REFERENCE_CLASS_P (*expr_p
))
694 /* Avoid 'return *<retval>;' */
695 *expr_p
= TREE_OPERAND (*expr_p
, 0);
697 /* P0145 says that the RHS is sequenced before the LHS.
698 gimplify_modify_expr gimplifies the RHS before the LHS, but that
699 isn't quite strong enough in two cases:
701 1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
702 mean it's evaluated after the LHS.
704 2) the value calculation of the RHS is also sequenced before the
705 LHS, so for scalar assignment we need to preevaluate if the
706 RHS could be affected by LHS side-effects even if it has no
707 side-effects of its own. We don't need this for classes because
708 class assignment takes its RHS by reference. */
709 else if (flag_strong_eval_order
> 1
710 && TREE_CODE (*expr_p
) == MODIFY_EXPR
711 && lvalue_has_side_effects (op0
)
712 && (TREE_CODE (op1
) == CALL_EXPR
713 || (SCALAR_TYPE_P (TREE_TYPE (op1
))
714 && !TREE_CONSTANT (op1
))))
715 TREE_OPERAND (*expr_p
, 1) = get_initialized_tmp_var (op1
, pre_p
);
720 case EMPTY_CLASS_EXPR
:
721 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
722 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
727 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
732 genericize_try_block (expr_p
);
737 genericize_catch_block (expr_p
);
742 genericize_eh_spec_block (expr_p
);
764 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
768 gimplify_expr_stmt (expr_p
);
772 case UNARY_PLUS_EXPR
:
774 tree arg
= TREE_OPERAND (*expr_p
, 0);
775 tree type
= TREE_TYPE (*expr_p
);
776 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
784 if (flag_strong_eval_order
== 2
785 && CALL_EXPR_FN (*expr_p
)
786 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p
)
787 && cp_get_callee_fndecl_nofold (*expr_p
) == NULL_TREE
)
789 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
790 enum gimplify_status t
791 = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
792 is_gimple_call_addr
);
795 /* GIMPLE considers most pointer conversion useless, but for
796 calls we actually care about the exact function pointer type. */
797 else if (TREE_TYPE (CALL_EXPR_FN (*expr_p
)) != fnptrtype
)
798 CALL_EXPR_FN (*expr_p
)
799 = build1 (NOP_EXPR
, fnptrtype
, CALL_EXPR_FN (*expr_p
));
801 if (!CALL_EXPR_FN (*expr_p
))
802 /* Internal function call. */;
803 else if (CALL_EXPR_REVERSE_ARGS (*expr_p
))
805 /* This is a call to a (compound) assignment operator that used
806 the operator syntax; gimplify the RHS first. */
807 gcc_assert (call_expr_nargs (*expr_p
) == 2);
808 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p
));
809 enum gimplify_status t
810 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 1), pre_p
, loc
,
811 TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p
, 0)));
815 else if (CALL_EXPR_ORDERED_ARGS (*expr_p
))
817 /* Leave the last argument for gimplify_call_expr, to avoid problems
818 with __builtin_va_arg_pack(). */
819 int nargs
= call_expr_nargs (*expr_p
) - 1;
820 int last_side_effects_arg
= -1;
821 for (int i
= nargs
; i
> 0; --i
)
822 if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p
, i
)))
824 last_side_effects_arg
= i
;
827 for (int i
= 0; i
< nargs
; ++i
)
829 enum gimplify_status t
830 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
, loc
,
831 i
< last_side_effects_arg
);
836 else if (flag_strong_eval_order
837 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p
))
839 /* If flag_strong_eval_order, evaluate the object argument first. */
840 tree fntype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
841 if (INDIRECT_TYPE_P (fntype
))
842 fntype
= TREE_TYPE (fntype
);
843 if (TREE_CODE (fntype
) == METHOD_TYPE
)
845 int nargs
= call_expr_nargs (*expr_p
);
846 bool side_effects
= false;
847 for (int i
= 1; i
< nargs
; ++i
)
848 if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p
, i
)))
853 enum gimplify_status t
854 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 0), pre_p
, loc
,
862 tree decl
= cp_get_callee_fndecl_nofold (*expr_p
);
865 if (fndecl_built_in_p (decl
, BUILT_IN_FRONTEND
))
866 switch (DECL_FE_FUNCTION_CODE (decl
))
868 case CP_BUILT_IN_IS_CONSTANT_EVALUATED
:
869 *expr_p
= boolean_false_node
;
871 case CP_BUILT_IN_SOURCE_LOCATION
:
873 = fold_builtin_source_location (*expr_p
);
875 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER
:
877 = fold_builtin_is_corresponding_member
878 (EXPR_LOCATION (*expr_p
), call_expr_nargs (*expr_p
),
879 &CALL_EXPR_ARG (*expr_p
, 0));
881 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS
:
883 = fold_builtin_is_pointer_inverconvertible_with_class
884 (EXPR_LOCATION (*expr_p
), call_expr_nargs (*expr_p
),
885 &CALL_EXPR_ARG (*expr_p
, 0));
890 else if (fndecl_built_in_p (decl
, BUILT_IN_CLZG
, BUILT_IN_CTZG
))
891 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
,
894 /* All consteval functions should have been processed by now. */
895 gcc_checking_assert (!immediate_invocation_p (decl
));
900 /* A TARGET_EXPR that expresses direct-initialization should have been
901 elided by cp_gimplify_init_expr. */
902 gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p
));
903 /* Likewise, but allow extra temps of trivial type so that
904 gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
905 on the rhs of an assignment, as in constexpr-aggr1.C. */
906 gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p
)
907 || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)));
912 *expr_p
= cplus_expand_constant (*expr_p
);
913 if (TREE_CODE (*expr_p
) == PTRMEM_CST
)
920 if (TREE_OPERAND (*expr_p
, 0)
921 && (TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == INIT_EXPR
922 || TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == MODIFY_EXPR
))
924 expr_p
= &TREE_OPERAND (*expr_p
, 0);
925 /* Avoid going through the INIT_EXPR case, which can
926 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
927 goto modify_expr_case
;
932 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
936 /* Restore saved state. */
937 if (STATEMENT_CODE_P (code
))
938 current_stmt_tree ()->stmts_are_full_exprs_p
939 = saved_stmts_are_full_exprs_p
;
945 is_invisiref_parm (const_tree t
)
947 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
948 && DECL_BY_REFERENCE (t
));
951 /* A stable comparison routine for use with splay trees and DECLs. */
954 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
959 return DECL_UID (a
) - DECL_UID (b
);
962 /* OpenMP context during genericization. */
964 struct cp_genericize_omp_taskreg
968 struct cp_genericize_omp_taskreg
*outer
;
969 splay_tree variables
;
972 /* Return true if genericization should try to determine if
973 DECL is firstprivate or shared within task regions. */
976 omp_var_to_track (tree decl
)
978 tree type
= TREE_TYPE (decl
);
979 if (is_invisiref_parm (decl
))
980 type
= TREE_TYPE (type
);
981 else if (TYPE_REF_P (type
))
982 type
= TREE_TYPE (type
);
983 while (TREE_CODE (type
) == ARRAY_TYPE
)
984 type
= TREE_TYPE (type
);
985 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
987 if (VAR_P (decl
) && CP_DECL_THREAD_LOCAL_P (decl
))
989 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
994 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
997 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
999 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
1000 (splay_tree_key
) decl
);
1003 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
1005 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
1006 if (!omp_ctx
->default_shared
)
1008 struct cp_genericize_omp_taskreg
*octx
;
1010 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
1012 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
1013 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
1015 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
1018 if (octx
->is_parallel
)
1022 && (TREE_CODE (decl
) == PARM_DECL
1023 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
1024 && DECL_CONTEXT (decl
) == current_function_decl
)))
1025 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
1026 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
1028 /* DECL is implicitly determined firstprivate in
1029 the current task construct. Ensure copy ctor and
1030 dtor are instantiated, because during gimplification
1031 it will be already too late. */
1032 tree type
= TREE_TYPE (decl
);
1033 if (is_invisiref_parm (decl
))
1034 type
= TREE_TYPE (type
);
1035 else if (TYPE_REF_P (type
))
1036 type
= TREE_TYPE (type
);
1037 while (TREE_CODE (type
) == ARRAY_TYPE
)
1038 type
= TREE_TYPE (type
);
1039 get_copy_ctor (type
, tf_none
);
1040 get_dtor (type
, tf_none
);
1043 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
1047 /* True if any of the element initializers in CTOR are TARGET_EXPRs that are
1048 not expected to elide, e.g. because unsafe_copy_elision_p is true. */
1051 any_non_eliding_target_exprs (tree ctor
)
1053 for (const constructor_elt
&e
: *CONSTRUCTOR_ELTS (ctor
))
1055 if (TREE_CODE (e
.value
) == TARGET_EXPR
1056 && !TARGET_EXPR_ELIDING_P (e
.value
))
1062 /* If we might need to clean up a partially constructed object, break down the
1063 CONSTRUCTOR with split_nonconstant_init. Also expand VEC_INIT_EXPR at this
1064 point. If initializing TO with FROM is non-trivial, overwrite *REPLACE with
1068 cp_genericize_init (tree
*replace
, tree from
, tree to
, vec
<tree
,va_gc
>** flags
)
1070 tree init
= NULL_TREE
;
1071 if (TREE_CODE (from
) == VEC_INIT_EXPR
)
1072 init
= expand_vec_init_expr (to
, from
, tf_warning_or_error
, flags
);
1073 else if (TREE_CODE (from
) == CONSTRUCTOR
1074 && TREE_SIDE_EFFECTS (from
)
1075 && ((flag_exceptions
1076 && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from
)))
1077 || any_non_eliding_target_exprs (from
)))
1079 to
= cp_stabilize_reference (to
);
1080 replace_placeholders (from
, to
);
1081 init
= split_nonconstant_init (to
, from
);
1086 if (*replace
== from
)
1087 /* Make cp_gimplify_init_expr call replace_decl on this
1088 TARGET_EXPR_INITIAL. */
1089 init
= fold_convert (void_type_node
, init
);
1094 /* For an INIT_EXPR, replace the INIT_EXPR itself. */
1097 cp_genericize_init_expr (tree
*stmt_p
)
1099 iloc_sentinel ils
= EXPR_LOCATION (*stmt_p
);
1100 tree to
= TREE_OPERAND (*stmt_p
, 0);
1101 tree from
= TREE_OPERAND (*stmt_p
, 1);
1102 if (SIMPLE_TARGET_EXPR_P (from
)
1103 /* Return gets confused if we clobber its INIT_EXPR this soon. */
1104 && TREE_CODE (to
) != RESULT_DECL
)
1105 from
= TARGET_EXPR_INITIAL (from
);
1106 cp_genericize_init (stmt_p
, from
, to
, nullptr);
1109 /* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL. We will need to use
1110 replace_decl later when we know what we're initializing. */
1113 cp_genericize_target_expr (tree
*stmt_p
)
1115 iloc_sentinel ils
= EXPR_LOCATION (*stmt_p
);
1116 tree slot
= TARGET_EXPR_SLOT (*stmt_p
);
1117 vec
<tree
, va_gc
> *flags
= make_tree_vector ();
1118 cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p
),
1119 TARGET_EXPR_INITIAL (*stmt_p
), slot
, &flags
);
1120 gcc_assert (!DECL_INITIAL (slot
));
1121 for (tree f
: flags
)
1123 /* Once initialization is complete TARGET_EXPR_CLEANUP becomes active, so
1124 disable any subobject cleanups. */
1125 tree d
= build_disable_temp_cleanup (f
);
1126 auto &r
= TARGET_EXPR_INITIAL (*stmt_p
);
1127 r
= add_stmt_to_compound (r
, d
);
1129 release_tree_vector (flags
);
1132 /* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
1133 TARGET_EXPR_INITIAL, and this also updates *_SLOT. We need this extra
1134 replacement when cp_folding TARGET_EXPR to preserve the invariant that
1135 AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT. */
1138 maybe_replace_decl (tree
*tp
, tree decl
, tree replacement
)
1140 if (!*tp
|| !VOID_TYPE_P (TREE_TYPE (*tp
)))
1143 while (TREE_CODE (t
) == COMPOUND_EXPR
)
1144 t
= TREE_OPERAND (t
, 1);
1145 if (TREE_CODE (t
) == AGGR_INIT_EXPR
)
1146 replace_decl (&AGGR_INIT_EXPR_SLOT (t
), decl
, replacement
);
1147 else if (TREE_CODE (t
) == VEC_INIT_EXPR
)
1148 replace_decl (&VEC_INIT_EXPR_SLOT (t
), decl
, replacement
);
1150 replace_decl (tp
, decl
, replacement
);
1154 /* Genericization context. */
1156 struct cp_genericize_data
1158 hash_set
<tree
> *p_set
;
1159 auto_vec
<tree
> bind_expr_stack
;
1160 struct cp_genericize_omp_taskreg
*omp_ctx
;
1163 bool handle_invisiref_parm_p
;
1166 /* Emit an error about taking the address of an immediate function.
1167 EXPR is the whole expression; DECL is the immediate function. */
1170 taking_address_of_imm_fn_error (tree expr
, tree decl
)
1172 auto_diagnostic_group d
;
1173 const location_t loc
= (TREE_CODE (expr
) == PTRMEM_CST
1174 ? PTRMEM_CST_LOCATION (expr
)
1175 : EXPR_LOCATION (expr
));
1176 error_at (loc
, "taking address of an immediate function %qD", decl
);
1177 maybe_explain_promoted_consteval (loc
, decl
);
1180 /* A subroutine of cp_fold_r to handle immediate functions. */
1183 cp_fold_immediate_r (tree
*stmt_p
, int *walk_subtrees
, void *data_
)
1185 auto data
= static_cast<cp_fold_data
*>(data_
);
1186 tree stmt
= *stmt_p
;
1187 /* The purpose of this is not to emit errors for mce_unknown. */
1188 const tsubst_flags_t complain
= (data
->flags
& ff_mce_false
1189 ? tf_error
: tf_none
);
1190 const tree_code code
= TREE_CODE (stmt
);
1192 /* No need to look into types or unevaluated operands.
1193 NB: This affects cp_fold_r as well. */
1195 || unevaluated_p (code
)
1196 /* We do not use in_immediate_context here because it checks
1197 more than is desirable, e.g., sk_template_parms. */
1198 || cp_unevaluated_operand
1199 || (current_function_decl
1200 && DECL_IMMEDIATE_FUNCTION_P (current_function_decl
)))
1206 tree decl
= NULL_TREE
;
1207 bool call_p
= false;
1209 /* We are looking for &fn or fn(). */
1213 case AGGR_INIT_EXPR
:
1214 if (tree fn
= cp_get_callee (stmt
))
1215 if (TREE_CODE (fn
) != ADDR_EXPR
|| ADDR_EXPR_DENOTES_CALL_P (fn
))
1216 decl
= cp_get_fndecl_from_callee (fn
, /*fold*/false);
1220 decl
= PTRMEM_CST_MEMBER (stmt
);
1223 if (!ADDR_EXPR_DENOTES_CALL_P (stmt
))
1224 decl
= TREE_OPERAND (stmt
, 0);
1230 if (!decl
|| TREE_CODE (decl
) != FUNCTION_DECL
)
1233 /* Fully escalate once all templates have been instantiated. What we're
1234 calling is not a consteval function but it may become one. This
1235 requires recursing; DECL may be promoted to consteval because it
1236 contains an escalating expression E, but E itself may have to be
1237 promoted first, etc. */
1238 if (at_eof
> 1 && unchecked_immediate_escalating_function_p (decl
))
1240 /* Set before the actual walk to avoid endless recursion. */
1241 DECL_ESCALATION_CHECKED_P (decl
) = true;
1242 /* We're only looking for the first escalating expression. Let us not
1243 walk more trees than necessary, hence mce_unknown. */
1244 cp_fold_immediate (&DECL_SAVED_TREE (decl
), mce_unknown
, decl
);
1247 /* [expr.const]p16 "An expression or conversion is immediate-escalating if
1248 it is not initially in an immediate function context and it is either
1249 -- an immediate invocation that is not a constant expression and is not
1250 a subexpression of an immediate invocation."
1252 If we are in an immediate-escalating function, the immediate-escalating
1253 expression or conversion makes it an immediate function. So STMT does
1254 not need to produce a constant expression. */
1255 if (DECL_IMMEDIATE_FUNCTION_P (decl
))
1257 tree e
= cxx_constant_value (stmt
, tf_none
);
1258 if (e
== error_mark_node
)
1260 /* This takes care of, e.g.,
1261 template <typename T>
1262 constexpr int f(T t)
1266 where id (consteval) causes f<int> to be promoted. */
1267 if (immediate_escalating_function_p (current_function_decl
))
1268 promote_function_to_consteval (current_function_decl
);
1269 else if (complain
& tf_error
)
1273 auto_diagnostic_group d
;
1274 location_t loc
= cp_expr_loc_or_input_loc (stmt
);
1275 error_at (loc
, "call to consteval function %qE is "
1276 "not a constant expression", stmt
);
1277 /* Explain why it's not a constant expression. */
1278 *stmt_p
= cxx_constant_value (stmt
, complain
);
1279 maybe_explain_promoted_consteval (loc
, decl
);
1281 else if (!data
->pset
.add (stmt
))
1283 taking_address_of_imm_fn_error (stmt
, decl
);
1284 *stmt_p
= build_zero_cst (TREE_TYPE (stmt
));
1286 /* If we're giving hard errors, continue the walk rather than
1287 bailing out after the first error. */
1293 /* We've evaluated the consteval function call. */
1297 /* We've encountered a function call that may turn out to be consteval
1298 later. Store its caller so that we can ensure that the call is
1299 a constant expression. */
1300 else if (unchecked_immediate_escalating_function_p (decl
))
1302 /* Make sure we're not inserting new elements while walking
1303 the deferred_escalating_exprs hash table; if we are, it's
1304 likely that a function wasn't properly marked checked for
1306 gcc_checking_assert (at_eof
<= 1);
1307 if (current_function_decl
)
1308 remember_escalating_expr (current_function_decl
);
1309 /* auto p = &f<int>; in the global scope won't be ensconced in
1310 a function we could store for later at this point. (If there's
1311 no c_f_d at this point and we're dealing with a call, we should
1312 see the call when cp_fold_function __static_i_and_d.) */
1314 remember_escalating_expr (stmt
);
1320 /* Perform any pre-gimplification folding of C++ front end trees to
1322 Note: The folding of non-omp cases is something to move into
1323 the middle-end. As for now we have most foldings only on GENERIC
1324 in fold-const, we need to perform this before transformation to
1327 ??? This is algorithmically weird because walk_tree works in pre-order, so
1328 we see outer expressions before inner expressions. This isn't as much of an
1329 issue because cp_fold recurses into subexpressions in many cases, but then
1330 walk_tree walks back into those subexpressions again. We avoid the
1331 resulting complexity problem by caching the result of cp_fold, but it's
1335 cp_fold_r (tree
*stmt_p
, int *walk_subtrees
, void *data_
)
1337 cp_fold_data
*data
= (cp_fold_data
*)data_
;
1338 tree stmt
= *stmt_p
;
1339 enum tree_code code
= TREE_CODE (stmt
);
1341 if (cxx_dialect
>= cxx20
)
1343 /* Unfortunately we must handle code like
1345 where we have to check bar too. The cp_fold call below could
1346 fold the ?: into a constant before we've checked it. */
1347 if (code
== COND_EXPR
)
1349 auto then_fn
= cp_fold_r
, else_fn
= cp_fold_r
;
1350 /* See if we can figure out if either of the branches is dead. If it
1351 is, we don't need to do everything that cp_fold_r does. */
1352 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_fold_r
, data
, nullptr);
1353 if (integer_zerop (TREE_OPERAND (stmt
, 0)))
1354 then_fn
= cp_fold_immediate_r
;
1355 else if (integer_nonzerop (TREE_OPERAND (stmt
, 0)))
1356 else_fn
= cp_fold_immediate_r
;
1358 if (TREE_OPERAND (stmt
, 1))
1359 cp_walk_tree (&TREE_OPERAND (stmt
, 1), then_fn
, data
,
1361 if (TREE_OPERAND (stmt
, 2))
1362 cp_walk_tree (&TREE_OPERAND (stmt
, 2), else_fn
, data
,
1365 /* Don't return yet, still need the cp_fold below. */
1368 cp_fold_immediate_r (stmt_p
, walk_subtrees
, data
);
1371 *stmt_p
= stmt
= cp_fold (*stmt_p
, data
->flags
);
1373 /* For certain trees, like +foo(), the cp_fold above will remove the +,
1374 and the subsequent tree walk would go straight down to the CALL_EXPR's
1375 operands, meaning that cp_fold_immediate_r would never see the
1376 CALL_EXPR. Ew :(. */
1377 if (TREE_CODE (stmt
) == CALL_EXPR
&& code
!= CALL_EXPR
)
1378 cp_fold_immediate_r (stmt_p
, walk_subtrees
, data
);
1380 if (data
->pset
.add (stmt
))
1382 /* Don't walk subtrees of stmts we've already walked once, otherwise
1383 we can have exponential complexity with e.g. lots of nested
1384 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
1385 always the same tree, which the first time cp_fold_r has been
1386 called on it had the subtrees walked. */
1391 code
= TREE_CODE (stmt
);
1398 case OMP_DISTRIBUTE
:
1404 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_fold_r
, data
, NULL
);
1405 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_fold_r
, data
, NULL
);
1406 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_fold_r
, data
, NULL
);
1407 x
= OMP_FOR_COND (stmt
);
1408 if (x
&& TREE_CODE_CLASS (TREE_CODE (x
)) == tcc_comparison
)
1410 cp_walk_tree (&TREE_OPERAND (x
, 0), cp_fold_r
, data
, NULL
);
1411 cp_walk_tree (&TREE_OPERAND (x
, 1), cp_fold_r
, data
, NULL
);
1413 else if (x
&& TREE_CODE (x
) == TREE_VEC
)
1415 n
= TREE_VEC_LENGTH (x
);
1416 for (i
= 0; i
< n
; i
++)
1418 tree o
= TREE_VEC_ELT (x
, i
);
1419 if (o
&& TREE_CODE_CLASS (TREE_CODE (o
)) == tcc_comparison
)
1420 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1423 x
= OMP_FOR_INCR (stmt
);
1424 if (x
&& TREE_CODE (x
) == TREE_VEC
)
1426 n
= TREE_VEC_LENGTH (x
);
1427 for (i
= 0; i
< n
; i
++)
1429 tree o
= TREE_VEC_ELT (x
, i
);
1430 if (o
&& TREE_CODE (o
) == MODIFY_EXPR
)
1431 o
= TREE_OPERAND (o
, 1);
1432 if (o
&& (TREE_CODE (o
) == PLUS_EXPR
|| TREE_CODE (o
) == MINUS_EXPR
1433 || TREE_CODE (o
) == POINTER_PLUS_EXPR
))
1435 cp_walk_tree (&TREE_OPERAND (o
, 0), cp_fold_r
, data
, NULL
);
1436 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1440 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_fold_r
, data
, NULL
);
1445 if (IF_STMT_CONSTEVAL_P (stmt
))
1447 /* Don't walk THEN_CLAUSE (stmt) for consteval if. IF_COND is always
1448 boolean_false_node. */
1449 cp_walk_tree (&ELSE_CLAUSE (stmt
), cp_fold_r
, data
, NULL
);
1450 cp_walk_tree (&IF_SCOPE (stmt
), cp_fold_r
, data
, NULL
);
1456 /* cp_genericize_{init,target}_expr are only for genericize time; they're
1457 here rather than in cp_genericize to avoid problems with the invisible
1458 reference transition. */
1460 if (data
->flags
& ff_genericize
)
1461 cp_genericize_init_expr (stmt_p
);
1465 if (data
->flags
& ff_genericize
)
1466 cp_genericize_target_expr (stmt_p
);
1468 /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
1469 that case, strip it in favor of this one. */
1470 if (tree
&init
= TARGET_EXPR_INITIAL (stmt
))
1472 cp_walk_tree (&init
, cp_fold_r
, data
, NULL
);
1473 cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt
), cp_fold_r
, data
, NULL
);
1475 if (TREE_CODE (init
) == TARGET_EXPR
)
1477 tree sub
= TARGET_EXPR_INITIAL (init
);
1478 maybe_replace_decl (&sub
, TARGET_EXPR_SLOT (init
),
1479 TARGET_EXPR_SLOT (stmt
));
1492 /* Fold ALL the trees! FIXME we should be able to remove this, but
1493 apparently that still causes optimization regressions. */
1496 cp_fold_function (tree fndecl
)
1498 /* By now all manifestly-constant-evaluated expressions will have
1499 been constant-evaluated already if possible, so we can safely
1500 pass ff_mce_false. */
1501 cp_fold_data
data (ff_genericize
| ff_mce_false
);
1502 cp_walk_tree (&DECL_SAVED_TREE (fndecl
), cp_fold_r
, &data
, NULL
);
1504 /* This is merely an optimization: if FNDECL has no i-e expressions,
1505 we'll not save c_f_d, and we can safely say that FNDECL will not
1506 be promoted to consteval. */
1507 if (deferred_escalating_exprs
1508 && !deferred_escalating_exprs
->contains (current_function_decl
))
1509 DECL_ESCALATION_CHECKED_P (fndecl
) = true;
1512 /* We've stashed immediate-escalating functions. Now see if they indeed
1513 ought to be promoted to consteval. */
1516 process_and_check_pending_immediate_escalating_fns ()
1518 /* This will be null for -fno-immediate-escalation. */
1519 if (!deferred_escalating_exprs
)
1522 for (auto e
: *deferred_escalating_exprs
)
1523 if (TREE_CODE (e
) == FUNCTION_DECL
&& !DECL_ESCALATION_CHECKED_P (e
))
1524 cp_fold_immediate (&DECL_SAVED_TREE (e
), mce_false
, e
);
1526 /* We've escalated every function that could have been promoted to
1527 consteval. Check that we are not taking the address of a consteval
1529 for (auto e
: *deferred_escalating_exprs
)
1531 if (TREE_CODE (e
) == FUNCTION_DECL
)
1533 tree decl
= (TREE_CODE (e
) == PTRMEM_CST
1534 ? PTRMEM_CST_MEMBER (e
)
1535 : TREE_OPERAND (e
, 0));
1536 if (DECL_IMMEDIATE_FUNCTION_P (decl
))
1537 taking_address_of_imm_fn_error (e
, decl
);
1540 deferred_escalating_exprs
= nullptr;
1543 /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1545 static tree
genericize_spaceship (tree expr
)
1547 iloc_sentinel
s (cp_expr_location (expr
));
1548 tree type
= TREE_TYPE (expr
);
1549 tree op0
= TREE_OPERAND (expr
, 0);
1550 tree op1
= TREE_OPERAND (expr
, 1);
1551 return genericize_spaceship (input_location
, type
, op0
, op1
);
1554 /* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1555 to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
1556 the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
1557 NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR. */
1560 predeclare_vla (tree expr
)
1562 tree type
= TREE_TYPE (expr
);
1563 if (type
== error_mark_node
)
1565 if (is_typedef_decl (expr
))
1566 type
= DECL_ORIGINAL_TYPE (expr
);
1568 /* We need to strip pointers for gimplify_type_sizes. */
1570 while (POINTER_TYPE_P (vla
))
1572 if (TYPE_NAME (vla
))
1574 vla
= TREE_TYPE (vla
);
1576 if (vla
== type
|| TYPE_NAME (vla
)
1577 || !variably_modified_type_p (vla
, NULL_TREE
))
1580 tree decl
= build_decl (input_location
, TYPE_DECL
, NULL_TREE
, vla
);
1581 DECL_ARTIFICIAL (decl
) = 1;
1582 TYPE_NAME (vla
) = decl
;
1583 tree dexp
= build_stmt (input_location
, DECL_EXPR
, decl
);
1591 expr
= build2 (COMPOUND_EXPR
, type
, dexp
, expr
);
1596 /* Perform any pre-gimplification lowering of C++ front end trees to
1600 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
1602 tree stmt
= *stmt_p
;
1603 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
1604 hash_set
<tree
> *p_set
= wtd
->p_set
;
1606 /* If in an OpenMP context, note var uses. */
1607 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1609 || TREE_CODE (stmt
) == PARM_DECL
1610 || TREE_CODE (stmt
) == RESULT_DECL
)
1611 && omp_var_to_track (stmt
))
1612 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
1614 /* Don't dereference parms in a thunk, pass the references through. */
1615 if ((TREE_CODE (stmt
) == CALL_EXPR
&& call_from_lambda_thunk_p (stmt
))
1616 || (TREE_CODE (stmt
) == AGGR_INIT_EXPR
&& AGGR_INIT_FROM_THUNK_P (stmt
)))
1622 /* Dereference invisible reference parms. */
1623 if (wtd
->handle_invisiref_parm_p
&& is_invisiref_parm (stmt
))
1625 *stmt_p
= convert_from_reference (stmt
);
1626 p_set
->add (*stmt_p
);
1631 /* Map block scope extern declarations to visible declarations with the
1632 same name and type in outer scopes if any. */
1633 if (VAR_OR_FUNCTION_DECL_P (stmt
) && DECL_LOCAL_DECL_P (stmt
))
1634 if (tree alias
= DECL_LOCAL_DECL_ALIAS (stmt
))
1636 if (alias
!= error_mark_node
)
1639 TREE_USED (alias
) |= TREE_USED (stmt
);
1645 if (TREE_CODE (stmt
) == INTEGER_CST
1646 && TYPE_REF_P (TREE_TYPE (stmt
))
1647 && (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1648 && !wtd
->no_sanitize_p
)
1650 ubsan_maybe_instrument_reference (stmt_p
);
1651 if (*stmt_p
!= stmt
)
1658 /* Other than invisiref parms, don't walk the same tree twice. */
1659 if (p_set
->contains (stmt
))
1665 switch (TREE_CODE (stmt
))
1668 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1670 /* If in an OpenMP context, note var uses. */
1671 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1672 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
1673 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
1674 *stmt_p
= fold_convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
1680 if (TREE_OPERAND (stmt
, 0))
1682 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1683 /* Don't dereference an invisiref RESULT_DECL inside a
1686 if (RETURN_EXPR_LOCAL_ADDR_P (stmt
))
1688 /* Don't return the address of a local variable. */
1689 tree
*p
= &TREE_OPERAND (stmt
, 0);
1690 while (TREE_CODE (*p
) == COMPOUND_EXPR
)
1691 p
= &TREE_OPERAND (*p
, 0);
1692 if (TREE_CODE (*p
) == INIT_EXPR
)
1694 tree op
= TREE_OPERAND (*p
, 1);
1695 tree new_op
= build2 (COMPOUND_EXPR
, TREE_TYPE (op
), op
,
1696 build_zero_cst (TREE_TYPE (op
)));
1697 TREE_OPERAND (*p
, 1) = new_op
;
1704 switch (OMP_CLAUSE_CODE (stmt
))
1706 case OMP_CLAUSE_LASTPRIVATE
:
1707 /* Don't dereference an invisiref in OpenMP clauses. */
1708 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1711 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
1712 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
1713 cp_genericize_r
, data
, NULL
);
1716 case OMP_CLAUSE_PRIVATE
:
1717 /* Don't dereference an invisiref in OpenMP clauses. */
1718 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1720 else if (wtd
->omp_ctx
!= NULL
)
1722 /* Private clause doesn't cause any references to the
1723 var in outer contexts, avoid calling
1724 omp_cxx_notice_variable for it. */
1725 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
1726 wtd
->omp_ctx
= NULL
;
1727 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1733 case OMP_CLAUSE_SHARED
:
1734 case OMP_CLAUSE_FIRSTPRIVATE
:
1735 case OMP_CLAUSE_COPYIN
:
1736 case OMP_CLAUSE_COPYPRIVATE
:
1737 case OMP_CLAUSE_INCLUSIVE
:
1738 case OMP_CLAUSE_EXCLUSIVE
:
1739 /* Don't dereference an invisiref in OpenMP clauses. */
1740 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1743 case OMP_CLAUSE_REDUCTION
:
1744 case OMP_CLAUSE_IN_REDUCTION
:
1745 case OMP_CLAUSE_TASK_REDUCTION
:
1746 /* Don't dereference an invisiref in reduction clause's
1747 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1748 still needs to be genericized. */
1749 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1752 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1753 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1754 cp_genericize_r
, data
, NULL
);
1755 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1756 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1757 cp_genericize_r
, data
, NULL
);
1765 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1766 to lower this construct before scanning it, so we need to lower these
1767 before doing anything else. */
1769 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1770 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1773 CLEANUP_BODY (stmt
),
1774 CLEANUP_EXPR (stmt
));
1778 genericize_if_stmt (stmt_p
);
1779 /* *stmt_p has changed, tail recurse to handle it again. */
1780 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1782 /* COND_EXPR might have incompatible types in branches if one or both
1783 arms are bitfields. Fix it up now. */
1787 = (TREE_OPERAND (stmt
, 1)
1788 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1791 = (TREE_OPERAND (stmt
, 2)
1792 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1795 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1796 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1798 TREE_OPERAND (stmt
, 1)
1799 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1800 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1804 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1805 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1807 TREE_OPERAND (stmt
, 2)
1808 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1809 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1816 if (UNLIKELY (wtd
->omp_ctx
!= NULL
))
1819 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1821 && !DECL_EXTERNAL (decl
)
1822 && omp_var_to_track (decl
))
1825 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1826 (splay_tree_key
) decl
);
1828 splay_tree_insert (wtd
->omp_ctx
->variables
,
1829 (splay_tree_key
) decl
,
1831 ? OMP_CLAUSE_DEFAULT_SHARED
1832 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1835 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1837 /* The point here is to not sanitize static initializers. */
1838 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1839 wtd
->no_sanitize_p
= true;
1840 for (tree decl
= BIND_EXPR_VARS (stmt
);
1842 decl
= DECL_CHAIN (decl
))
1844 && TREE_STATIC (decl
)
1845 && DECL_INITIAL (decl
))
1846 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1847 wtd
->no_sanitize_p
= no_sanitize_p
;
1849 wtd
->bind_expr_stack
.safe_push (stmt
);
1850 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1851 cp_genericize_r
, data
, NULL
);
1852 wtd
->bind_expr_stack
.pop ();
1855 case ASSERTION_STMT
:
1856 case PRECONDITION_STMT
:
1857 case POSTCONDITION_STMT
:
1859 if (tree check
= build_contract_check (stmt
))
1862 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1865 /* If we didn't build a check, replace it with void_node so we don't
1866 leak contracts into GENERIC. */
1867 *stmt_p
= void_node
;
1874 tree block
= NULL_TREE
;
1876 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1877 BLOCK, and append an IMPORTED_DECL to its
1878 BLOCK_VARS chained list. */
1879 if (wtd
->bind_expr_stack
.exists ())
1882 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1883 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1888 tree decl
= TREE_OPERAND (stmt
, 0);
1891 if (undeduced_auto_decl (decl
))
1892 /* Omit from the GENERIC, the back-end can't handle it. */;
1895 tree using_directive
= make_node (IMPORTED_DECL
);
1896 TREE_TYPE (using_directive
) = void_type_node
;
1897 DECL_CONTEXT (using_directive
) = current_function_decl
;
1898 DECL_SOURCE_LOCATION (using_directive
)
1899 = cp_expr_loc_or_input_loc (stmt
);
1901 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
) = decl
;
1902 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1903 BLOCK_VARS (block
) = using_directive
;
1906 /* The USING_STMT won't appear in GENERIC. */
1907 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1913 if (TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1915 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1916 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1921 tree d
= DECL_EXPR_DECL (stmt
);
1923 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1931 struct cp_genericize_omp_taskreg omp_ctx
;
1936 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1937 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1938 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1939 omp_ctx
.outer
= wtd
->omp_ctx
;
1940 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1941 wtd
->omp_ctx
= &omp_ctx
;
1942 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1943 switch (OMP_CLAUSE_CODE (c
))
1945 case OMP_CLAUSE_SHARED
:
1946 case OMP_CLAUSE_PRIVATE
:
1947 case OMP_CLAUSE_FIRSTPRIVATE
:
1948 case OMP_CLAUSE_LASTPRIVATE
:
1949 decl
= OMP_CLAUSE_DECL (c
);
1950 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1952 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1955 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1956 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1957 ? OMP_CLAUSE_DEFAULT_SHARED
1958 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1959 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
&& omp_ctx
.outer
)
1960 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1962 case OMP_CLAUSE_DEFAULT
:
1963 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1964 omp_ctx
.default_shared
= true;
1968 if (TREE_CODE (stmt
) == OMP_TASKLOOP
)
1969 c_genericize_control_stmt (stmt_p
, walk_subtrees
, data
,
1970 cp_genericize_r
, cp_walk_subtrees
);
1972 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1973 wtd
->omp_ctx
= omp_ctx
.outer
;
1974 splay_tree_delete (omp_ctx
.variables
);
1979 cfun
->has_omp_target
= true;
1985 tree try_block
= wtd
->try_block
;
1986 wtd
->try_block
= stmt
;
1987 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1988 wtd
->try_block
= try_block
;
1989 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1993 case MUST_NOT_THROW_EXPR
:
1994 /* MUST_NOT_THROW_COND might be something else with TM. */
1995 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
1998 tree try_block
= wtd
->try_block
;
1999 wtd
->try_block
= stmt
;
2000 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
2001 wtd
->try_block
= try_block
;
2007 location_t loc
= location_of (stmt
);
2008 if (warning_suppressed_p (stmt
/* What warning? */))
2010 else if (wtd
->try_block
)
2012 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
)
2014 auto_diagnostic_group d
;
2015 if (warning_at (loc
, OPT_Wterminate
,
2016 "%<throw%> will always call %<terminate%>")
2017 && cxx_dialect
>= cxx11
2018 && DECL_DESTRUCTOR_P (current_function_decl
))
2019 inform (loc
, "in C++11 destructors default to %<noexcept%>");
2024 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
2025 && DECL_DESTRUCTOR_P (current_function_decl
)
2026 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
2028 && (get_defaulted_eh_spec (current_function_decl
)
2029 == empty_except_spec
))
2030 warning_at (loc
, OPT_Wc__11_compat
,
2031 "in C++11 this %<throw%> will call %<terminate%> "
2032 "because destructors default to %<noexcept%>");
2038 gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt
)));
2039 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
2042 case SPACESHIP_EXPR
:
2043 *stmt_p
= genericize_spaceship (*stmt_p
);
2047 /* By the time we get here we're handing off to the back end, so we don't
2048 need or want to preserve PTRMEM_CST anymore. */
2049 *stmt_p
= cplus_expand_constant (stmt
);
2054 /* For MEM_REF, make sure not to sanitize the second operand even
2055 if it has reference type. It is just an offset with a type
2056 holding other information. There is no other processing we
2057 need to do for INTEGER_CSTs, so just ignore the second argument
2059 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
2064 *stmt_p
= predeclare_vla (*stmt_p
);
2066 /* Warn of new allocations that are not big enough for the target
2069 && TREE_CODE (TREE_OPERAND (stmt
, 0)) == CALL_EXPR
2070 && POINTER_TYPE_P (TREE_TYPE (stmt
)))
2072 if (tree fndecl
= get_callee_fndecl (TREE_OPERAND (stmt
, 0)))
2073 if (DECL_IS_MALLOC (fndecl
))
2075 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (fndecl
));
2076 tree alloc_size
= lookup_attribute ("alloc_size", attrs
);
2078 warn_for_alloc_size (EXPR_LOCATION (stmt
),
2079 TREE_TYPE (TREE_TYPE (stmt
)),
2080 TREE_OPERAND (stmt
, 0), alloc_size
);
2084 if (!wtd
->no_sanitize_p
2085 && sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
)
2086 && TYPE_REF_P (TREE_TYPE (stmt
)))
2087 ubsan_maybe_instrument_reference (stmt_p
);
2091 /* Evaluate function concept checks instead of treating them as
2092 normal functions. */
2093 if (concept_check_p (stmt
))
2095 *stmt_p
= evaluate_concept_check (stmt
);
2096 * walk_subtrees
= 0;
2100 if (!wtd
->no_sanitize_p
2101 && sanitize_flags_p ((SANITIZE_NULL
2102 | SANITIZE_ALIGNMENT
| SANITIZE_VPTR
)))
2104 tree fn
= CALL_EXPR_FN (stmt
);
2106 && !error_operand_p (fn
)
2107 && INDIRECT_TYPE_P (TREE_TYPE (fn
))
2108 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) == METHOD_TYPE
)
2111 = TREE_CODE (fn
) == ADDR_EXPR
2112 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2113 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn
, 0));
2114 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
2115 ubsan_maybe_instrument_member_call (stmt
, is_ctor
);
2116 if (sanitize_flags_p (SANITIZE_VPTR
) && !is_ctor
)
2117 cp_ubsan_maybe_instrument_member_call (stmt
);
2119 else if (fn
== NULL_TREE
2120 && CALL_EXPR_IFN (stmt
) == IFN_UBSAN_NULL
2121 && TREE_CODE (CALL_EXPR_ARG (stmt
, 0)) == INTEGER_CST
2122 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt
, 0))))
2126 case AGGR_INIT_EXPR
:
2127 /* For calls to a multi-versioned function, overload resolution
2128 returns the function with the highest target priority, that is,
2129 the version that will checked for dispatching first. If this
2130 version is inlinable, a direct call to this version can be made
2131 otherwise the call should go through the dispatcher. */
2133 tree fn
= cp_get_callee_fndecl_nofold (stmt
);
2134 if (fn
&& DECL_FUNCTION_VERSIONED (fn
)
2135 && (current_function_decl
== NULL
2136 || !targetm
.target_option
.can_inline_p (current_function_decl
,
2138 if (tree dis
= get_function_version_dispatcher (fn
))
2140 mark_versions_used (dis
);
2141 dis
= build_address (dis
);
2142 if (TREE_CODE (stmt
) == CALL_EXPR
)
2143 CALL_EXPR_FN (stmt
) = dis
;
2145 AGGR_INIT_EXPR_FN (stmt
) = dis
;
2151 if (TARGET_EXPR_INITIAL (stmt
)
2152 && TREE_CODE (TARGET_EXPR_INITIAL (stmt
)) == CONSTRUCTOR
2153 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt
)))
2154 TARGET_EXPR_NO_ELIDE (stmt
) = 1;
2157 case TEMPLATE_ID_EXPR
:
2158 gcc_assert (concept_check_p (stmt
));
2159 /* Emit the value of the concept check. */
2160 *stmt_p
= evaluate_concept_check (stmt
);
2164 case OMP_DISTRIBUTE
:
2165 /* Need to explicitly instantiate copy ctors on class iterators of
2166 composite distribute parallel for. */
2167 if (OMP_FOR_INIT (*stmt_p
) == NULL_TREE
)
2169 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
2170 tree inner
= walk_tree (&OMP_FOR_BODY (*stmt_p
),
2171 find_combined_omp_for
, data
, NULL
);
2172 if (inner
!= NULL_TREE
2173 && TREE_CODE (inner
) == OMP_FOR
)
2175 for (int i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner
)); i
++)
2176 if (TREE_VEC_ELT (OMP_FOR_INIT (inner
), i
)
2177 && OMP_FOR_ORIG_DECLS (inner
)
2178 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner
),
2180 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner
),
2183 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner
), i
);
2184 /* Class iterators aren't allowed on OMP_SIMD, so the only
2185 case we need to solve is distribute parallel for. */
2186 gcc_assert (TREE_CODE (inner
) == OMP_FOR
2188 tree orig_decl
= TREE_PURPOSE (orig
);
2189 tree c
, cl
= NULL_TREE
;
2190 for (c
= OMP_FOR_CLAUSES (inner
);
2191 c
; c
= OMP_CLAUSE_CHAIN (c
))
2192 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
2193 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
2194 && OMP_CLAUSE_DECL (c
) == orig_decl
)
2199 if (cl
== NULL_TREE
)
2201 for (c
= OMP_PARALLEL_CLAUSES (*data
[1]);
2202 c
; c
= OMP_CLAUSE_CHAIN (c
))
2203 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
2204 && OMP_CLAUSE_DECL (c
) == orig_decl
)
2212 orig_decl
= require_complete_type (orig_decl
);
2213 tree inner_type
= TREE_TYPE (orig_decl
);
2214 if (orig_decl
== error_mark_node
)
2216 if (TYPE_REF_P (TREE_TYPE (orig_decl
)))
2217 inner_type
= TREE_TYPE (inner_type
);
2219 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
2220 inner_type
= TREE_TYPE (inner_type
);
2221 get_copy_ctor (inner_type
, tf_warning_or_error
);
2240 case STATEMENT_LIST
:
2241 /* These cases are handled by shared code. */
2242 c_genericize_control_stmt (stmt_p
, walk_subtrees
, data
,
2243 cp_genericize_r
, cp_walk_subtrees
);
2247 *stmt_p
= build1_loc (EXPR_LOCATION (stmt
), VIEW_CONVERT_EXPR
,
2248 TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
2252 if (IS_TYPE_OR_DECL_P (stmt
))
2257 p_set
->add (*stmt_p
);
2262 /* Lower C++ front end trees to GENERIC in T_P. */
2265 cp_genericize_tree (tree
* t_p
, bool handle_invisiref_parm_p
)
2267 struct cp_genericize_data wtd
;
2269 wtd
.p_set
= new hash_set
<tree
>;
2270 wtd
.bind_expr_stack
.create (0);
2272 wtd
.try_block
= NULL_TREE
;
2273 wtd
.no_sanitize_p
= false;
2274 wtd
.handle_invisiref_parm_p
= handle_invisiref_parm_p
;
2275 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
2277 if (sanitize_flags_p (SANITIZE_VPTR
))
2278 cp_ubsan_instrument_member_accesses (t_p
);
2281 /* If a function that should end with a return in non-void
2282 function doesn't obviously end with return, add ubsan
2283 instrumentation code to verify it at runtime. If -fsanitize=return
2284 is not enabled, instrument __builtin_unreachable. */
2287 cp_maybe_instrument_return (tree fndecl
)
2289 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
2290 || DECL_CONSTRUCTOR_P (fndecl
)
2291 || DECL_DESTRUCTOR_P (fndecl
)
2292 || !targetm
.warn_func_return (fndecl
))
2295 if (!sanitize_flags_p (SANITIZE_RETURN
, fndecl
)
2296 /* Don't add __builtin_unreachable () if not optimizing, it will not
2297 improve any optimizations in that case, just break UB code.
2298 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
2299 UBSan covers this with ubsan_instrument_return above where sufficient
2300 information is provided, while the __builtin_unreachable () below
2301 if return sanitization is disabled will just result in hard to
2302 understand runtime error without location. */
2303 && ((!optimize
&& !flag_unreachable_traps
)
2304 || sanitize_flags_p (SANITIZE_UNREACHABLE
, fndecl
)))
2307 tree t
= DECL_SAVED_TREE (fndecl
);
2310 switch (TREE_CODE (t
))
2313 t
= BIND_EXPR_BODY (t
);
2315 case TRY_FINALLY_EXPR
:
2316 case CLEANUP_POINT_EXPR
:
2317 t
= TREE_OPERAND (t
, 0);
2319 case STATEMENT_LIST
:
2321 tree_stmt_iterator i
= tsi_last (t
);
2322 while (!tsi_end_p (i
))
2324 tree p
= tsi_stmt (i
);
2325 if (TREE_CODE (p
) != DEBUG_BEGIN_STMT
)
2345 tree
*p
= &DECL_SAVED_TREE (fndecl
);
2346 if (TREE_CODE (*p
) == BIND_EXPR
)
2347 p
= &BIND_EXPR_BODY (*p
);
2349 location_t loc
= DECL_SOURCE_LOCATION (fndecl
);
2350 if (sanitize_flags_p (SANITIZE_RETURN
, fndecl
))
2351 t
= ubsan_instrument_return (loc
);
2353 t
= build_builtin_unreachable (BUILTINS_LOCATION
);
2355 append_to_statement_list (t
, p
);
2359 cp_genericize (tree fndecl
)
2363 /* Fix up the types of parms passed by invisible reference. */
2364 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= DECL_CHAIN (t
))
2365 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
2367 /* If a function's arguments are copied to create a thunk,
2368 then DECL_BY_REFERENCE will be set -- but the type of the
2369 argument will be a pointer type, so we will never get
2371 gcc_assert (!DECL_BY_REFERENCE (t
));
2372 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
2373 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
2374 DECL_BY_REFERENCE (t
) = 1;
2375 TREE_ADDRESSABLE (t
) = 0;
2379 /* Do the same for the return value. */
2380 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
2382 t
= DECL_RESULT (fndecl
);
2383 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
2384 DECL_BY_REFERENCE (t
) = 1;
2385 TREE_ADDRESSABLE (t
) = 0;
2389 /* Adjust DECL_VALUE_EXPR of the original var. */
2390 tree outer
= outer_curly_brace_block (current_function_decl
);
2394 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
2396 && DECL_NAME (t
) == DECL_NAME (var
)
2397 && DECL_HAS_VALUE_EXPR_P (var
)
2398 && DECL_VALUE_EXPR (var
) == t
)
2400 tree val
= convert_from_reference (t
);
2401 SET_DECL_VALUE_EXPR (var
, val
);
2407 /* If we're a clone, the body is already GIMPLE. */
2408 if (DECL_CLONED_FUNCTION_P (fndecl
))
2411 /* Allow cp_genericize calls to be nested. */
2412 bc_state_t save_state
;
2413 save_bc_state (&save_state
);
2415 /* We do want to see every occurrence of the parms, so we can't just use
2416 walk_tree's hash functionality. */
2417 cp_genericize_tree (&DECL_SAVED_TREE (fndecl
), true);
2419 cp_maybe_instrument_return (fndecl
);
2421 /* Do everything else. */
2422 c_genericize (fndecl
);
2423 restore_bc_state (&save_state
);
2426 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
2427 NULL if there is in fact nothing to do. ARG2 may be null if FN
2428 actually only takes one argument. */
2431 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
2433 tree defparm
, parm
, t
;
2441 nargs
= list_length (DECL_ARGUMENTS (fn
));
2442 argarray
= XALLOCAVEC (tree
, nargs
);
2444 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
2446 defparm
= TREE_CHAIN (defparm
);
2448 bool is_method
= TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
;
2449 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
2451 tree inner_type
= TREE_TYPE (arg1
);
2452 tree start1
, end1
, p1
;
2453 tree start2
= NULL
, p2
= NULL
;
2454 tree ret
= NULL
, lab
;
2460 inner_type
= TREE_TYPE (inner_type
);
2461 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
2462 size_zero_node
, NULL
, NULL
);
2464 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
2465 size_zero_node
, NULL
, NULL
);
2467 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
2468 start1
= build_fold_addr_expr_loc (input_location
, start1
);
2470 start2
= build_fold_addr_expr_loc (input_location
, start2
);
2472 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
2473 end1
= fold_build_pointer_plus (start1
, end1
);
2475 p1
= create_tmp_var (TREE_TYPE (start1
));
2476 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
2477 append_to_statement_list (t
, &ret
);
2481 p2
= create_tmp_var (TREE_TYPE (start2
));
2482 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
2483 append_to_statement_list (t
, &ret
);
2486 lab
= create_artificial_label (input_location
);
2487 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
2488 append_to_statement_list (t
, &ret
);
2493 /* Handle default arguments. */
2494 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
2495 parm
= TREE_CHAIN (parm
), i
++)
2496 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
2497 TREE_PURPOSE (parm
), fn
,
2498 i
- is_method
, tf_warning_or_error
);
2499 t
= build_call_a (fn
, i
, argarray
);
2500 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t
)))
2501 t
= build_cplus_new (TREE_TYPE (t
), t
, tf_warning_or_error
);
2502 t
= fold_convert (void_type_node
, t
);
2503 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
2504 append_to_statement_list (t
, &ret
);
2506 t
= fold_build_pointer_plus (p1
, TYPE_SIZE_UNIT (inner_type
));
2507 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
2508 append_to_statement_list (t
, &ret
);
2512 t
= fold_build_pointer_plus (p2
, TYPE_SIZE_UNIT (inner_type
));
2513 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
2514 append_to_statement_list (t
, &ret
);
2517 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
2518 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
2519 append_to_statement_list (t
, &ret
);
2525 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
2527 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
2528 /* Handle default arguments. */
2529 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
2530 parm
= TREE_CHAIN (parm
), i
++)
2531 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
2532 TREE_PURPOSE (parm
), fn
,
2533 i
- is_method
, tf_warning_or_error
);
2534 t
= build_call_a (fn
, i
, argarray
);
2535 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t
)))
2536 t
= build_cplus_new (TREE_TYPE (t
), t
, tf_warning_or_error
);
2537 t
= fold_convert (void_type_node
, t
);
2538 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
2542 /* Return code to initialize DECL with its default constructor, or
2543 NULL if there's nothing to do. */
2546 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
2548 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2552 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
2557 /* Return code to initialize DST with a copy constructor from SRC. */
2560 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
2562 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2566 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
2568 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
2573 /* Similarly, except use an assignment operator instead. */
2576 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
2578 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2582 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
2584 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
2589 /* Return code to destroy DECL. */
2592 cxx_omp_clause_dtor (tree clause
, tree decl
)
2594 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2598 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
2603 /* True if OpenMP should privatize what this DECL points to rather
2604 than the DECL itself. */
2607 cxx_omp_privatize_by_reference (const_tree decl
)
2609 return (TYPE_REF_P (TREE_TYPE (decl
))
2610 || is_invisiref_parm (decl
));
2613 /* Return true if DECL is const qualified var having no mutable member. */
2615 cxx_omp_const_qual_no_mutable (tree decl
)
2617 tree type
= TREE_TYPE (decl
);
2618 if (TYPE_REF_P (type
))
2620 if (!is_invisiref_parm (decl
))
2622 type
= TREE_TYPE (type
);
2624 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
2626 /* NVR doesn't preserve const qualification of the
2628 tree outer
= outer_curly_brace_block (current_function_decl
);
2632 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
2634 && DECL_NAME (decl
) == DECL_NAME (var
)
2635 && (TYPE_MAIN_VARIANT (type
)
2636 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
2638 if (TYPE_READONLY (TREE_TYPE (var
)))
2639 type
= TREE_TYPE (var
);
2645 if (type
== error_mark_node
)
2648 /* Variables with const-qualified type having no mutable member
2649 are predetermined shared. */
2650 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
2656 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2657 of DECL is predetermined. */
2659 enum omp_clause_default_kind
2660 cxx_omp_predetermined_sharing_1 (tree decl
)
2662 /* Static data members are predetermined shared. */
2663 if (TREE_STATIC (decl
))
2665 tree ctx
= CP_DECL_CONTEXT (decl
);
2666 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
2667 return OMP_CLAUSE_DEFAULT_SHARED
;
2669 if (c_omp_predefined_variable (decl
))
2670 return OMP_CLAUSE_DEFAULT_SHARED
;
2673 /* this may not be specified in data-sharing clauses, still we need
2674 to predetermined it firstprivate. */
2675 if (decl
== current_class_ptr
)
2676 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2678 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
2681 /* Likewise, but also include the artificial vars. We don't want to
2682 disallow the artificial vars being mentioned in explicit clauses,
2683 as we use artificial vars e.g. for loop constructs with random
2684 access iterators other than pointers, but during gimplification
2685 we want to treat them as predetermined. */
2687 enum omp_clause_default_kind
2688 cxx_omp_predetermined_sharing (tree decl
)
2690 enum omp_clause_default_kind ret
= cxx_omp_predetermined_sharing_1 (decl
);
2691 if (ret
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
2694 /* Predetermine artificial variables holding integral values, those
2695 are usually result of gimplify_one_sizepos or SAVE_EXPR
2698 && DECL_ARTIFICIAL (decl
)
2699 && INTEGRAL_TYPE_P (TREE_TYPE (decl
))
2700 && !(DECL_LANG_SPECIFIC (decl
)
2701 && DECL_OMP_PRIVATIZED_MEMBER (decl
)))
2702 return OMP_CLAUSE_DEFAULT_SHARED
;
2704 /* Similarly for typeinfo symbols. */
2705 if (VAR_P (decl
) && DECL_ARTIFICIAL (decl
) && DECL_TINFO_P (decl
))
2706 return OMP_CLAUSE_DEFAULT_SHARED
;
2708 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
2711 enum omp_clause_defaultmap_kind
2712 cxx_omp_predetermined_mapping (tree decl
)
2714 /* Predetermine artificial variables holding integral values, those
2715 are usually result of gimplify_one_sizepos or SAVE_EXPR
2718 && DECL_ARTIFICIAL (decl
)
2719 && INTEGRAL_TYPE_P (TREE_TYPE (decl
))
2720 && !(DECL_LANG_SPECIFIC (decl
)
2721 && DECL_OMP_PRIVATIZED_MEMBER (decl
)))
2722 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
;
2724 if (c_omp_predefined_variable (decl
))
2725 return OMP_CLAUSE_DEFAULTMAP_TO
;
2727 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
2730 /* Finalize an implicitly determined clause. */
2733 cxx_omp_finish_clause (tree c
, gimple_seq
*, bool /* openacc */)
2735 tree decl
, inner_type
;
2736 bool make_shared
= false;
2738 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
2739 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
2740 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LASTPRIVATE
2741 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)))
2744 decl
= OMP_CLAUSE_DECL (c
);
2745 decl
= require_complete_type (decl
);
2746 inner_type
= TREE_TYPE (decl
);
2747 if (decl
== error_mark_node
)
2749 else if (TYPE_REF_P (TREE_TYPE (decl
)))
2750 inner_type
= TREE_TYPE (inner_type
);
2752 /* We're interested in the base element, not arrays. */
2753 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
2754 inner_type
= TREE_TYPE (inner_type
);
2756 /* Check for special function availability by building a call to one.
2757 Save the results, because later we won't be in the right context
2758 for making these queries. */
2759 bool first
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
;
2760 bool last
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
;
2762 && CLASS_TYPE_P (inner_type
)
2763 && cxx_omp_create_clause_info (c
, inner_type
, !first
, first
, last
,
2769 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;
2770 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
) = 0;
2771 OMP_CLAUSE_SHARED_READONLY (c
) = 0;
2775 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2776 disregarded in OpenMP construct, because it is going to be
2777 remapped during OpenMP lowering. SHARED is true if DECL
2778 is going to be shared, false if it is going to be privatized. */
2781 cxx_omp_disregard_value_expr (tree decl
, bool shared
)
2786 && DECL_HAS_VALUE_EXPR_P (decl
)
2787 && DECL_ARTIFICIAL (decl
)
2788 && DECL_LANG_SPECIFIC (decl
)
2789 && DECL_OMP_PRIVATIZED_MEMBER (decl
))
2791 if (VAR_P (decl
) && DECL_CONTEXT (decl
) && is_capture_proxy (decl
))
2796 /* Fold expression X which is used as an rvalue if RVAL is true. */
2799 cp_fold_maybe_rvalue (tree x
, bool rval
, fold_flags_t flags
)
2803 x
= cp_fold (x
, flags
);
2805 x
= mark_rvalue_use (x
);
2806 if (rval
&& DECL_P (x
)
2807 && !TYPE_REF_P (TREE_TYPE (x
)))
2809 tree v
= decl_constant_value (x
);
2810 if (v
!= x
&& v
!= error_mark_node
)
2822 cp_fold_maybe_rvalue (tree x
, bool rval
)
2824 return cp_fold_maybe_rvalue (x
, rval
, ff_none
);
2827 /* Fold expression X which is used as an rvalue. */
2830 cp_fold_rvalue (tree x
, fold_flags_t flags
)
2832 return cp_fold_maybe_rvalue (x
, true, flags
);
2836 cp_fold_rvalue (tree x
)
2838 return cp_fold_rvalue (x
, ff_none
);
2841 /* Perform folding on expression X. */
2844 cp_fully_fold (tree x
, mce_value manifestly_const_eval
)
2846 if (processing_template_decl
)
2848 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2849 have to call both. */
2850 if (cxx_dialect
>= cxx11
)
2852 x
= maybe_constant_value (x
, /*decl=*/NULL_TREE
, manifestly_const_eval
);
2853 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2854 a TARGET_EXPR; undo that here. */
2855 if (TREE_CODE (x
) == TARGET_EXPR
)
2856 x
= TARGET_EXPR_INITIAL (x
);
2857 else if (TREE_CODE (x
) == VIEW_CONVERT_EXPR
2858 && TREE_CODE (TREE_OPERAND (x
, 0)) == CONSTRUCTOR
2859 && TREE_TYPE (TREE_OPERAND (x
, 0)) == TREE_TYPE (x
))
2860 x
= TREE_OPERAND (x
, 0);
2862 fold_flags_t flags
= ff_none
;
2863 if (manifestly_const_eval
== mce_false
)
2864 flags
|= ff_mce_false
;
2865 return cp_fold_rvalue (x
, flags
);
2869 cp_fully_fold (tree x
)
2871 return cp_fully_fold (x
, mce_unknown
);
2874 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2878 cp_fully_fold_init (tree x
)
2880 if (processing_template_decl
)
2882 x
= cp_fully_fold (x
, mce_false
);
2883 cp_fold_data
data (ff_mce_false
);
2884 cp_walk_tree (&x
, cp_fold_r
, &data
, NULL
);
2888 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2889 and certain changes are made to the folding done. Or should be (FIXME). We
2890 never touch maybe_const, as it is only used for the C front-end
2891 C_MAYBE_CONST_EXPR. */
2894 c_fully_fold (tree x
, bool /*in_init*/, bool */
*maybe_const*/
, bool lval
)
2896 return cp_fold_maybe_rvalue (x
, !lval
);
2899 static GTY((deletable
)) hash_map
<tree
, tree
> *fold_caches
[2];
2901 /* Subroutine of cp_fold. Returns which fold cache to use according
2902 to the given flags. We need multiple caches since the result of
2903 folding may depend on which flags are used. */
2905 static hash_map
<tree
, tree
> *&
2906 get_fold_cache (fold_flags_t flags
)
2908 if (flags
& ff_mce_false
)
2909 return fold_caches
[1];
2911 return fold_caches
[0];
2914 /* Dispose of the whole FOLD_CACHE. */
2917 clear_fold_cache (void)
2919 for (auto& fold_cache
: fold_caches
)
2920 if (fold_cache
!= NULL
)
2921 fold_cache
->empty ();
2924 /* This function tries to fold an expression X.
2925 To avoid combinatorial explosion, folding results are kept in fold_cache.
2926 If X is invalid, we don't fold at all.
2927 For performance reasons we don't cache expressions representing a
2928 declaration or constant.
2929 Function returns X or its folded variant. */
2932 cp_fold (tree x
, fold_flags_t flags
)
2934 tree op0
, op1
, op2
, op3
;
2935 tree org_x
= x
, r
= NULL_TREE
;
2936 enum tree_code code
;
2938 bool rval_ops
= true;
2940 if (!x
|| x
== error_mark_node
)
2943 if (EXPR_P (x
) && (!TREE_TYPE (x
) || TREE_TYPE (x
) == error_mark_node
))
2946 /* Don't bother to cache DECLs or constants. */
2947 if (DECL_P (x
) || CONSTANT_CLASS_P (x
))
2950 auto& fold_cache
= get_fold_cache (flags
);
2951 if (fold_cache
== NULL
)
2952 fold_cache
= hash_map
<tree
, tree
>::create_ggc (101);
2954 if (tree
*cached
= fold_cache
->get (x
))
2956 /* unshare_expr doesn't recurse into SAVE_EXPRs. If SAVE_EXPR's
2957 argument has been folded into a tree invariant, make sure it is
2958 unshared. See PR112727. */
2959 if (TREE_CODE (x
) == SAVE_EXPR
&& *cached
!= x
)
2960 return unshare_expr (*cached
);
2964 uid_sensitive_constexpr_evaluation_checker c
;
2966 code
= TREE_CODE (x
);
2969 case CLEANUP_POINT_EXPR
:
2970 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2972 r
= cp_fold_rvalue (TREE_OPERAND (x
, 0), flags
);
2973 if (!TREE_SIDE_EFFECTS (r
))
2978 x
= fold_sizeof_expr (x
);
2981 case VIEW_CONVERT_EXPR
:
2984 case NON_LVALUE_EXPR
:
2987 if (VOID_TYPE_P (TREE_TYPE (x
)))
2989 /* This is just to make sure we don't end up with casts to
2990 void from error_mark_node. If we just return x, then
2991 cp_fold_r might fold the operand into error_mark_node and
2992 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2993 during gimplification doesn't like such casts.
2994 Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2995 folding of the operand should be in the caches and if in cp_fold_r
2996 it will modify it in place. */
2997 op0
= cp_fold (TREE_OPERAND (x
, 0), flags
);
2998 if (op0
== error_mark_node
)
2999 x
= error_mark_node
;
3003 loc
= EXPR_LOCATION (x
);
3004 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
, flags
);
3006 if (code
== CONVERT_EXPR
3007 && SCALAR_TYPE_P (TREE_TYPE (x
))
3008 && op0
!= void_node
)
3009 /* During parsing we used convert_to_*_nofold; re-convert now using the
3010 folding variants, since fold() doesn't do those transformations. */
3011 x
= fold (convert (TREE_TYPE (x
), op0
));
3012 else if (op0
!= TREE_OPERAND (x
, 0))
3014 if (op0
== error_mark_node
)
3015 x
= error_mark_node
;
3017 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
3022 /* Conversion of an out-of-range value has implementation-defined
3023 behavior; the language considers it different from arithmetic
3024 overflow, which is undefined. */
3025 if (TREE_CODE (op0
) == INTEGER_CST
3026 && TREE_OVERFLOW_P (x
) && !TREE_OVERFLOW_P (op0
))
3027 TREE_OVERFLOW (x
) = false;
3031 case EXCESS_PRECISION_EXPR
:
3032 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
, flags
);
3033 x
= fold_convert_loc (EXPR_LOCATION (x
), TREE_TYPE (x
), op0
);
3037 /* We don't need the decltype(auto) obfuscation anymore. */
3038 if (REF_PARENTHESIZED_P (x
))
3040 tree p
= maybe_undo_parenthesized_ref (x
);
3042 return cp_fold (p
, flags
);
3047 loc
= EXPR_LOCATION (x
);
3048 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), false, flags
);
3050 /* Cope with user tricks that amount to offsetof. */
3051 if (op0
!= error_mark_node
3052 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0
)))
3054 tree val
= get_base_address (op0
);
3056 && INDIRECT_REF_P (val
)
3057 && COMPLETE_TYPE_P (TREE_TYPE (val
))
3058 && TREE_CONSTANT (TREE_OPERAND (val
, 0)))
3060 val
= TREE_OPERAND (val
, 0);
3062 val
= maybe_constant_value (val
);
3063 if (TREE_CODE (val
) == INTEGER_CST
)
3064 return fold_offsetof (op0
, TREE_TYPE (x
));
3074 case FIX_TRUNC_EXPR
:
3080 case TRUTH_NOT_EXPR
:
3081 case FIXED_CONVERT_EXPR
:
3084 loc
= EXPR_LOCATION (x
);
3085 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
, flags
);
3088 if (op0
!= TREE_OPERAND (x
, 0))
3090 if (op0
== error_mark_node
)
3091 x
= error_mark_node
;
3094 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
3095 if (code
== INDIRECT_REF
3096 && (INDIRECT_REF_P (x
) || TREE_CODE (x
) == MEM_REF
))
3098 TREE_READONLY (x
) = TREE_READONLY (org_x
);
3099 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
3100 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
3107 gcc_assert (TREE_CODE (x
) != COND_EXPR
3108 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x
, 0))));
3111 case UNARY_PLUS_EXPR
:
3112 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0), flags
);
3113 if (op0
== error_mark_node
)
3114 x
= error_mark_node
;
3116 x
= fold_convert (TREE_TYPE (x
), op0
);
3119 case POSTDECREMENT_EXPR
:
3120 case POSTINCREMENT_EXPR
:
3122 case PREDECREMENT_EXPR
:
3123 case PREINCREMENT_EXPR
:
3128 case POINTER_PLUS_EXPR
:
3130 case POINTER_DIFF_EXPR
:
3133 case TRUNC_DIV_EXPR
:
3135 case FLOOR_DIV_EXPR
:
3136 case ROUND_DIV_EXPR
:
3137 case TRUNC_MOD_EXPR
:
3139 case ROUND_MOD_EXPR
:
3141 case EXACT_DIV_EXPR
:
3151 case TRUTH_AND_EXPR
:
3152 case TRUTH_ANDIF_EXPR
:
3154 case TRUTH_ORIF_EXPR
:
3155 case TRUTH_XOR_EXPR
:
3156 case LT_EXPR
: case LE_EXPR
:
3157 case GT_EXPR
: case GE_EXPR
:
3158 case EQ_EXPR
: case NE_EXPR
:
3159 case UNORDERED_EXPR
: case ORDERED_EXPR
:
3160 case UNLT_EXPR
: case UNLE_EXPR
:
3161 case UNGT_EXPR
: case UNGE_EXPR
:
3162 case UNEQ_EXPR
: case LTGT_EXPR
:
3163 case RANGE_EXPR
: case COMPLEX_EXPR
:
3165 loc
= EXPR_LOCATION (x
);
3166 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
, flags
);
3167 op1
= cp_fold_rvalue (TREE_OPERAND (x
, 1), flags
);
3169 /* decltype(nullptr) has only one value, so optimize away all comparisons
3170 with that type right away, keeping them in the IL causes troubles for
3171 various optimizations. */
3172 if (COMPARISON_CLASS_P (org_x
)
3173 && TREE_CODE (TREE_TYPE (op0
)) == NULLPTR_TYPE
3174 && TREE_CODE (TREE_TYPE (op1
)) == NULLPTR_TYPE
)
3179 x
= constant_boolean_node (true, TREE_TYPE (x
));
3182 x
= constant_boolean_node (false, TREE_TYPE (x
));
3187 return omit_two_operands_loc (loc
, TREE_TYPE (x
), x
,
3191 if (op0
!= TREE_OPERAND (x
, 0) || op1
!= TREE_OPERAND (x
, 1))
3193 if (op0
== error_mark_node
|| op1
== error_mark_node
)
3194 x
= error_mark_node
;
3196 x
= fold_build2_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
);
3201 /* This is only needed for -Wnonnull-compare and only if
3202 TREE_NO_WARNING (org_x), but to avoid that option affecting code
3203 generation, we do it always. */
3204 if (COMPARISON_CLASS_P (org_x
))
3206 if (x
== error_mark_node
|| TREE_CODE (x
) == INTEGER_CST
)
3208 else if (COMPARISON_CLASS_P (x
))
3210 if (warn_nonnull_compare
3211 && warning_suppressed_p (org_x
, OPT_Wnonnull_compare
))
3212 suppress_warning (x
, OPT_Wnonnull_compare
);
3214 /* Otherwise give up on optimizing these, let GIMPLE folders
3215 optimize those later on. */
3216 else if (op0
!= TREE_OPERAND (org_x
, 0)
3217 || op1
!= TREE_OPERAND (org_x
, 1))
3219 x
= build2_loc (loc
, code
, TREE_TYPE (org_x
), op0
, op1
);
3220 if (warn_nonnull_compare
3221 && warning_suppressed_p (org_x
, OPT_Wnonnull_compare
))
3222 suppress_warning (x
, OPT_Wnonnull_compare
);
3232 loc
= EXPR_LOCATION (x
);
3233 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0), flags
);
3234 op1
= cp_fold (TREE_OPERAND (x
, 1), flags
);
3235 op2
= cp_fold (TREE_OPERAND (x
, 2), flags
);
3237 if (TREE_CODE (TREE_TYPE (x
)) == BOOLEAN_TYPE
)
3239 warning_sentinel
s (warn_int_in_bool_context
);
3240 if (!VOID_TYPE_P (TREE_TYPE (op1
)))
3241 op1
= cp_truthvalue_conversion (op1
, tf_warning_or_error
);
3242 if (!VOID_TYPE_P (TREE_TYPE (op2
)))
3243 op2
= cp_truthvalue_conversion (op2
, tf_warning_or_error
);
3245 else if (VOID_TYPE_P (TREE_TYPE (x
)))
3247 if (TREE_CODE (op0
) == INTEGER_CST
)
3249 /* If the condition is constant, fold can fold away
3250 the COND_EXPR. If some statement-level uses of COND_EXPR
3251 have one of the branches NULL, avoid folding crash. */
3253 op1
= build_empty_stmt (loc
);
3255 op2
= build_empty_stmt (loc
);
3259 /* Otherwise, don't bother folding a void condition, since
3260 it can't produce a constant value. */
3261 if (op0
!= TREE_OPERAND (x
, 0)
3262 || op1
!= TREE_OPERAND (x
, 1)
3263 || op2
!= TREE_OPERAND (x
, 2))
3264 x
= build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
3269 if (op0
!= TREE_OPERAND (x
, 0)
3270 || op1
!= TREE_OPERAND (x
, 1)
3271 || op2
!= TREE_OPERAND (x
, 2))
3273 if (op0
== error_mark_node
3274 || op1
== error_mark_node
3275 || op2
== error_mark_node
)
3276 x
= error_mark_node
;
3278 x
= fold_build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
3283 /* A COND_EXPR might have incompatible types in branches if one or both
3284 arms are bitfields. If folding exposed such a branch, fix it up. */
3285 if (TREE_CODE (x
) != code
3286 && x
!= error_mark_node
3287 && !useless_type_conversion_p (TREE_TYPE (org_x
), TREE_TYPE (x
)))
3288 x
= fold_convert (TREE_TYPE (org_x
), x
);
3294 tree callee
= get_callee_fndecl (x
);
3296 /* "Inline" calls to std::move/forward and other cast-like functions
3297 by simply folding them into a corresponding cast to their return
3298 type. This is cheaper than relying on the middle end to do so, and
3299 also means we avoid generating useless debug info for them at all.
3301 At this point the argument has already been converted into a
3302 reference, so it suffices to use a NOP_EXPR to express the
3304 if ((OPTION_SET_P (flag_fold_simple_inlines
)
3305 ? flag_fold_simple_inlines
3307 && call_expr_nargs (x
) == 1
3308 && decl_in_std_namespace_p (callee
)
3309 && DECL_NAME (callee
) != NULL_TREE
3310 && (id_equal (DECL_NAME (callee
), "move")
3311 || id_equal (DECL_NAME (callee
), "forward")
3312 || id_equal (DECL_NAME (callee
), "addressof")
3313 /* This addressof equivalent is used heavily in libstdc++. */
3314 || id_equal (DECL_NAME (callee
), "__addressof")
3315 || id_equal (DECL_NAME (callee
), "as_const")))
3317 r
= CALL_EXPR_ARG (x
, 0);
3318 /* Check that the return and argument types are sane before
3320 if (INDIRECT_TYPE_P (TREE_TYPE (x
))
3321 && INDIRECT_TYPE_P (TREE_TYPE (r
)))
3323 if (!same_type_p (TREE_TYPE (x
), TREE_TYPE (r
)))
3324 r
= build_nop (TREE_TYPE (x
), r
);
3325 x
= cp_fold (r
, flags
);
3330 int sv
= optimize
, nw
= sv
;
3332 /* Some built-in function calls will be evaluated at compile-time in
3333 fold (). Set optimize to 1 when folding __builtin_constant_p inside
3334 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
3335 if (callee
&& fndecl_built_in_p (callee
) && !optimize
3336 && DECL_IS_BUILTIN_CONSTANT_P (callee
)
3337 && current_function_decl
3338 && DECL_DECLARED_CONSTEXPR_P (current_function_decl
))
3341 if (callee
&& fndecl_built_in_p (callee
, BUILT_IN_FRONTEND
))
3343 iloc_sentinel
ils (EXPR_LOCATION (x
));
3344 switch (DECL_FE_FUNCTION_CODE (callee
))
3346 case CP_BUILT_IN_IS_CONSTANT_EVALUATED
:
3347 /* Defer folding __builtin_is_constant_evaluated unless
3348 we know this isn't a manifestly constant-evaluated
3350 if (flags
& ff_mce_false
)
3351 x
= boolean_false_node
;
3353 case CP_BUILT_IN_SOURCE_LOCATION
:
3354 x
= fold_builtin_source_location (x
);
3356 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER
:
3357 x
= fold_builtin_is_corresponding_member
3358 (EXPR_LOCATION (x
), call_expr_nargs (x
),
3359 &CALL_EXPR_ARG (x
, 0));
3361 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS
:
3362 x
= fold_builtin_is_pointer_inverconvertible_with_class
3363 (EXPR_LOCATION (x
), call_expr_nargs (x
),
3364 &CALL_EXPR_ARG (x
, 0));
3373 && fndecl_built_in_p (callee
, CP_BUILT_IN_SOURCE_LOCATION
,
3376 x
= fold_builtin_source_location (x
);
3380 bool changed
= false;
3381 int m
= call_expr_nargs (x
);
3382 for (int i
= 0; i
< m
; i
++)
3384 r
= cp_fold (CALL_EXPR_ARG (x
, i
), flags
);
3385 if (r
!= CALL_EXPR_ARG (x
, i
))
3387 if (r
== error_mark_node
)
3389 x
= error_mark_node
;
3394 CALL_EXPR_ARG (x
, i
) = r
;
3398 if (x
== error_mark_node
)
3405 if (TREE_CODE (r
) != CALL_EXPR
)
3407 x
= cp_fold (r
, flags
);
3413 /* Invoke maybe_constant_value for functions declared
3414 constexpr and not called with AGGR_INIT_EXPRs.
3416 Do constexpr expansion of expressions where the call itself is not
3417 constant, but the call followed by an INDIRECT_REF is. */
3418 if (callee
&& DECL_DECLARED_CONSTEXPR_P (callee
)
3421 mce_value manifestly_const_eval
= mce_unknown
;
3422 if (flags
& ff_mce_false
)
3423 /* Allow folding __builtin_is_constant_evaluated to false during
3424 constexpr evaluation of this call. */
3425 manifestly_const_eval
= mce_false
;
3426 r
= maybe_constant_value (x
, /*decl=*/NULL_TREE
,
3427 manifestly_const_eval
);
3431 if (TREE_CODE (r
) != CALL_EXPR
)
3433 if (DECL_CONSTRUCTOR_P (callee
))
3435 loc
= EXPR_LOCATION (x
);
3436 tree a
= CALL_EXPR_ARG (x
, 0);
3437 bool return_this
= targetm
.cxx
.cdtor_returns_this ();
3439 a
= cp_save_expr (a
);
3440 tree s
= build_fold_indirect_ref_loc (loc
, a
);
3441 r
= cp_build_init_expr (s
, r
);
3443 r
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (x
), r
,
3444 fold_convert_loc (loc
, TREE_TYPE (x
), a
));
3457 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (x
);
3458 vec
<constructor_elt
, va_gc
> *nelts
= NULL
;
3459 FOR_EACH_VEC_SAFE_ELT (elts
, i
, p
)
3461 tree op
= cp_fold (p
->value
, flags
);
3464 if (op
== error_mark_node
)
3466 x
= error_mark_node
;
3471 nelts
= elts
->copy ();
3472 (*nelts
)[i
].value
= op
;
3477 x
= build_constructor (TREE_TYPE (x
), nelts
);
3478 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x
)
3479 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x
);
3480 CONSTRUCTOR_MUTABLE_POISON (x
)
3481 = CONSTRUCTOR_MUTABLE_POISON (org_x
);
3483 if (VECTOR_TYPE_P (TREE_TYPE (x
)))
3489 bool changed
= false;
3490 int n
= TREE_VEC_LENGTH (x
);
3492 for (int i
= 0; i
< n
; i
++)
3494 tree op
= cp_fold (TREE_VEC_ELT (x
, i
), flags
);
3495 if (op
!= TREE_VEC_ELT (x
, i
))
3499 TREE_VEC_ELT (x
, i
) = op
;
3508 case ARRAY_RANGE_REF
:
3510 loc
= EXPR_LOCATION (x
);
3511 op0
= cp_fold (TREE_OPERAND (x
, 0), flags
);
3512 op1
= cp_fold (TREE_OPERAND (x
, 1), flags
);
3513 op2
= cp_fold (TREE_OPERAND (x
, 2), flags
);
3514 op3
= cp_fold (TREE_OPERAND (x
, 3), flags
);
3516 if (op0
!= TREE_OPERAND (x
, 0)
3517 || op1
!= TREE_OPERAND (x
, 1)
3518 || op2
!= TREE_OPERAND (x
, 2)
3519 || op3
!= TREE_OPERAND (x
, 3))
3521 if (op0
== error_mark_node
3522 || op1
== error_mark_node
3523 || op2
== error_mark_node
3524 || op3
== error_mark_node
)
3525 x
= error_mark_node
;
3528 x
= build4_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
, op3
);
3529 TREE_READONLY (x
) = TREE_READONLY (org_x
);
3530 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
3531 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
3539 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3540 folding, evaluates to an invariant. In that case no need to wrap
3541 this folded tree with a SAVE_EXPR. */
3542 r
= cp_fold (TREE_OPERAND (x
, 0), flags
);
3543 if (tree_invariant_p (r
))
3548 x
= evaluate_requires_expr (x
);
3555 if (EXPR_P (x
) && TREE_CODE (x
) == code
)
3557 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
3558 copy_warning (x
, org_x
);
3561 if (!c
.evaluation_restricted_p ())
3563 fold_cache
->put (org_x
, x
);
3564 /* Prevent that we try to fold an already folded result again. */
3566 fold_cache
->put (x
, x
);
3572 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
3575 lookup_hotness_attribute (tree list
)
3577 for (; list
; list
= TREE_CHAIN (list
))
3579 tree name
= get_attribute_name (list
);
3580 if ((is_attribute_p ("hot", name
)
3581 || is_attribute_p ("cold", name
)
3582 || is_attribute_p ("likely", name
)
3583 || is_attribute_p ("unlikely", name
))
3584 && is_attribute_namespace_p ("", list
))
3590 /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
3593 remove_hotness_attribute (tree list
)
3595 for (tree
*p
= &list
; *p
; )
3598 tree name
= get_attribute_name (l
);
3599 if ((is_attribute_p ("hot", name
)
3600 || is_attribute_p ("cold", name
)
3601 || is_attribute_p ("likely", name
)
3602 || is_attribute_p ("unlikely", name
))
3603 && is_attribute_namespace_p ("", l
))
3605 *p
= TREE_CHAIN (l
);
3608 p
= &TREE_CHAIN (l
);
3613 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3617 process_stmt_hotness_attribute (tree std_attrs
, location_t attrs_loc
)
3619 if (std_attrs
== error_mark_node
)
3621 if (tree attr
= lookup_hotness_attribute (std_attrs
))
3623 tree name
= get_attribute_name (attr
);
3624 bool hot
= (is_attribute_p ("hot", name
)
3625 || is_attribute_p ("likely", name
));
3626 tree pred
= build_predict_expr (hot
? PRED_HOT_LABEL
: PRED_COLD_LABEL
,
3627 hot
? TAKEN
: NOT_TAKEN
);
3628 SET_EXPR_LOCATION (pred
, attrs_loc
);
3630 if (tree other
= lookup_hotness_attribute (TREE_CHAIN (attr
)))
3631 warning (OPT_Wattributes
, "ignoring attribute %qE after earlier %qE",
3632 get_attribute_name (other
), name
);
3633 std_attrs
= remove_hotness_attribute (std_attrs
);
3638 /* Build IFN_ASSUME internal call for assume condition ARG. */
3641 build_assume_call (location_t loc
, tree arg
)
3643 if (!processing_template_decl
)
3644 arg
= fold_build_cleanup_point_expr (TREE_TYPE (arg
), arg
);
3645 return build_call_expr_internal_loc (loc
, IFN_ASSUME
, void_type_node
,
3649 /* If [[assume (cond)]] appears on this statement, handle it. */
3652 process_stmt_assume_attribute (tree std_attrs
, tree statement
,
3653 location_t attrs_loc
)
3655 if (std_attrs
== error_mark_node
)
3657 tree attr
= lookup_attribute ("gnu", "assume", std_attrs
);
3660 /* The next token after the assume attribute is not ';'. */
3663 warning_at (attrs_loc
, OPT_Wattributes
,
3664 "%<assume%> attribute not followed by %<;%>");
3667 for (; attr
; attr
= lookup_attribute ("gnu", "assume", TREE_CHAIN (attr
)))
3669 tree args
= TREE_VALUE (attr
);
3670 if (args
&& PACK_EXPANSION_P (args
))
3672 auto_diagnostic_group d
;
3673 error_at (attrs_loc
, "pack expansion of %qE attribute",
3674 get_attribute_name (attr
));
3675 if (cxx_dialect
>= cxx17
)
3676 inform (attrs_loc
, "use fold expression in the attribute "
3677 "argument instead");
3680 int nargs
= list_length (args
);
3683 auto_diagnostic_group d
;
3684 error_at (attrs_loc
, "wrong number of arguments specified for "
3685 "%qE attribute", get_attribute_name (attr
));
3686 inform (attrs_loc
, "expected %i, found %i", 1, nargs
);
3690 tree arg
= TREE_VALUE (args
);
3691 if (!type_dependent_expression_p (arg
))
3692 arg
= contextual_conv_bool (arg
, tf_warning_or_error
);
3693 if (error_operand_p (arg
))
3695 finish_expr_stmt (build_assume_call (attrs_loc
, arg
));
3698 return remove_attribute ("gnu", "assume", std_attrs
);
3701 /* Return the type std::source_location::__impl after performing
3702 verification on it. */
3705 get_source_location_impl_type ()
3707 tree name
= get_identifier ("source_location");
3708 tree decl
= lookup_qualified_name (std_node
, name
);
3709 if (TREE_CODE (decl
) != TYPE_DECL
)
3711 auto_diagnostic_group d
;
3712 if (decl
== error_mark_node
|| TREE_CODE (decl
) == TREE_LIST
)
3713 qualified_name_lookup_error (std_node
, name
, decl
, input_location
);
3715 error ("%qD is not a type", decl
);
3716 return error_mark_node
;
3718 name
= get_identifier ("__impl");
3719 tree type
= TREE_TYPE (decl
);
3720 decl
= lookup_qualified_name (type
, name
);
3721 if (TREE_CODE (decl
) != TYPE_DECL
)
3723 auto_diagnostic_group d
;
3724 if (decl
== error_mark_node
|| TREE_CODE (decl
) == TREE_LIST
)
3725 qualified_name_lookup_error (type
, name
, decl
, input_location
);
3727 error ("%qD is not a type", decl
);
3728 return error_mark_node
;
3730 type
= TREE_TYPE (decl
);
3731 if (TREE_CODE (type
) != RECORD_TYPE
)
3733 error ("%qD is not a class type", decl
);
3734 return error_mark_node
;
3738 for (tree field
= TYPE_FIELDS (type
);
3739 (field
= next_aggregate_field (field
)) != NULL_TREE
;
3740 field
= DECL_CHAIN (field
))
3742 if (DECL_NAME (field
) != NULL_TREE
)
3744 const char *n
= IDENTIFIER_POINTER (DECL_NAME (field
));
3745 if (strcmp (n
, "_M_file_name") == 0
3746 || strcmp (n
, "_M_function_name") == 0)
3748 if (TREE_TYPE (field
) != const_string_type_node
)
3750 error ("%qD does not have %<const char *%> type", field
);
3751 return error_mark_node
;
3756 else if (strcmp (n
, "_M_line") == 0 || strcmp (n
, "_M_column") == 0)
3758 if (TREE_CODE (TREE_TYPE (field
)) != INTEGER_TYPE
)
3760 error ("%qD does not have integral type", field
);
3761 return error_mark_node
;
3772 error ("%<std::source_location::__impl%> does not contain only "
3773 "non-static data members %<_M_file_name%>, "
3774 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3775 return error_mark_node
;
3777 return build_qualified_type (type
, TYPE_QUAL_CONST
);
3780 /* Type for source_location_table hash_set. */
3781 struct GTY((for_user
)) source_location_table_entry
{
3787 /* Traits class for function start hash maps below. */
3789 struct source_location_table_entry_hash
3790 : ggc_remove
<source_location_table_entry
>
3792 typedef source_location_table_entry value_type
;
3793 typedef source_location_table_entry compare_type
;
3796 hash (const source_location_table_entry
&ref
)
3798 inchash::hash
hstate (0);
3799 hstate
.add_int (ref
.loc
);
3800 hstate
.add_int (ref
.uid
);
3801 return hstate
.end ();
3805 equal (const source_location_table_entry
&ref1
,
3806 const source_location_table_entry
&ref2
)
3808 return ref1
.loc
== ref2
.loc
&& ref1
.uid
== ref2
.uid
;
3812 mark_deleted (source_location_table_entry
&ref
)
3814 ref
.loc
= UNKNOWN_LOCATION
;
3816 ref
.var
= NULL_TREE
;
3819 static const bool empty_zero_p
= true;
3822 mark_empty (source_location_table_entry
&ref
)
3824 ref
.loc
= UNKNOWN_LOCATION
;
3826 ref
.var
= NULL_TREE
;
3830 is_deleted (const source_location_table_entry
&ref
)
3832 return (ref
.loc
== UNKNOWN_LOCATION
3834 && ref
.var
== NULL_TREE
);
3838 is_empty (const source_location_table_entry
&ref
)
3840 return (ref
.loc
== UNKNOWN_LOCATION
3842 && ref
.var
== NULL_TREE
);
3846 pch_nx (source_location_table_entry
&p
)
3848 extern void gt_pch_nx (source_location_table_entry
&);
3853 pch_nx (source_location_table_entry
&p
, gt_pointer_operator op
, void *cookie
)
3855 extern void gt_pch_nx (source_location_table_entry
*, gt_pointer_operator
,
3857 gt_pch_nx (&p
, op
, cookie
);
3861 static GTY(()) hash_table
<source_location_table_entry_hash
>
3862 *source_location_table
;
3863 static GTY(()) unsigned int source_location_id
;
3865 /* Fold the __builtin_source_location () call T. */
3868 fold_builtin_source_location (const_tree t
)
3870 gcc_assert (TREE_CODE (t
) == CALL_EXPR
);
3871 /* TREE_TYPE (t) is const std::source_location::__impl* */
3872 tree source_location_impl
= TREE_TYPE (TREE_TYPE (t
));
3873 if (source_location_impl
== error_mark_node
)
3874 return build_zero_cst (const_ptr_type_node
);
3875 gcc_assert (CLASS_TYPE_P (source_location_impl
)
3876 && id_equal (TYPE_IDENTIFIER (source_location_impl
), "__impl"));
3878 location_t loc
= EXPR_LOCATION (t
);
3879 if (source_location_table
== NULL
)
3880 source_location_table
3881 = hash_table
<source_location_table_entry_hash
>::create_ggc (64);
3882 const line_map_ordinary
*map
;
3883 source_location_table_entry entry
;
3885 = linemap_resolve_location (line_table
, loc
, LRK_MACRO_EXPANSION_POINT
,
3887 entry
.uid
= current_function_decl
? DECL_UID (current_function_decl
) : -1;
3888 entry
.var
= error_mark_node
;
3889 source_location_table_entry
*entryp
3890 = source_location_table
->find_slot (entry
, INSERT
);
3897 ASM_GENERATE_INTERNAL_LABEL (tmp_name
, "Lsrc_loc", source_location_id
++);
3898 var
= build_decl (loc
, VAR_DECL
, get_identifier (tmp_name
),
3899 source_location_impl
);
3900 TREE_STATIC (var
) = 1;
3901 TREE_PUBLIC (var
) = 0;
3902 DECL_ARTIFICIAL (var
) = 1;
3903 DECL_IGNORED_P (var
) = 1;
3904 DECL_EXTERNAL (var
) = 0;
3905 DECL_DECLARED_CONSTEXPR_P (var
) = 1;
3906 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var
) = 1;
3907 layout_decl (var
, 0);
3909 vec
<constructor_elt
, va_gc
> *v
= NULL
;
3911 for (tree field
= TYPE_FIELDS (source_location_impl
);
3912 (field
= next_aggregate_field (field
)) != NULL_TREE
;
3913 field
= DECL_CHAIN (field
))
3915 const char *n
= IDENTIFIER_POINTER (DECL_NAME (field
));
3916 tree val
= NULL_TREE
;
3917 if (strcmp (n
, "_M_file_name") == 0)
3919 if (const char *fname
= LOCATION_FILE (loc
))
3921 fname
= remap_macro_filename (fname
);
3922 val
= build_string_literal (fname
);
3925 val
= build_string_literal ("");
3927 else if (strcmp (n
, "_M_function_name") == 0)
3929 const char *name
= "";
3931 if (current_function_decl
)
3932 name
= cxx_printable_name (current_function_decl
, 2);
3934 val
= build_string_literal (name
);
3936 else if (strcmp (n
, "_M_line") == 0)
3937 val
= build_int_cst (TREE_TYPE (field
), LOCATION_LINE (loc
));
3938 else if (strcmp (n
, "_M_column") == 0)
3939 val
= build_int_cst (TREE_TYPE (field
), LOCATION_COLUMN (loc
));
3942 CONSTRUCTOR_APPEND_ELT (v
, field
, val
);
3945 tree ctor
= build_constructor (source_location_impl
, v
);
3946 TREE_CONSTANT (ctor
) = 1;
3947 TREE_STATIC (ctor
) = 1;
3948 DECL_INITIAL (var
) = ctor
;
3949 varpool_node::finalize_decl (var
);
3954 return build_fold_addr_expr_with_type_loc (loc
, var
, TREE_TYPE (t
));
3957 #include "gt-cp-cp-gimplify.h"